diff --git a/gatsby-browser.js b/gatsby-browser.js
index fe9d2ca4..5331a52a 100644
--- a/gatsby-browser.js
+++ b/gatsby-browser.js
@@ -8,6 +8,6 @@ require('prismjs/themes/prism.css');
 
 exports.onClientEntry = () => {
   window.onload = () => {
-    brython({ debug: 1, indexedDB: false });
+    brython({ debug: 1, indexedDB: false, pythonpath:'/' });
   };
 };
diff --git a/static/brython/brython.js b/static/brython/brython.js
index 5c16c1fd..71582d75 100644
--- a/static/brython/brython.js
+++ b/static/brython/brython.js
@@ -1,17 +1,23 @@
 // brython.js brython.info
-// version [3, 7, 0, 'final', 0]
-// implementation [3, 7, 4, 'dev', 0]
+// version [3, 9, 0, 'final', 0]
+// implementation [3, 9, 0, 'final', 0]
 // version compiled from commented, indented source files at
 // github.com/brython-dev/brython
-!function(r,n){"use strict";function t(r,n,t,e){for(t=void 0!==t?t:0,e=void 0!==e?e:r.length;e>t;){var c=e+t>>1;r[c]>n?e=c:t=c+1}return t}function e(r){if(35>r)return r;var n=t(K,r);return L[n]}function c(r){var n=t(M,r);return N[n]}function u(r){switch(r.charCodeAt(0)){case 9:case 10:case 11:case 12:case 13:case 28:case 29:case 30:case 31:case 32:case 133:case 160:case 5760:case 8192:case 8193:case 8194:case 8195:case 8196:case 8197:case 8198:case 8199:case 8200:case 8201:case 8202:case 8232:case 8233:case 8239:case 8287:case 12288:return!0}return!1}function a(r){var n;return r>=1114112?n=0:(n=e(r>>J),n=c((n<e;e++)t+=String.fromCharCode(I[r+e]);return t}function o(r){var n=r.charCodeAt(0),t=a(n);return 0!=(t[H]&T)}function f(r){var n=r.charCodeAt(0),t=a(n);return 0!=(t[H]&V)}function h(r){var n=r.charCodeAt(0),t=a(n);return 0!=(t[H]&W)}function v(r){var n=r.charCodeAt(0),t=a(n);return t[H]&P?t[F]:-1}function s(r){return v(r)<0?0:1}function d(r){var n=r.charCodeAt(0),t=a(n);return t[H]&Q?t[G]:-1}function A(r){return d(r)<0?0:1}function l(r){var n=r.charCodeAt(0),t=a(n);return 0!=(t[H]&Y)}function C(r){var n=r.charCodeAt(0),t=a(n);return 0!=(t[H]&X)}function g(r){var n=r.charCodeAt(0),t=a(n);return 0!=(t[H]&R)}function p(r){var n=r.charCodeAt(0),t=a(n);return 0!=(t[H]&U)}function m(r){var n=r.charCodeAt(0),t=a(n);return t[H]&rn?i(65535&t[D],t[D]>>24):String.fromCharCode(n+t[D])}function S(r){var n=r.charCodeAt(0),t=a(n);return t[H]&rn?i(65535&t[E],t[E]>>24):String.fromCharCode(n+t[E])}function w(r){var n=r.charCodeAt(0),t=a(n);return t[H]&rn?i(65535&t[B],t[B]>>24):String.fromCharCode(n+t[B])}function b(r){var n=r.charCodeAt(0),t=a(n);if(t[H]&rn&&t[D]>>20&7){var e=(65535&t[D])+(t[D]>>24),c=t[D]>>20&7;return i(e,c)}return m(r)}function z(r){var n=r.charCodeAt(0),t=a(n);return 0!=(t[H]&$)}function _(r){var n=r.charCodeAt(0),t=a(n);return 0!=(t[H]&Z)}function j(r){var n=r.charCodeAt(0),t=a(n);return 0!=(t[H]&O)}function k(r){return j(r)||s(r)||A(r)||l(r)}function q(r,n){var t,e;for(t=n-1;t>=0&&(e=r.charAt(t),_(e));t--);var c=t>=0&&z(e);if(c){for(t=n+1;r.length>t&&(e=r.charAt(t),_(e));t++);c=t==r.length||!z(e)}return String.fromCharCode(c?962:963)}function x(r,n,t){return 931==r.charCodeAt(0)?q(n,t):m(r)}n.unicode=r;var 
y=[[0,0,0,0,0,0],[0,0,0,0,0,0],[0,0,0,0,0,32],[0,0,0,0,0,48],[0,0,0,0,0,1056],[0,0,0,0,0,1024],[0,0,0,0,0,5120],[0,0,0,0,0,3590],[0,0,0,1,1,3590],[0,0,0,2,2,3590],[0,0,0,3,3,3590],[0,0,0,4,4,3590],[0,0,0,5,5,3590],[0,0,0,6,6,3590],[0,0,0,7,7,3590],[0,0,0,8,8,3590],[0,0,0,9,9,3590],[0,32,0,0,0,10113],[0,0,0,0,0,1536],[-32,0,-32,0,0,9993],[0,0,0,0,0,9993],[0,0,0,0,0,4096],[0,0,0,0,2,3076],[0,0,0,0,3,3076],[16777218,17825792,16777218,0,0,26377],[0,0,0,0,0,5632],[0,0,0,0,1,3076],[0,0,0,0,0,3072],[33554438,18874371,33554440,0,0,26377],[121,0,121,0,0,9993],[0,1,0,0,0,10113],[-1,0,-1,0,0,9993],[16777228,33554442,16777228,0,0,26497],[-232,0,-232,0,0,9993],[33554448,18874381,33554448,0,0,26377],[0,-121,0,0,0,10113],[16777236,17825810,16777236,0,0,26377],[195,0,195,0,0,9993],[0,210,0,0,0,10113],[0,206,0,0,0,10113],[0,205,0,0,0,10113],[0,79,0,0,0,10113],[0,202,0,0,0,10113],[0,203,0,0,0,10113],[0,207,0,0,0,10113],[97,0,97,0,0,9993],[0,211,0,0,0,10113],[0,209,0,0,0,10113],[163,0,163,0,0,9993],[0,213,0,0,0,10113],[130,0,130,0,0,9993],[0,214,0,0,0,10113],[0,218,0,0,0,10113],[0,217,0,0,0,10113],[0,219,0,0,0,10113],[0,0,0,0,0,1793],[56,0,56,0,0,9993],[0,2,1,0,0,10113],[-1,1,0,0,0,10049],[-2,0,-1,0,0,9993],[-79,0,-79,0,0,9993],[33554456,18874389,33554456,0,0,26377],[0,-97,0,0,0,10113],[0,-56,0,0,0,10113],[0,-130,0,0,0,10113],[0,10795,0,0,0,10113],[0,-163,0,0,0,10113],[0,10792,0,0,0,10113],[10815,0,10815,0,0,9993],[0,-195,0,0,0,10113],[0,69,0,0,0,10113],[0,71,0,0,0,10113],[10783,0,10783,0,0,9993],[10780,0,10780,0,0,9993],[10782,0,10782,0,0,9993],[-210,0,-210,0,0,9993],[-206,0,-206,0,0,9993],[-205,0,-205,0,0,9993],[-202,0,-202,0,0,9993],[-203,0,-203,0,0,9993],[42319,0,42319,0,0,9993],[42315,0,42315,0,0,9993],[-207,0,-207,0,0,9993],[42280,0,42280,0,0,9993],[42308,0,42308,0,0,9993],[-209,0,-209,0,0,9993],[-211,0,-211,0,0,9993],[10743,0,10743,0,0,9993],[42305,0,42305,0,0,9993],[10749,0,10749,0,0,9993],[-213,0,-213,0,0,9993],[-214,0,-214,0,0,9993],[10727,0,10727,0,0,9993],[-218,0,-218,0,0,9993],[42282,0,42282,0,0,9993],[-69,0,-69,0,0,9993],[-217,0,-217,0,0,9993],[-71,0,-71,0,0,9993],[-219,0,-219,0,0,9993],[42261,0,42261,0,0,9993],[42258,0,42258,0,0,9993],[0,0,0,0,0,14089],[0,0,0,0,0,5889],[16777244,17825818,16777244,0,0,30216],[0,0,0,0,0,13321],[0,116,0,0,0,10113],[0,38,0,0,0,10113],[0,37,0,0,0,10113],[0,64,0,0,0,10113],[0,63,0,0,0,10113],[50331681,19922973,50331681,0,0,26377],[-38,0,-38,0,0,9993],[-37,0,-37,0,0,9993],[50331688,19922980,50331688,0,0,26377],[16777261,17825835,16777261,0,0,26377],[-64,0,-64,0,0,9993],[-63,0,-63,0,0,9993],[0,8,0,0,0,10113],[16777264,17825838,16777264,0,0,26377],[16777267,17825841,16777267,0,0,26377],[0,0,0,0,0,10113],[16777270,17825844,16777270,0,0,26377],[16777273,17825847,16777273,0,0,26377],[-8,0,-8,0,0,9993],[16777276,17825850,16777276,0,0,26377],[16777279,17825853,16777279,0,0,26377],[7,0,7,0,0,9993],[-116,0,-116,0,0,9993],[0,-60,0,0,0,10113],[16777282,17825856,16777282,0,0,26377],[0,-7,0,0,0,10113],[0,80,0,0,0,10113],[-80,0,-80,0,0,9993],[0,15,0,0,0,10113],[-15,0,-15,0,0,9993],[0,48,0,0,0,10113],[-48,0,-48,0,0,9993],[33554502,18874435,33554504,0,0,26377],[0,0,0,0,0,1537],[0,7264,0,0,0,10113],[0,0,0,0,1,3588],[0,0,0,0,2,3588],[0,0,0,0,3,3588],[0,0,0,0,4,3588],[0,0,0,0,5,3588],[0,0,0,0,6,3588],[0,0,0,0,7,3588],[0,0,0,0,8,3588],[0,0,0,0,9,3588],[16777292,17825866,16777292,0,0,26497],[16777295,17825869,16777295,0,0,26497],[16777298,17825872,16777298,0,0,26497],[16777301,17825875,16777301,0,0,26497],[16777304,17825878,16777304,0,0,26497],[16777307,17825881,16777307,0,0,26497],[16777
310,17825884,16777310,0,0,26497],[16777313,17825887,16777313,0,0,26497],[16777316,17825890,16777316,0,0,26497],[16777319,17825893,16777319,0,0,26497],[16777322,17825896,16777322,0,0,26497],[16777325,17825899,16777325,0,0,26497],[16777328,17825902,16777328,0,0,26497],[16777331,17825905,16777331,0,0,26497],[16777334,17825908,16777334,0,0,26497],[16777337,17825911,16777337,0,0,26497],[16777340,17825914,16777340,0,0,26497],[16777343,17825917,16777343,0,0,26497],[16777346,17825920,16777346,0,0,26497],[16777349,17825923,16777349,0,0,26497],[16777352,17825926,16777352,0,0,26497],[16777355,17825929,16777355,0,0,26497],[16777358,17825932,16777358,0,0,26497],[16777361,17825935,16777361,0,0,26497],[16777364,17825938,16777364,0,0,26497],[16777367,17825941,16777367,0,0,26497],[16777370,17825944,16777370,0,0,26497],[16777373,17825947,16777373,0,0,26497],[16777376,17825950,16777376,0,0,26497],[16777379,17825953,16777379,0,0,26497],[16777382,17825956,16777382,0,0,26497],[16777385,17825959,16777385,0,0,26497],[16777388,17825962,16777388,0,0,26497],[16777391,17825965,16777391,0,0,26497],[16777394,17825968,16777394,0,0,26497],[16777397,17825971,16777397,0,0,26497],[16777400,17825974,16777400,0,0,26497],[16777403,17825977,16777403,0,0,26497],[16777406,17825980,16777406,0,0,26497],[16777409,17825983,16777409,0,0,26497],[16777412,17825986,16777412,0,0,26497],[16777415,17825989,16777415,0,0,26497],[16777418,17825992,16777418,0,0,26497],[16777421,17825995,16777421,0,0,26497],[16777424,17825998,16777424,0,0,26497],[16777427,17826001,16777427,0,0,26497],[16777430,17826004,16777430,0,0,26497],[16777433,17826007,16777433,0,0,26497],[16777436,17826010,16777436,0,0,26497],[16777439,17826013,16777439,0,0,26497],[16777442,17826016,16777442,0,0,26497],[16777445,17826019,16777445,0,0,26497],[16777448,17826022,16777448,0,0,26497],[16777451,17826025,16777451,0,0,26497],[16777454,17826028,16777454,0,0,26497],[16777457,17826031,16777457,0,0,26497],[16777460,17826034,16777460,0,0,26497],[16777463,17826037,16777463,0,0,26497],[16777466,17826040,16777466,0,0,26497],[16777469,17826043,16777469,0,0,26497],[16777472,17826046,16777472,0,0,26497],[16777475,17826049,16777475,0,0,26497],[16777478,17826052,16777478,0,0,26497],[16777481,17826055,16777481,0,0,26497],[16777484,17826058,16777484,0,0,26497],[16777487,17826061,16777487,0,0,26497],[16777490,17826064,16777490,0,0,26497],[16777493,17826067,16777493,0,0,26497],[16777496,17826070,16777496,0,0,26497],[16777499,17826073,16777499,0,0,26497],[16777502,17826076,16777502,0,0,26497],[16777505,17826079,16777505,0,0,26497],[16777508,17826082,16777508,0,0,26497],[16777511,17826085,16777511,0,0,26497],[16777514,17826088,16777514,0,0,26497],[16777517,17826091,16777517,0,0,26497],[16777520,17826094,16777520,0,0,26497],[16777523,17826097,16777523,0,0,26497],[16777526,17826100,16777526,0,0,26497],[16777529,17826103,16777529,0,0,26497],[16777532,17826106,16777532,0,0,26497],[16777535,17826109,16777535,0,0,26497],[16777538,17826112,16777538,0,0,26497],[16777541,17826115,16777541,0,0,26497],[16777544,17826118,16777544,0,0,26497],[16777547,17826121,16777547,0,0,26497],[16777550,17826124,16777550,0,0,26377],[16777553,17826127,16777553,0,0,26377],[16777556,17826130,16777556,0,0,26377],[16777559,17826133,16777559,0,0,26377],[16777562,17826136,16777562,0,0,26377],[16777565,17826139,16777565,0,0,26377],[0,0,0,0,0,3840],[35332,0,35332,0,0,9993],[3814,0,3814,0,0,9993],[33554785,18874718,33554785,0,0,26377],[33554790,18874723,33554790,0,0,26377],[33554795,18874728,33554795,0,0,26377],[33554800,18874733,335548
00,0,0,26377],[33554805,18874738,33554805,0,0,26377],[16777593,17826167,16777593,0,0,26377],[16777597,18874746,16777597,0,0,26497],[8,0,8,0,0,9993],[0,-8,0,0,0,10113],[33554817,18874750,33554817,0,0,26377],[50332039,19923331,50332039,0,0,26377],[50332046,19923338,50332046,0,0,26377],[50332053,19923345,50332053,0,0,26377],[74,0,74,0,0,9993],[86,0,86,0,0,9993],[100,0,100,0,0,9993],[128,0,128,0,0,9993],[112,0,112,0,0,9993],[126,0,126,0,0,9993],[33554843,18874776,16777629,0,0,26377],[33554849,18874782,16777635,0,0,26377],[33554855,18874788,16777641,0,0,26377],[33554861,18874794,16777647,0,0,26377],[33554867,18874800,16777653,0,0,26377],[33554873,18874806,16777659,0,0,26377],[33554879,18874812,16777665,0,0,26377],[33554885,18874818,16777671,0,0,26377],[33554891,18874824,16777677,0,0,26433],[33554897,18874830,16777683,0,0,26433],[33554903,18874836,16777689,0,0,26433],[33554909,18874842,16777695,0,0,26433],[33554915,18874848,16777701,0,0,26433],[33554921,18874854,16777707,0,0,26433],[33554927,18874860,16777713,0,0,26433],[33554933,18874866,16777719,0,0,26433],[33554939,18874872,16777725,0,0,26377],[33554945,18874878,16777731,0,0,26377],[33554951,18874884,16777737,0,0,26377],[33554957,18874890,16777743,0,0,26377],[33554963,18874896,16777749,0,0,26377],[33554969,18874902,16777755,0,0,26377],[33554975,18874908,16777761,0,0,26377],[33554981,18874914,16777767,0,0,26377],[33554987,18874920,16777773,0,0,26433],[33554993,18874926,16777779,0,0,26433],[33554999,18874932,16777785,0,0,26433],[33555005,18874938,16777791,0,0,26433],[33555011,18874944,16777797,0,0,26433],[33555017,18874950,16777803,0,0,26433],[33555023,18874956,16777809,0,0,26433],[33555029,18874962,16777815,0,0,26433],[33555035,18874968,16777821,0,0,26377],[33555041,18874974,16777827,0,0,26377],[33555047,18874980,16777833,0,0,26377],[33555053,18874986,16777839,0,0,26377],[33555059,18874992,16777845,0,0,26377],[33555065,18874998,16777851,0,0,26377],[33555071,18875004,16777857,0,0,26377],[33555077,18875010,16777863,0,0,26377],[33555083,18875016,16777869,0,0,26433],[33555089,18875022,16777875,0,0,26433],[33555095,18875028,16777881,0,0,26433],[33555101,18875034,16777887,0,0,26433],[33555107,18875040,16777893,0,0,26433],[33555113,18875046,16777899,0,0,26433],[33555119,18875052,16777905,0,0,26433],[33555125,18875058,16777911,0,0,26433],[33555131,18875064,33555133,0,0,26377],[33555138,18875071,16777924,0,0,26377],[33555144,18875077,33555146,0,0,26377],[33555151,18875084,33555151,0,0,26377],[50332373,19923665,50332376,0,0,26377],[0,-74,0,0,0,10113],[33555166,18875099,16777952,0,0,26433],[16777955,17826529,16777955,0,0,26377],[33555175,18875108,33555177,0,0,26377],[33555182,18875115,16777968,0,0,26377],[33555188,18875121,33555190,0,0,26377],[33555195,18875128,33555195,0,0,26377],[50332417,19923709,50332420,0,0,26377],[0,-86,0,0,0,10113],[33555210,18875143,16777996,0,0,26433],[50332433,19923725,50332433,0,0,26377],[50332440,19923732,50332440,0,0,26377],[33555230,18875163,33555230,0,0,26377],[50332452,19923744,50332452,0,0,26377],[0,-100,0,0,0,10113],[50332459,19923751,50332459,0,0,26377],[50332466,19923758,50332466,0,0,26377],[33555256,18875189,33555256,0,0,26377],[33555261,18875194,33555261,0,0,26377],[50332483,19923775,50332483,0,0,26377],[0,-112,0,0,0,10113],[33555273,18875206,33555275,0,0,26377],[33555280,18875213,16778066,0,0,26377],[33555286,18875219,33555288,0,0,26377],[33555293,18875226,33555293,0,0,26377],[50332515,19923807,50332518,0,0,26377],[0,-128,0,0,0,10113],[0,-126,0,0,0,10113],[33555308,18875241,16778094,0,0,26433],[0,0,0,0,0,3076],[0,0,
0,0,4,3076],[0,0,0,0,5,3076],[0,0,0,0,6,3076],[0,0,0,0,7,3076],[0,0,0,0,8,3076],[0,0,0,0,9,3076],[0,0,0,0,0,1792],[0,-7517,0,0,0,10113],[0,-8383,0,0,0,10113],[0,-8262,0,0,0,10113],[0,28,0,0,0,10113],[-28,0,-28,0,0,9993],[0,16,0,0,0,12160],[-16,0,-16,0,0,12040],[0,26,0,0,0,9344],[-26,0,-26,0,0,9224],[0,-10743,0,0,0,10113],[0,-3814,0,0,0,10113],[0,-10727,0,0,0,10113],[-10795,0,-10795,0,0,9993],[-10792,0,-10792,0,0,9993],[0,-10780,0,0,0,10113],[0,-10749,0,0,0,10113],[0,-10783,0,0,0,10113],[0,-10782,0,0,0,10113],[0,-10815,0,0,0,10113],[-7264,0,-7264,0,0,9993],[0,0,0,0,0,5121],[0,0,0,0,0,3841],[0,-35332,0,0,0,10113],[0,-42280,0,0,0,10113],[0,-42308,0,0,0,10113],[0,-42319,0,0,0,10113],[0,-42315,0,0,0,10113],[0,-42305,0,0,0,10113],[0,-42258,0,0,0,10113],[0,-42282,0,0,0,10113],[0,-42261,0,0,0,10113],[0,928,0,0,0,10113],[-928,0,-928,0,0,9993],[16778097,17826671,16778097,0,0,26377],[16778100,17826674,16778100,0,0,26377],[16778103,17826677,16778103,0,0,26377],[16778106,17826680,16778106,0,0,26377],[16778109,17826683,16778109,0,0,26377],[16778112,17826686,16778112,0,0,26377],[16778115,17826689,16778115,0,0,26377],[16778118,17826692,16778118,0,0,26377],[16778121,17826695,16778121,0,0,26377],[16778124,17826698,16778124,0,0,26377],[16778127,17826701,16778127,0,0,26377],[16778130,17826704,16778130,0,0,26377],[16778133,17826707,16778133,0,0,26377],[16778136,17826710,16778136,0,0,26377],[16778139,17826713,16778139,0,0,26377],[16778142,17826716,16778142,0,0,26377],[16778145,17826719,16778145,0,0,26377],[16778148,17826722,16778148,0,0,26377],[16778151,17826725,16778151,0,0,26377],[16778154,17826728,16778154,0,0,26377],[16778157,17826731,16778157,0,0,26377],[16778160,17826734,16778160,0,0,26377],[16778163,17826737,16778163,0,0,26377],[16778166,17826740,16778166,0,0,26377],[16778169,17826743,16778169,0,0,26377],[16778172,17826746,16778172,0,0,26377],[16778175,17826749,16778175,0,0,26377],[16778178,17826752,16778178,0,0,26377],[16778181,17826755,16778181,0,0,26377],[16778184,17826758,16778184,0,0,26377],[16778187,17826761,16778187,0,0,26377],[16778190,17826764,16778190,0,0,26377],[16778193,17826767,16778193,0,0,26377],[16778196,17826770,16778196,0,0,26377],[16778199,17826773,16778199,0,0,26377],[16778202,17826776,16778202,0,0,26377],[16778205,17826779,16778205,0,0,26377],[16778208,17826782,16778208,0,0,26377],[16778211,17826785,16778211,0,0,26377],[16778214,17826788,16778214,0,0,26377],[16778217,17826791,16778217,0,0,26377],[16778220,17826794,16778220,0,0,26377],[16778223,17826797,16778223,0,0,26377],[16778226,17826800,16778226,0,0,26377],[16778229,17826803,16778229,0,0,26377],[16778232,17826806,16778232,0,0,26377],[16778235,17826809,16778235,0,0,26377],[16778238,17826812,16778238,0,0,26377],[16778241,17826815,16778241,0,0,26377],[16778244,17826818,16778244,0,0,26377],[16778247,17826821,16778247,0,0,26377],[16778250,17826824,16778250,0,0,26377],[16778253,17826827,16778253,0,0,26377],[16778256,17826830,16778256,0,0,26377],[16778259,17826833,16778259,0,0,26377],[16778262,17826836,16778262,0,0,26377],[16778265,17826839,16778265,0,0,26377],[16778268,17826842,16778268,0,0,26377],[16778271,17826845,16778271,0,0,26377],[16778274,17826848,16778274,0,0,26377],[16778277,17826851,16778277,0,0,26377],[16778280,17826854,16778280,0,0,26377],[16778283,17826857,16778283,0,0,26377],[16778286,17826860,16778286,0,0,26377],[16778289,17826863,16778289,0,0,26377],[16778292,17826866,16778292,0,0,26377],[16778295,17826869,16778295,0,0,26377],[16778298,17826872,16778298,0,0,26377],[16778301,17826875,16778301,0,0,26377],[16778304,17826878
,16778304,0,0,26377],[16778307,17826881,16778307,0,0,26377],[16778310,17826884,16778310,0,0,26377],[16778313,17826887,16778313,0,0,26377],[16778316,17826890,16778316,0,0,26377],[16778319,17826893,16778319,0,0,26377],[16778322,17826896,16778322,0,0,26377],[16778325,17826899,16778325,0,0,26377],[16778328,17826902,16778328,0,0,26377],[16778331,17826905,16778331,0,0,26377],[16778334,17826908,16778334,0,0,26377],[33555554,18875487,33555556,0,0,26377],[33555561,18875494,33555563,0,0,26377],[33555568,18875501,33555570,0,0,26377],[50332792,19924084,50332795,0,0,26377],[50332802,19924094,50332805,0,0,26377],[33555595,18875528,33555597,0,0,26377],[33555602,18875535,33555604,0,0,26377],[33555609,18875542,33555611,0,0,26377],[33555616,18875549,33555618,0,0,26377],[33555623,18875556,33555625,0,0,26377],[33555630,18875563,33555632,0,0,26377],[33555637,18875570,33555639,0,0,26377],[0,0,0,0,0,1025],[0,0,0,0,0,5633],[0,40,0,0,0,10113],[-40,0,-40,0,0,9993],[0,0,0,0,0,9344]],B=0,D=1,E=2,F=3,G=4,H=5,I=[181,956,924,223,115,115,83,83,83,115,105,775,304,329,700,110,700,78,383,115,83,496,106,780,74,780,837,953,921,912,953,776,769,921,776,769,944,965,776,769,933,776,769,962,963,931,976,946,914,977,952,920,981,966,934,982,960,928,1008,954,922,1009,961,929,1013,949,917,1415,1381,1410,1333,1362,1333,1410,43888,5024,5024,43889,5025,5025,43890,5026,5026,43891,5027,5027,43892,5028,5028,43893,5029,5029,43894,5030,5030,43895,5031,5031,43896,5032,5032,43897,5033,5033,43898,5034,5034,43899,5035,5035,43900,5036,5036,43901,5037,5037,43902,5038,5038,43903,5039,5039,43904,5040,5040,43905,5041,5041,43906,5042,5042,43907,5043,5043,43908,5044,5044,43909,5045,5045,43910,5046,5046,43911,5047,5047,43912,5048,5048,43913,5049,5049,43914,5050,5050,43915,5051,5051,43916,5052,5052,43917,5053,5053,43918,5054,5054,43919,5055,5055,43920,5056,5056,43921,5057,5057,43922,5058,5058,43923,5059,5059,43924,5060,5060,43925,5061,5061,43926,5062,5062,43927,5063,5063,43928,5064,5064,43929,5065,5065,43930,5066,5066,43931,5067,5067,43932,5068,5068,43933,5069,5069,43934,5070,5070,43935,5071,5071,43936,5072,5072,43937,5073,5073,43938,5074,5074,43939,5075,5075,43940,5076,5076,43941,5077,5077,43942,5078,5078,43943,5079,5079,43944,5080,5080,43945,5081,5081,43946,5082,5082,43947,5083,5083,43948,5084,5084,43949,5085,5085,43950,5086,5086,43951,5087,5087,43952,5088,5088,43953,5089,5089,43954,5090,5090,43955,5091,5091,43956,5092,5092,43957,5093,5093,43958,5094,5094,43959,5095,5095,43960,5096,5096,43961,5097,5097,43962,5098,5098,43963,5099,5099,43964,5100,5100,43965,5101,5101,43966,5102,5102,43967,5103,5103,5112,5104,5104,5113,5105,5105,5114,5106,5106,5115,5107,5107,5116,5108,5108,5117,5109,5109,5112,5104,5104,5113,5105,5105,5114,5106,5106,5115,5107,5107,5116,5108,5108,5117,5109,5109,7830,104,817,72,817,7831,116,776,84,776,7832,119,778,87,778,7833,121,778,89,778,7834,97,702,65,702,7835,7777,7776,223,115,115,7838,8016,965,787,933,787,8018,965,787,768,933,787,768,8020,965,787,769,933,787,769,8022,965,787,834,933,787,834,8064,7936,953,7944,921,8072,8065,7937,953,7945,921,8073,8066,7938,953,7946,921,8074,8067,7939,953,7947,921,8075,8068,7940,953,7948,921,8076,8069,7941,953,7949,921,8077,8070,7942,953,7950,921,8078,8071,7943,953,7951,921,8079,8064,7936,953,7944,921,8072,8065,7937,953,7945,921,8073,8066,7938,953,7946,921,8074,8067,7939,953,7947,921,8075,8068,7940,953,7948,921,8076,8069,7941,953,7949,921,8077,8070,7942,953,7950,921,8078,8071,7943,953,7951,921,8079,8080,7968,953,7976,921,8088,8081,7969,953,7977,921,8089,8082,7970,953,7978,921,8090,8083,7971,953,7979,921,809
1,8084,7972,953,7980,921,8092,8085,7973,953,7981,921,8093,8086,7974,953,7982,921,8094,8087,7975,953,7983,921,8095,8080,7968,953,7976,921,8088,8081,7969,953,7977,921,8089,8082,7970,953,7978,921,8090,8083,7971,953,7979,921,8091,8084,7972,953,7980,921,8092,8085,7973,953,7981,921,8093,8086,7974,953,7982,921,8094,8087,7975,953,7983,921,8095,8096,8032,953,8040,921,8104,8097,8033,953,8041,921,8105,8098,8034,953,8042,921,8106,8099,8035,953,8043,921,8107,8100,8036,953,8044,921,8108,8101,8037,953,8045,921,8109,8102,8038,953,8046,921,8110,8103,8039,953,8047,921,8111,8096,8032,953,8040,921,8104,8097,8033,953,8041,921,8105,8098,8034,953,8042,921,8106,8099,8035,953,8043,921,8107,8100,8036,953,8044,921,8108,8101,8037,953,8045,921,8109,8102,8038,953,8046,921,8110,8103,8039,953,8047,921,8111,8114,8048,953,8122,921,8122,837,8115,945,953,913,921,8124,8116,940,953,902,921,902,837,8118,945,834,913,834,8119,945,834,953,913,834,921,913,834,837,8115,945,953,913,921,8124,8126,953,921,8130,8052,953,8138,921,8138,837,8131,951,953,919,921,8140,8132,942,953,905,921,905,837,8134,951,834,919,834,8135,951,834,953,919,834,921,919,834,837,8131,951,953,919,921,8140,8146,953,776,768,921,776,768,8147,953,776,769,921,776,769,8150,953,834,921,834,8151,953,776,834,921,776,834,8162,965,776,768,933,776,768,8163,965,776,769,933,776,769,8164,961,787,929,787,8166,965,834,933,834,8167,965,776,834,933,776,834,8178,8060,953,8186,921,8186,837,8179,969,953,937,921,8188,8180,974,953,911,921,911,837,8182,969,834,937,834,8183,969,834,953,937,834,921,937,834,837,8179,969,953,937,921,8188,43888,5024,5024,43889,5025,5025,43890,5026,5026,43891,5027,5027,43892,5028,5028,43893,5029,5029,43894,5030,5030,43895,5031,5031,43896,5032,5032,43897,5033,5033,43898,5034,5034,43899,5035,5035,43900,5036,5036,43901,5037,5037,43902,5038,5038,43903,5039,5039,43904,5040,5040,43905,5041,5041,43906,5042,5042,43907,5043,5043,43908,5044,5044,43909,5045,5045,43910,5046,5046,43911,5047,5047,43912,5048,5048,43913,5049,5049,43914,5050,5050,43915,5051,5051,43916,5052,5052,43917,5053,5053,43918,5054,5054,43919,5055,5055,43920,5056,5056,43921,5057,5057,43922,5058,5058,43923,5059,5059,43924,5060,5060,43925,5061,5061,43926,5062,5062,43927,5063,5063,43928,5064,5064,43929,5065,5065,43930,5066,5066,43931,5067,5067,43932,5068,5068,43933,5069,5069,43934,5070,5070,43935,5071,5071,43936,5072,5072,43937,5073,5073,43938,5074,5074,43939,5075,5075,43940,5076,5076,43941,5077,5077,43942,5078,5078,43943,5079,5079,43944,5080,5080,43945,5081,5081,43946,5082,5082,43947,5083,5083,43948,5084,5084,43949,5085,5085,43950,5086,5086,43951,5087,5087,43952,5088,5088,43953,5089,5089,43954,5090,5090,43955,5091,5091,43956,5092,5092,43957,5093,5093,43958,5094,5094,43959,5095,5095,43960,5096,5096,43961,5097,5097,43962,5098,5098,43963,5099,5099,43964,5100,5100,43965,5101,5101,43966,5102,5102,43967,5103,5103,64256,102,102,70,70,70,102,64257,102,105,70,73,70,105,64258,102,108,70,76,70,108,64259,102,102,105,70,70,73,70,102,105,64260,102,102,108,70,70,76,70,102,108,64261,115,116,83,84,83,116,64262,115,116,83,84,83,116,64275,1396,1398,1348,1350,1348,1398,64276,1396,1381,1348,1333,1348,1381,64277,1396,1387,1348,1339,1348,1387,64278,1406,1398,1358,1350,1358,1398,64279,1396,1389,1348,1341,1348,1389],J=7,K=[36,37,38,39,40,41,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,71,72,73,74,78,79,80,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,104,105,106,112,113,118,119,155,156,157,158,159,162,163,166,167,168,173,174,177,178,188,189,190,191,197,198,199,207,208,222,223,231,232,236,237,257
,258,264,265,281,282,283,300,301,302,319,320,321,329,330,332,333,334,335,336,337,338,339,340,341,342,343,344,431,432,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,526,527,528,529,530,531,532,533,534,535,536,537,538,540,541,544,545,546,547,548,549,550,551,553,554,555,556,557,558,559,561,562,565,566,576,583,584,585,586,587,608,616,617,648,652,653,720,724,725,726,727,728,734,735,736,864,865,888,889,890,928,929,930,931,932,933,934,935,936,937,938,939,940,941,942,943,944,948,949,950,976,977,978,988,989,990,992,993,994,995,996,997,998,999,1e3,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1024,1025,1026,1027,1042,1043,1044,1045,1046,1047,1095,1096,1107,1108,1142,1143,1220,1221,1357,1358,1390,1391,1392,1393,1437,1438,1520,1521,1522,1524,1525,7168,7169,7170,7171,7172,7680,8191,8192,8703,8704],L=[34,35,36,37,38,39,34,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,64,68,69,64,70,71,72,73,74,75,76,77,64,78,79,80,81,82,83,84,64,85,86,34,87,34,88,34,89,90,91,92,34,93,34,94,95,34,96,34,97,34,98,99,100,34,101,102,34,103,34,104,34,105,34,101,34,104,34,106,34,107,108,34,109,110,34,111,112,34,113,34,114,115,116,117,118,119,120,121,122,123,124,125,34,126,127,128,129,130,131,132,133,134,34,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,145,34,152,145,153,154,155,156,157,158,159,160,161,162,145,163,145,164,165,166,167,168,169,170,145,171,145,172,173,174,175,145,176,145,177,145,34,178,179,34,180,145,34,181,145,34,182,145,34,183,184,185,186,145,187,188,145,189,145,190,191,145,64,192,193,194,195,145,196,145,197,198,199,200,201,202,203,204,64,205,206,145,34,207,145,208,209,145,210,211,212,213,214,145,64,215,64,216,217,64,218,219,220,221,222,223,224,145,225,226,227,34,87,228,34,229,230,34,231,34,232,34,233,34,234,34,235,34,236,34,237,34,238,145,34,231,34,239,145,240,145,241,242,145,127,243,127,243],M=[9,10,14,28,31,32,33,39,40,46,47,48,49,50,51,52,53,54,55,56,57,58,59,65,91,94,95,96,97,123,127,133,134,160,161,168,169,170,171,173,174,175,176,178,179,180,181,182,183,184,185,186,187,188,191,192,215,216,223,224,247,248,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384,385,386,387,388,389,390,391,392,393,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,428,429,430,431,432,433,435,436,437,438,439,440,441,442,443,444,445,446,447,448,452,453,454,455,456,457,458,459,460,461,462,463,464,465,466,467,468,469,470,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,534,535,536,537,538,539,540,541,542,543,544,545,546,547,548,549,550,551,552,553,554,555,556,557,558,559,560,561,562,563,564,570,571,572,573,574,575,577,578,579,580,581,582,583,584,585,586,587,588,589,590,591,592,593,594,595,596,597,598,600,601,602,603,604,605,608,609,610,611,612,613,614,615,616,617,618,619,62
0,621,623,624,625,626,627,629,630,637,638,640,641,643,644,647,648,649,650,652,653,658,659,660,661,669,670,671,688,697,704,706,710,722,736,741,748,749,750,751,768,837,838,880,881,882,883,884,885,886,887,888,890,891,894,895,896,900,902,903,904,907,908,909,910,912,913,930,931,940,941,944,945,962,963,972,973,975,976,977,978,981,982,983,984,985,986,987,988,989,990,991,992,993,994,995,996,997,998,999,1e3,1001,1002,1003,1004,1005,1006,1007,1008,1009,1010,1011,1012,1013,1014,1015,1016,1017,1018,1019,1020,1021,1024,1040,1072,1104,1120,1121,1122,1123,1124,1125,1126,1127,1128,1129,1130,1131,1132,1133,1134,1135,1136,1137,1138,1139,1140,1141,1142,1143,1144,1145,1146,1147,1148,1149,1150,1151,1152,1153,1154,1155,1160,1162,1163,1164,1165,1166,1167,1168,1169,1170,1171,1172,1173,1174,1175,1176,1177,1178,1179,1180,1181,1182,1183,1184,1185,1186,1187,1188,1189,1190,1191,1192,1193,1194,1195,1196,1197,1198,1199,1200,1201,1202,1203,1204,1205,1206,1207,1208,1209,1210,1211,1212,1213,1214,1215,1216,1217,1218,1219,1220,1221,1222,1223,1224,1225,1226,1227,1228,1229,1230,1231,1232,1233,1234,1235,1236,1237,1238,1239,1240,1241,1242,1243,1244,1245,1246,1247,1248,1249,1250,1251,1252,1253,1254,1255,1256,1257,1258,1259,1260,1261,1262,1263,1264,1265,1266,1267,1268,1269,1270,1271,1272,1273,1274,1275,1276,1277,1278,1279,1280,1281,1282,1283,1284,1285,1286,1287,1288,1289,1290,1291,1292,1293,1294,1295,1296,1297,1298,1299,1300,1301,1302,1303,1304,1305,1306,1307,1308,1309,1310,1311,1312,1313,1314,1315,1316,1317,1318,1319,1320,1321,1322,1323,1324,1325,1326,1327,1328,1329,1367,1369,1370,1376,1377,1415,1416,1417,1419,1421,1424,1425,1470,1471,1472,1473,1475,1476,1478,1479,1480,1488,1515,1520,1523,1524,1525,1536,1542,1552,1563,1564,1565,1566,1568,1600,1601,1611,1632,1633,1634,1635,1636,1637,1638,1639,1640,1641,1642,1646,1648,1649,1748,1749,1750,1757,1758,1759,1765,1767,1769,1770,1774,1776,1777,1778,1779,1780,1781,1782,1783,1784,1785,1786,1789,1791,1792,1806,1807,1808,1809,1810,1840,1867,1869,1958,1969,1970,1984,1985,1986,1987,1988,1989,1990,1991,1992,1993,1994,2027,2036,2038,2042,2043,2048,2070,2074,2075,2084,2085,2088,2089,2094,2096,2111,2112,2137,2140,2142,2143,2208,2229,2275,2307,2308,2362,2363,2364,2365,2366,2369,2377,2381,2382,2384,2385,2392,2402,2404,2406,2407,2408,2409,2410,2411,2412,2413,2414,2415,2416,2417,2418,2433,2434,2436,2437,2445,2447,2449,2451,2473,2474,2481,2482,2483,2486,2490,2492,2493,2494,2497,2501,2503,2505,2507,2509,2510,2511,2519,2520,2524,2526,2527,2530,2532,2534,2535,2536,2537,2538,2539,2540,2541,2542,2543,2544,2546,2548,2554,2556,2561,2563,2564,2565,2571,2575,2577,2579,2601,2602,2609,2610,2612,2613,2615,2616,2618,2620,2621,2622,2625,2627,2631,2633,2635,2638,2641,2642,2649,2653,2654,2655,2662,2663,2664,2665,2666,2667,2668,2669,2670,2671,2672,2674,2677,2678,2689,2691,2692,2693,2702,2703,2706,2707,2729,2730,2737,2738,2740,2741,2746,2748,2749,2750,2753,2758,2759,2761,2762,2763,2765,2766,2768,2769,2784,2786,2788,2790,2791,2792,2793,2794,2795,2796,2797,2798,2799,2800,2802,2809,2810,2817,2818,2820,2821,2829,2831,2833,2835,2857,2858,2865,2866,2868,2869,2874,2876,2877,2878,2879,2880,2881,2885,2887,2889,2891,2893,2894,2902,2903,2904,2908,2910,2911,2914,2916,2918,2919,2920,2921,2922,2923,2924,2925,2926,2927,2928,2929,2930,2936,2946,2947,2948,2949,2955,2958,2961,2962,2966,2969,2971,2972,2973,2974,2976,2979,2981,2984,2987,2990,3002,3006,3008,3009,3011,3014,3017,3018,3021,3022,3024,3025,3031,3032,3046,3047,3048,3049,3050,3051,3052,3053,3054,3055,3056,3059,3067,3072,3073,3076,3077,3085,3086,3089,3090,3113,3114,3130,3133,3134,313
7,3141,3142,3145,3146,3150,3157,3159,3160,3163,3168,3170,3172,3174,3175,3176,3177,3178,3179,3180,3181,3182,3183,3184,3192,3199,3200,3201,3202,3204,3205,3213,3214,3217,3218,3241,3242,3252,3253,3258,3260,3261,3262,3263,3264,3269,3270,3271,3273,3274,3276,3278,3285,3287,3294,3295,3296,3298,3300,3302,3303,3304,3305,3306,3307,3308,3309,3310,3311,3312,3313,3315,3329,3330,3332,3333,3341,3342,3345,3346,3387,3389,3390,3393,3397,3398,3401,3402,3405,3406,3407,3415,3416,3423,3426,3428,3430,3431,3432,3433,3434,3435,3436,3437,3438,3439,3440,3446,3449,3450,3456,3458,3460,3461,3479,3482,3506,3507,3516,3517,3518,3520,3527,3530,3531,3535,3538,3541,3542,3543,3544,3552,3558,3559,3560,3561,3562,3563,3564,3565,3566,3567,3568,3570,3572,3573,3585,3633,3634,3635,3636,3643,3647,3648,3654,3655,3663,3664,3665,3666,3667,3668,3669,3670,3671,3672,3673,3674,3676,3713,3715,3716,3717,3719,3721,3722,3723,3725,3726,3732,3736,3737,3744,3745,3748,3749,3750,3751,3752,3754,3756,3757,3761,3762,3763,3764,3770,3771,3773,3774,3776,3781,3782,3783,3784,3790,3792,3793,3794,3795,3796,3797,3798,3799,3800,3801,3802,3804,3808,3840,3841,3864,3866,3872,3873,3874,3875,3876,3877,3878,3879,3880,3881,3882,3892,3893,3894,3895,3896,3897,3898,3902,3904,3912,3913,3949,3953,3967,3968,3973,3974,3976,3981,3992,3993,4029,4030,4038,4039,4045,4046,4059,4096,4139,4141,4145,4146,4152,4153,4155,4157,4159,4160,4161,4162,4163,4164,4165,4166,4167,4168,4169,4170,4176,4182,4184,4186,4190,4193,4194,4197,4199,4206,4209,4213,4226,4227,4229,4231,4237,4238,4239,4240,4241,4242,4243,4244,4245,4246,4247,4248,4249,4250,4253,4254,4256,4294,4295,4296,4301,4302,4304,4347,4348,4349,4553,4554,4558,4560,4567,4568,4569,4570,4574,4576,4617,4618,4622,4624,4657,4658,4662,4664,4671,4672,4673,4674,4678,4680,4695,4696,4753,4754,4758,4760,4827,4829,4832,4841,4842,4843,4844,4845,4846,4847,4848,4849,4850,4861,4864,4880,4890,4896,4897,4898,4899,4900,4901,4902,4903,4904,4905,4906,4907,4908,4909,4910,4911,4912,4913,4914,4915,4916,4917,4918,4919,4920,4921,4922,4923,4924,4925,4926,4927,4928,4929,4930,4931,4932,4933,4934,4935,4936,4937,4938,4939,4940,4941,4942,4943,4944,4945,4946,4947,4948,4949,4950,4951,4952,4953,4954,4955,4956,4957,4958,4959,4960,4961,4962,4963,4964,4965,4966,4967,4968,4969,4970,4971,4972,4973,4974,4975,4976,4977,4978,4979,4980,4981,4982,4984,4985,4986,4987,4988,4989,4990,4992,4993,5229,5231,5248,5249,5275,5277,5280,5355,5358,5361,5369,5376,5389,5390,5394,5397,5408,5426,5429,5431,5440,5458,5460,5472,5485,5486,5489,5490,5492,5504,5556,5558,5559,5566,5574,5575,5577,5588,5591,5592,5596,5597,5598,5600,5601,5602,5603,5604,5605,5606,5607,5608,5609,5610,5616,5626,5632,5643,5646,5647,5648,5649,5650,5651,5652,5653,5654,5655,5656,5657,5658,5664,5699,5700,5752,5760,5801,5802,5803,5808,5878,5888,5919,5920,5923,5927,5929,5932,5936,5938,5939,5945,5948,5952,5953,5956,5958,5959,5960,5961,5962,5963,5964,5965,5966,5967,5968,5998,6e3,6005,6016,6060,6064,6090,6096,6097,6098,6099,6100,6101,6102,6103,6104,6105,6106,6107,6110,6144,6167,6169,6171,6172,6174,6176,6229,6230,6231,6232,6239,6240,6241,6242,6243,6245,6253,6259,6269,6271,6272,6273,6274,6275,6276,6277,6278,6279,6280,6281,6282,6288,6289,6290,6291,6292,6293,6294,6295,6296,6297,6298,6304,6311,6312,6318,6320,6334,6335,6400,6404,6405,6452,6453,6454,6459,6460,6461,6466,6467,6469,6476,6480,6481,6482,6483,6484,6485,6486,6487,6488,6489,6490,6507,6516,6525,6528,6530,6531,6561,6562,6566,6568,6570,6571,6574,6576,6577,6578,6579,6580,6581,6582,6583,6584,6585,6586,6630,6631,6632,6634,6637,6638,6639,6642,6644,6652,6656,6692,6700,6708,6710,6712,6715,6720,6721
,6722,6723,6724,6725,6726,6727,6728,6729,6730,6733,6736,6737,6738,6739,6740,6741,6742,6743,6744,6745,6746,6776,6782,6784,6848,6856,6864,6867,6868,6881,6882,6889,6893,6894,6898,6900,6901,6903,6904,6906,6912,6956,7019,7032,7033,7034,7037,7038,7067,7104,7158,7164,7168,7169,7170,7171,7172,7173,7174,7175,7176,7177,7178,7179,7180,7181,7182,7183,7184,7185,7186,7187,7188,7189,7190,7191,7192,7193,7194,7195,7196,7197,7198,7199,7200,7201,7202,7203,7204,7205,7206,7207,7208,7209,7210,7211,7212,7213,7214,7215,7216,7217,7218,7219,7220,7221,7222,7223,7224,7225,7226,7227,7228,7229,7230,7231,7232,7233,7234,7235,7236,7237,7238,7239,7240,7241,7242,7243,7244,7245,7246,7247,7248,7249,7250,7251,7252,7253,7254,7255,7256,7257,7258,7259,7260,7261,7262,7263,7264,7265,7266,7267,7268,7269,7270,7271,7272,7273,7274,7275,7276,7277,7278,7279,7280,7281,7282,7283,7284,7285,7286,7287,7288,7289,7290,7291,7292,7293,7294,7295,7296,7297,7298,7299,7300,7301,7302,7303,7304,7305,7306,7307,7308,7309,7310,7311,7312,7313,7314,7315,7316,7317,7318,7319,7320,7321,7322,7323,7324,7326,7327,7328,7329,7330,7331,7332,7333,7334,7335,7336,7337,7338,7339,7340,7341,7342,7343,7344,7345,7346,7347,7348,7349,7350,7351,7352,7353,7354,7355,7356,7357,7358,7359,7360,7361,7362,7363,7364,7365,7366,7367,7368,7369,7370,7371,7372,7373,7374,7375,7376,7377,7378,7379,7380,7381,7382,7383,7384,7385,7386,7387,7388,7389,7390,7391,7392,7393,7394,7395,7396,7397,7398,7399,7400,7401,7402,7403,7404,7405,7406,7407,7408,7409,7410,7411,7412,7413,7414,7415,7416,7417,7418,7419,7420,7421,7422,7423,7424,7432,7440,7446,7448,7454,7456,7464,7472,7480,7488,7494,7496,7502,7504,7505,7506,7507,7508,7509,7510,7511,7512,7513,7514,7515,7516,7517,7518,7519,7520,7528,7536,7538,7542,7544,7546,7548,7550,7552,7553,7554,7555,7556,7557,7558,7559,7560,7561,7562,7563,7564,7565,7566,7567,7568,7569,7570,7571,7572,7573,7574,7575,7576,7577,7578,7579,7580,7581,7582,7583,7584,7585,7586,7587,7588,7589,7590,7591,7592,7593,7594,7595,7596,7597,7598,7599,7600,7602,7603,7604,7605,7606,7607,7608,7610,7612,7613,7614,7615,7618,7619,7620,7621,7622,7623,7624,7628,7629,7632,7634,7635,7636,7638,7639,7640,7642,7644,7645,7648,7650,7651,7652,7653,7654,7655,7656,7658,7660,7661,7664,7666,7667,7668,7669,7670,7671,7672,7674,7676,7677,7679,7680,7691,7696,7704,7706,7716,7717,7719,7720,7722,7727,7728,7743,7745,7764,7765,7775,7776,7781,7782,7792,7793,7794,7796,7797,7798,7799,7800,7801,7802,7807,7808,7809,7810,7811,7812,7813,7814,7815,7816,7817,7818,7823,7824,7837,7840,7871,7888,7901,7905,7906,7909,7921,7936,7938,7939,7943,7944,7946,7947,7950,7952,7955,7956,7957,7958,7960,7961,7966,7972,7973,7974,7975,7976,7977,7978,7979,7980,7982,7983,7984,7986,7987,7988,7989,7993,7994,7996,7998,8e3,8005,8006,8010,8014,8015,8016,8032,8048,8064,8067,8068,8069,8073,8074,8076,8080,8443,8448,8487,8512,8523,8544,8545,8546,8547,8548,8549,8550,8551,8552,8553,8564,8565,8566,8567,8568,8569,8570,8571,8572,8573,8584,8585,8586,8587,8588,8589,8590,8591,8592,8593,8604,8630,8656,8682,8683,8693,8694,8695,8696,8697,8698,8699,8700,8701,8702,8703,8704,8822,8823,8824,8825,8826,8827,8828,8829,8830,8831,8832,8833,8834,8835,8836,8837,8838,8839,8840,8841,8842,8843,8844,8845,8846,8847,8848,8849,8850,8851,8852,9076,9078,9110,9112,9146,9149,9161,9162,9170,9196,9200,9216,9263,9264,9311,9312,9313,9314,9315,9316,9317,9318,9319,9320,9321,9322,9323,9324,9325,9326,9327,9328,9329,9330,9331,9332,9333,9334,9335,9340,9342,9344,9345,9346,9347,9348,9349,9350,9351,9352,9353,9354,9355,9356,9357,9358,9359,9360,9361,9362,9363,9364,9365,9366,9367,9368,9369,9370,9371,9372,9373,9374,9375,
9376,9377,9378,9379,9380,9381,9382,9383,9384,9385,9386,9387,9388,9389,9390,9391,9392,9393,9394,9395,9396,9397,9398,9399,9400,9401,9402,9403,9404,9405,9406,9407,9408,9409,9410,9411,9412,9413,9414,9415,9416,9417,9418,9419,9420,9421,9422,9423,9424,9425,9426,9427,9428,9429,9430,9431,9432,9433,9434,9435,9436,9437,9438,9439,9440,9441,9442,9443,9444,9445,9451,9452,9453,9454,9455,9458,9459,9460,9465,9469,9470,9472,9510,9511,9512,9517,9518,9520,9576,9583,9584,9585,9599,9600,9623,9632,9639,9640,9647,9648,9655,9656,9663,9664,9671,9672,9679,9680,9687,9688,9695,9696,9728,9775,9776,9795,9856,9882,9883,9972,9984,10070,10096,10108,10112,10113,10117,10118,10119,10120,10145,10154,10158,10160,10161,10166,10168,10171,10172,10173,10176,10177,10263,10265,10267,10269,10271,10272,10273,10363,10364,10367,10368,10373,10414,10417,10511,10512,10514,10518,10528,10555,10560,10596,10608,10624,10655,10656,10666,10696,10704,10705,10720,10752,10762,10801,10816,10879,10880,10885,10886,11011,11012,11178,11179,11341,11342,11446,11456,11520,11521,11523,11524,11527,11528,11529,11530,11613,11614,11660,11661,11668,11669,11670,11671,11711,11713,11743,11744,11752,11753,11789,11790,11888,11889,11908,11909,11974,11975,12009,12010,12011,12012,12013,12014,12097,12098,12099,12102,12108,12109,12225,12229,12379,12380,12529,12530,12537,12538,12666,12667,12798,12800,12812,12815,12816,12817,13054,13055,13132,13133,13266,13267,13318,13319,13462,13463,13612,13613,13742,13743,13747,13748,13872,13873,13985,13986,14022,14023,14028,14029,14072,14073,14198,14199,14294,14336,14357,14358,14477,14480,14535,14544,14584,14590,14592,14604,14605,14608,14624,14625,14626,14627,14628,14629,14630,14631,14632,14633,14634,14636,14656,14657,14658,14659,14660,14661,14662,14663,14664,14665,14666,14667,14668,14669,14670,14671,14672,14673,14674,14675,14676,14677,14678,14679,14680,14681,14682,14683,14684,14685,14686,14687,14688,14689,14690,14691,14692,14693,14694,14695,14696,14697,14698,14699,14700,14701,14702,14703,14704,14707,14708,14718,14719,14720,14721,14722,14723,14724,14725,14726,14727,14728,14729,14730,14731,14732,14733,14734,14735,14736,14737,14738,14739,14740,14741,14742,14743,14744,14745,14746,14747,14748,14750,14752,14822,14832,14834,14840,14848,14871,14880,14882,14883,14884,14885,14886,14887,14888,14889,14890,14891,14892,14893,14894,14895,14896,14898,14899,14900,14901,14902,14903,14904,14905,14906,14907,14908,14909,14910,14911,14912,14913,14914,14915,14916,14917,14918,14919,14920,14921,14922,14923,14924,14925,14926,14927,14928,14929,14930,14931,14932,14933,14934,14935,14936,14937,14938,14939,14940,14941,14942,14943,14944,14945,14946,14947,14948,14949,14950,14951,14952,14953,14954,14955,14956,14957,14958,14959,14960,14961,14969,14970,14971,14972,14973,14974,14975,14976,14977,14978,14979,14980,14981,14982,14983,14984,14985,14987,14988,14989,14990,14991,14992,14993,14994,14995,14996,14998,14999,15e3,15001,15002,15003,15004,15005,15006,15007,15008,15009,15010,15011,15012,15013,15014,15015,15016,15017,15018,15019,15020,15021,15022,15024,15025,15026,15027,15028,15029,15030,15031,15032,15095,15096,15098,15099,15106,15107,15110,15111,15115,15116,15139,15141,15143,15144,15148,15152,15158,15162,15168,15220,15224,15232,15234,15284,15300,15301,15310,15312,15313,15314,15315,15316,15317,15318,15319,15320,15321,15322,15328,15346,15352,15355,15356,15357,15358,15360,15361,15362,15363,15364,15365,15366,15367,15368,15369,15370,15398,15406,15408,15431,15442,15444,15455,15456,15485,15488,15491,15492,15539,15540,15542,15546,15548,15549,15553,15566,15567,15568,15569,15570,15571
,15572,15573,15574,15575,15576,15577,15578,15582,15584,15589,15590,15591,15600,15601,15602,15603,15604,15605,15606,15607,15608,15609,15610,15615,15616,15657,15663,15665,15667,15669,15671,15680,15683,15684,15692,15693,15694,15696,15697,15698,15699,15700,15701,15702,15703,15704,15705,15706,15708,15712,15728,15729,15735,15738,15739,15740,15741,15742,15792,15793,15794,15797,15799,15801,15806,15808,15809,15810,15811,15835,15837,15838,15840,15851,15852,15854,15856,15858,15859,15861,15862,15863,15873,15879,15881,15887,15889,15895,15904,15911,15912,15919,15920,15955,15956,15963,15964,15968,15974,15984,15985,15986,15987,15988,15989,15990,15991,15992,15993,15994,15995,15996,15997,15998,15999,16e3,16001,16002,16003,16004,16005,16006,16007,16008,16009,16010,16011,16012,16013,16014,16015,16016,16017,16018,16019,16020,16021,16022,16023,16024,16025,16026,16027,16028,16029,16030,16031,16032,16033,16034,16035,16036,16037,16038,16039,16040,16041,16042,16043,16044,16045,16046,16047,16048,16049,16050,16051,16052,16053,16054,16055,16056,16057,16058,16059,16060,16061,16062,16063,16064,16099,16101,16102,16104,16105,16107,16108,16109,16110,16112,16113,16114,16115,16116,16117,16118,16119,16120,16121,16122,16128,16164,16176,16199,16203,16252,16256,16384,16491,16492,16499,16500,16504,16505,16562,16563,16593,16594,16595,16596,16637,16638,16750,16752,16858,16896,16897,16898,16899,16900,16901,16902,16903,16915,16916,16917,16918,16919,16920,16925,16926,16927,16937,16938,16951,16952,16957,16958,16959,16960,16962,16963,16965,16966,17074,17090,17107,17246,17252,17342,17344,17360,17424,17426,17480,17520,17530,17532,17534,17536,17552,17555,17556,17562,17568,17584,17587,17589,17613,17616,17618,17619,17620,17621,17622,17639,17640,17644,17648,17649,17650,17651,17652,17653,17654,17655,17656,17657,17658,17659,17660,17661,17662,17663,17789,17791,17792,17793,17799,17800,17806,17807,17808,17809,17810,17811,17812,17813,17814,17815,17816,17817,17818,17819,17825,17851,17854,17855,17856,17857,17883,17894,17904,17905,17950,17952,17983,17986,17992,17994,18e3,18002,18008,18010,18013,18016,18019,18020,18023,18024,18031,18041,18044,18046,18048,18060,18061,18087,18088,18107,18108,18110,18111,18126,18128,18142,18176,18299,18304,18307,18311,18356,18359,18368,18421,18425,18442,18444,18445,18448,18460,18464,18465,18512,18557,18558,18688,18717,18720,18769,18784,18785,18812,18816,18848,18852,18864,18881,18882,18890,18891,18896,18934,18939,18944,18974,18975,18976,19012,19016,19024,19025,19030,19072,19112,19152,19230,19232,19233,19234,19235,19236,19237,19238,19239,19240,19241,19242,19328,19368,19376,19428,19439,19440,19456,19511,19520,19542,19552,19560,19584,19590,19592,19593,19594,19638,19639,19641,19644,19645,19647,19670,19671,19672,19680,19703,19705,19712,19743,19751,19760,19808,19827,19828,19830,19835,19840,19862,19868,19871,19872,19898,19903,19904,19968,20024,20028,20030,20032,20048,20050,20096,20097,20100,20101,20103,20108,20112,20116,20117,20120,20121,20148,20152,20155,20159,20160,20161,20162,20163,20164,20168,20176,20185,20192,20221,20223,20224,20253,20256,20288,20296,20297,20325,20327,20331,20336,20343,20352,20406,20409,20416,20438,20440,20448,20467,20472,20480,20498,20505,20509,20521,20528,20608,20681,20736,20787,20800,20851,20858,20864,20960,20961,20962,20963,20964,20965,20966,20967,20968,20969,20991,20992,20993,20994,20995,21048,21063,21070,21074,21075,21076,21077,21078,21079,21080,21081,21082,21083,21094,21095,21096,21097,21098,21099,21100,21101,21102,21103,21104,21119,21122,21123,21168,21171,21175,21177,21179,21181,21182,21186,21200,2122
5,21232,21233,21234,21235,21236,21237,21238,21239,21240,21241,21242,21248,21251,21287,21292,21293,21301,21302,21303,21304,21305,21306,21307,21308,21309,21310,21311,21312,21316,21328,21363,21364,21366,21367,21376,21378,21379,21427,21430,21439,21441,21445,21450,21453,21454,21456,21457,21458,21459,21460,21461,21462,21463,21464,21465,21466,21467,21468,21469,21472,21473,21493,21504,21522,21523,21548,21551,21554,21556,21557,21558,21560,21566,21632,21639,21640,21641,21642,21646,21647,21662,21663,21673,21674,21680,21727,21728,21731,21739,21744,21745,21746,21747,21748,21749,21750,21751,21752,21753,21754,21760,21762,21764,21765,21773,21775,21777,21779,21801,21802,21809,21810,21812,21813,21818,21820,21821,21822,21824,21825,21829,21831,21833,21835,21838,21840,21841,21847,21848,21853,21858,21860,21862,21869,21872,21877,21888,21936,21939,21945,21946,21947,21951,21953,21954,21956,21958,21959,21960,21968,21969,21970,21971,21972,21973,21974,21975,21976,21977,21978,22016,22063,22066,22070,22072,22076,22078,22079,22081,22104,22108,22110,22144,22192,22195,22203,22205,22206,22207,22209,22212,22213,22224,22225,22226,22227,22228,22229,22230,22231,22232,22233,22234,22272,22315,22316,22317,22318,22320,22326,22327,22328,22336,22337,22338,22339,22340,22341,22342,22343,22344,22345,22346,22400,22426,22429,22432,22434,22438,22439,22444,22448,22449,22450,22451,22452,22453,22454,22455,22456,22457,22458,22460,22464,22560,22592,22624,22625,22626,22627,22628,22629,22630,22631,22632,22633,22634,22643,22655,22656,22720,22777,22784,22810,22912,23023,23024,23029,23040,23108,23168,23215,23296,23367,23424,23481,23488,23519,23520,23521,23522,23523,23524,23525,23526,23527,23528,23529,23530,23534,23536,23632,23662,23664,23669,23670,23680,23728,23735,23744,23748,23750,23760,23761,23762,23763,23764,23765,23766,23767,23768,23769,23770,23771,23778,23779,23800,23805,23824,23936,24005,24016,24017,24063,24079,24083,24096,24192,24194,24320,24427,24432,24445,24448,24457,24464,24474,24476,24477,24479,24480,24484,24576,24694,24704,24743,24745,24805,24807,24810,24813,24819,24827,24835,24837,24844,24874,24878,24937,24960,25026,25029,25030,25088,25175,25184,25202,25216,25242,25268,25294,25301,25302,25320,25346,25372,25373,25374,25376,25378,25379,25381,25383,25385,25389,25390,25398,25402,25403,25404,25405,25412,25413,25424,25450,25476,25478,25479,25483,25485,25493,25494,25501,25502,25528,25530,25531,25535,25536,25541,25542,25543,25546,25553,25554,25580,25606,25632,25658,25684,25710,25736,25762,25788,25814,25840,25866,25894,25896,25921,25922,25947,25948,25954,25979,25980,26005,26006,26012,26037,26038,26063,26064,26070,26095,26096,26121,26122,26128,26153,26154,26179,26180,26186,26187,26188,26190,26191,26192,26193,26194,26195,26196,26197,26198,26199,26200,26201,26202,26203,26204,26205,26206,26207,26208,26209,26210,26211,26212,26213,26214,26215,26216,26217,26218,26219,26220,26221,26222,26223,26224,26225,26226,26227,26228,26229,26230,26231,26232,26233,26234,26235,26236,26237,26238,26239,26240,26295,26299,26349,26357,26358,26372,26373,26380,26395,26400,26401,26416,26496,26565,26567,26576,26583,26624,26628,26629,26656,26657,26659,26660,26661,26663,26664,26665,26675,26676,26680,26681,26682,26683,26684,26690,26691,26695,26696,26697,26698,26699,26700,26701,26704,26705,26707,26708,26709,26711,26712,26713,26714,26715,26716,26717,26718,26719,26720,26721,26723,26724,26725,26727,26731,26732,26739,26740,26744,26745,26749,26750,26751,26752,26762,26763,26780,26785,26788,26789,26794,26795,26812,26864,26866,26880,26924,26928,27028,27040,27055,27057,27072,27073,27088,2
7089,27126,27136,27138,27139,27140,27141,27142,27143,27144,27145,27146,27147,27149,27152,27183,27184,27210,27216,27242,27244,27248,27274,27291,27366,27395,27408,27451,27456,27465,27472,27474,27520,27643,27648,27770,27771,27812,27813,27985,28e3,28013,28016,28020,28032,28148,28160,28245,28288,28300,28304,28360,28368,28378,28384,28424,28432,28462,28560,28569,28672,28677,28736,28737,28800,28801,28802,28900,28901,29026,29027,29089,29090,29187,29188,29196,29197,29212,29213,29418,29419,29437,29438,29465,29466,29584,29585,29720,29721,29851,29852,30061,30062,30167,30208,30261,30272,30366,30368,30498,30592,30622,30721,30722,30752,30848,31088,31104,31230,31232],N=[1,2,3,1,3,2,4,5,6,5,6,5,7,8,9,10,11,12,13,14,15,16,6,5,17,5,6,18,6,19,5,1,3,1,2,5,6,5,20,5,21,5,6,5,22,23,6,24,5,25,6,26,20,5,27,5,17,5,17,28,19,5,19,29,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,32,33,30,31,30,31,30,31,20,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,34,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,35,30,31,30,31,30,31,36,37,38,30,31,30,31,39,30,31,40,30,31,20,41,42,43,30,31,40,44,45,46,47,30,31,48,20,46,49,50,51,30,31,30,31,30,31,52,30,31,52,20,30,31,52,30,31,53,30,31,30,31,54,30,31,20,55,30,31,20,56,55,57,58,59,57,58,59,57,58,59,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,60,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,61,57,58,59,30,31,62,63,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,64,20,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,20,65,30,31,66,67,68,30,31,69,70,71,30,31,30,31,30,31,30,31,30,31,72,73,74,75,76,20,77,20,78,20,79,80,20,77,81,20,82,20,83,84,20,85,86,20,87,88,20,86,20,89,90,20,91,20,92,20,93,20,93,20,94,93,95,96,97,20,98,20,55,20,99,100,20,101,102,101,6,102,6,101,6,102,6,102,6,25,103,25,30,31,30,31,102,6,30,31,0,104,50,5,105,0,6,106,25,107,0,108,0,109,110,17,0,17,111,112,113,19,114,19,115,116,117,118,119,120,121,122,123,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,124,125,126,127,128,129,5,30,31,130,30,31,20,64,131,17,19,132,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,5,25,6,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,133,30,31,30,31,30,31,30,31,30,31,30,31,30,31,134,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,0,135,0,102,5,0,136,137,0,5,0,5,0,25,5,25,5,25,5,25,5,25,0,55,0,55,5,6,0,21,5,25,5,21,0,5,55,102,55,25,7,8,9,10,11,12,13,14,15,16,5,55,25,55,5,55,25,21,5,25,102,25,5,25,55,7,8,9,10,11,12,13,14,15,16,55,5,55,5,0,21,55,25,55,25,0,55,25,55,0,7,8,9,10,11,12,13,14,15,16,55,25,102,5,102,0,55,25,102,25,102,25,102,25,0,5,0,55,25,0,5,0,55,0,25,18,55,25,18,25,55,18,25,18,25,18,55,25,55,25,5,7,8,9,10,11,12,13,14,15,16,5,102,55,25,18,0,55,0,55,0,55,0,55,0,55,0,55,0,25,55,18,25,0,18,0,18,25,55,0,18,0,55,0,55,25,0,7,8,9,10,11,12,13,14,15,16,55,5,27,5,0,25,18,0,55,0,55,0,55,0,55,0,55,0,55,0,55,0,25,0,18,25,0,25,0,25,0,25,0,55,0,55,0,7,8,9,10,11,12,13,14,15,16,25,55,25,0,25,18,0,55,0,55,0
,55,0,55,0,55,0,55,0,25,55,18,25,0,25,18,0,18,25,0,55,0,55,25,0,7,8,9,10,11,12,13,14,15,16,5,0,55,0,25,18,0,55,0,55,0,55,0,55,0,55,0,55,0,25,55,18,25,18,25,0,18,0,18,25,0,25,18,0,55,0,55,25,0,7,8,9,10,11,12,13,14,15,16,5,55,27,0,25,55,0,55,0,55,0,55,0,55,0,55,0,55,0,55,0,55,0,55,0,18,25,18,0,18,0,18,25,0,55,0,18,0,7,8,9,10,11,12,13,14,15,16,27,5,0,25,18,0,55,0,55,0,55,0,55,0,55,25,18,0,25,0,25,0,25,0,55,0,55,25,0,7,8,9,10,11,12,13,14,15,16,0,27,5,0,25,18,0,55,0,55,0,55,0,55,0,55,0,25,55,18,25,18,0,25,18,0,18,25,0,18,0,55,0,55,25,0,7,8,9,10,11,12,13,14,15,16,0,55,0,25,18,0,55,0,55,0,55,0,55,18,25,0,18,0,18,25,55,0,18,0,55,25,0,7,8,9,10,11,12,13,14,15,16,27,0,5,55,0,18,0,55,0,55,0,55,0,55,0,55,0,25,0,18,25,0,25,0,18,0,7,8,9,10,11,12,13,14,15,16,0,18,5,0,55,25,55,138,25,0,5,55,102,25,5,7,8,9,10,11,12,13,14,15,16,5,0,55,0,55,0,55,0,55,0,55,0,55,0,55,0,55,0,55,0,55,0,55,0,55,25,55,138,25,0,25,55,0,55,0,102,0,25,0,7,8,9,10,11,12,13,14,15,16,0,55,0,55,5,25,5,7,8,9,10,11,12,13,14,15,16,27,5,25,5,25,5,25,5,18,55,0,55,0,25,18,25,5,25,55,25,0,25,0,5,25,5,0,5,0,55,18,25,18,25,18,25,18,25,55,7,8,9,10,11,12,13,14,15,16,5,55,18,25,55,25,55,18,55,18,55,25,55,25,18,25,18,25,55,18,7,8,9,10,11,12,13,14,15,16,18,25,5,139,0,139,0,139,0,55,5,102,55,0,55,0,55,0,55,0,55,0,55,0,55,0,55,0,55,0,55,0,55,0,55,0,55,0,55,0,55,0,55,0,25,5,140,141,142,143,144,145,146,147,148,27,0,55,5,0,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,0,235,236,237,238,239,240,0,5,55,5,55,2,55,5,0,55,5,241,55,0,55,0,55,25,0,55,25,5,0,55,25,0,55,0,55,0,25,0,55,25,18,25,18,25,18,25,5,102,5,55,25,0,7,8,9,10,11,12,13,14,15,16,0,27,0,5,25,21,0,7,8,9,10,11,12,13,14,15,16,0,55,102,55,0,55,25,55,0,55,0,55,0,25,18,25,18,0,18,25,18,25,0,5,0,5,7,8,9,10,11,12,13,14,15,16,55,0,55,0,55,0,55,0,7,8,9,10,11,12,13,14,15,16,140,0,5,55,25,18,25,0,5,55,18,25,18,25,0,25,18,25,18,25,18,25,0,25,7,8,9,10,11,12,13,14,15,16,0,7,8,9,10,11,12,13,14,15,16,0,5,102,5,0,25,6,0,25,18,55,25,18,25,18,25,18,25,18,55,0,7,8,9,10,11,12,13,14,15,16,5,25,5,0,25,18,55,18,25,18,25,18,25,55,7,8,9,10,11,12,13,14,15,16,55,25,18,25,18,25,18,25,18,0,5,55,18,25,18,25,0,5,7,8,9,10,11,12,13,14,15,16,0,55,7,8,9,10,11,12,13,14,15,16,55,102,5,0,5,0,25,5,25,18,25,55,25,55,18,25,55,0,25,0,20,101,20,101,242,20,243,20,101,25,0,25,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,244,245,246,247,248,249,20,250,20,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,251,252,251,0,252,0,251,252,251,252,251,0,252,0,253,251,254,251,255,251,256,251,0,252,0,252,0,252,0,252,251,252,257,258,259,260,261,262,0,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284
,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,251,311,312,313,0,314,315,252,316,317,6,318,6,319,320,321,0,322,323,324,325,6,251,326,327,0,328,329,252,330,0,6,251,331,332,333,126,334,335,252,336,130,6,0,337,338,339,0,340,341,342,343,344,6,0,2,21,5,6,5,6,5,6,3,21,2,5,18,5,18,5,2,21,0,21,345,101,0,346,347,348,349,350,351,5,101,345,26,22,23,346,347,348,349,350,351,5,0,101,0,5,0,25,6,25,6,25,0,5,120,5,120,5,20,120,20,120,20,5,120,5,352,120,5,120,5,353,5,120,5,354,355,120,352,20,120,356,120,20,55,20,5,20,120,5,120,20,5,357,5,27,358,359,241,30,31,241,27,5,0,5,0,5,0,5,0,26,22,23,346,347,348,349,350,351,27,26,22,23,346,347,348,349,350,351,27,26,22,23,346,347,348,349,350,351,27,5,360,361,345,27,26,22,23,346,347,348,349,350,351,27,345,5,26,22,23,346,347,348,349,350,351,27,26,22,23,346,347,348,349,350,351,27,26,22,23,346,347,348,349,350,351,27,5,0,5,0,5,0,5,0,5,0,5,0,135,0,136,0,30,31,362,363,364,365,366,30,31,30,31,30,31,367,368,369,370,20,30,31,20,30,31,20,101,371,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,20,5,30,31,30,31,25,30,31,0,5,27,5,372,0,372,0,372,0,55,0,102,5,0,25,55,0,55,0,55,0,55,0,55,0,55,0,55,0,55,0,55,0,25,5,373,5,0,5,0,5,0,5,0,5,0,2,5,102,55,241,5,241,25,18,5,102,5,241,102,55,5,0,55,0,25,6,102,55,5,55,5,102,55,0,55,0,55,0,5,27,5,55,0,5,0,55,5,0,27,5,27,5,27,5,27,5,27,5,0,55,374,55,374,55,374,55,374,55,0,5,374,55,374,55,374,55,374,55,374,55,374,55,374,55,374,55,374,55,374,55,374,55,374,55,374,55,374,55,374,55,374,55,374,55,374,55,374,55,374,55,374,55,374,55,374,55,374,55,374,55,374,55,374,55,374,55,374,55,374,55,374,55,374,55,374,55,374,55,374,55,374,55,374,55,374,55,374,55,374,55,374,55,374,55,374,55,0,55,102,55,0,5,0,55,102,5,55,102,5,55,7,8,9,10,11,12,13,14,15,16,55,0,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,55,25,6,5,25,5,102,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,101,25,55,241,25,5,0,6,102,6,30,31,30,31,30,31,30,31,30,31,30,31,30,31,20,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,101,20,30,31,30,31,375,30,31,30,31,30,31,30,31,30,31,102,6,30,31,376,20,55,30,31,30,31,20,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,30,31,377,378,379,380,0,381,382,383,384,30,31,30,31,0,55,101,20,55,25,55,25,55,25,55,18,25,18,5,0,27,5,0,55,5,0,18,55,18,25,0,5,7,8,9,10,11,12,13,14,15,16,0,25,55,5,55,5,55,0,7,8,9,10,11,12,13,14,15,16,55,25,5,55,25,18,0,5,55,0,25,18,55,25,18,25,18,25,18,5,0,102,7,8,9,10,11,12,13,14,15,16,0,5,55,25,102,55,7,8,9,10,11,12,13,14,15,16,55,0,55,25,18,25,18,25,0,55,25,55,25,18,0,7,8,9,10,11,12,13,14,15,16,0,5,55,102,55,5,55,18,25,18,55,25,55,25,55,25,55,25,55,25,55,0,55,102,5,55,18,25,18,5,55,102,18,25,0,55,0,55,0,55,0,55,0,55,0,20,385,20,6,101,20,0,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,427,428,429,430,431,432,433,434,435,436,437,438,439,440,441,442,443,444,445,446,447,448,449,450,451,452,453,454,455,456,457,458,459,460,461,462,463,464,465,55,18,25,18,25,18,5
,18,25,0,7,8,9,10,11,12,13,14,15,16,0,55,0,55,0,55,0,1,55,374,55,374,55,374,55,374,55,374,55,374,55,374,55,0,55,0,466,467,468,469,470,471,472,0,473,474,475,476,477,0,55,25,55,5,55,0,55,0,55,0,55,0,55,0,55,6,0,55,478,55,5,0,55,0,55,0,55,478,5,0,25,5,6,5,0,25,5,18,5,18,5,6,0,5,6,5,0,5,0,478,55,478,55,478,0,478,55,478,55,478,55,478,55,478,55,0,21,0,5,6,5,6,5,7,8,9,10,11,12,13,14,15,16,6,5,17,5,6,18,6,19,5,55,102,55,479,55,0,55,0,55,0,55,0,55,0,5,6,5,0,5,0,21,5,0,55,0,55,0,55,0,55,0,55,0,55,0,55,0,5,0,27,0,5,241,27,5,27,5,0,5,0,5,0,5,25,0,55,0,55,0,25,27,0,55,27,0,55,241,55,241,0,55,25,0,55,0,5,55,0,55,5,241,0,480,481,55,0,7,8,9,10,11,12,13,14,15,16,0,55,0,55,0,5,0,55,0,55,0,55,0,55,0,55,0,55,0,55,0,55,0,55,0,5,27,55,5,27,55,0,27,0,55,0,55,0,27,55,27,0,5,55,0,5,0,55,0,27,55,27,0,27,55,25,0,25,0,25,55,0,55,0,55,0,25,0,25,26,22,23,346,27,0,5,0,55,27,5,55,27,0,55,5,55,25,0,27,5,0,55,0,5,55,0,27,55,0,27,55,0,5,0,27,0,55,0,108,0,115,0,27,0,26,22,23,346,347,348,349,350,351,27,0,18,25,18,55,25,5,0,26,22,23,346,347,348,349,350,351,27,7,8,9,10,11,12,13,14,15,16,0,25,18,55,18,25,18,25,5,21,5,0,55,0,7,8,9,10,11,12,13,14,15,16,0,25,55,25,18,25,0,7,8,9,10,11,12,13,14,15,16,5,0,55,25,5,55,0,25,18,55,18,25,18,55,5,25,5,0,7,8,9,10,11,12,13,14,15,16,55,5,55,5,0,27,0,55,0,55,18,25,18,25,18,25,5,0,55,0,55,0,55,0,55,0,55,5,0,55,25,18,25,0,7,8,9,10,11,12,13,14,15,16,0,25,18,0,55,0,55,0,55,0,55,0,55,0,55,0,25,55,18,25,18,0,18,0,18,0,55,0,18,0,55,18,0,25,0,25,0,55,18,25,18,25,18,25,18,25,55,5,55,0,7,8,9,10,11,12,13,14,15,16,0,55,18,25,0,18,25,18,25,5,55,25,0,55,18,25,18,25,18,25,5,55,0,7,8,9,10,11,12,13,14,15,16,0,55,25,18,25,18,25,18,25,0,7,8,9,10,11,12,13,14,15,16,0,55,0,25,18,25,18,25,0,7,8,9,10,11,12,13,14,15,16,27,5,0,17,19,7,8,9,10,11,12,13,14,15,16,27,0,55,0,55,0,55,0,241,0,5,0,55,0,55,0,55,0,55,0,55,0,7,8,9,10,11,12,13,14,15,16,0,5,0,55,0,25,5,0,55,25,5,102,5,0,7,8,9,10,11,12,13,14,15,16,0,27,0,55,0,55,0,55,0,55,18,0,25,102,0,55,0,55,0,55,0,55,0,55,0,5,25,5,21,0,5,0,5,0,5,18,25,5,18,21,25,5,25,5,25,5,0,5,25,5,0,5,0,27,0,120,20,120,20,0,20,120,20,120,0,120,0,120,0,120,0,120,0,120,20,0,20,0,20,0,20,120,20,120,0,120,0,120,0,120,0,20,120,0,120,0,120,0,120,0,120,0,20,120,20,120,20,120,20,120,20,120,20,120,20,0,120,5,20,5,20,120,5,20,5,20,120,5,20,5,20,120,5,20,5,20,120,5,20,5,20,120,20,0,7,8,9,10,11,12,13,14,15,16,7,8,9,10,11,12,13,14,15,16,7,8,9,10,11,12,13,14,15,16,7,8,9,10,11,12,13,14,15,16,7,8,9,10,11,12,13,14,15,16,25,5,25,5,25,5,25,5,0,25,0,25,0,55,0,27,25,0,55,0,55,0,55,0,55,0,55,0,55,0,55,0,55,0,55,0,55,0,55,0,55,0,55,0,55,0,55,0,55,0,55,0,55,0,55,0,55,0,55,0,55,0,55,0,55,0,55,0,55,0,55,0,55,0,55,0,55,0,55,0,55,0,55,0,5,0,5,0,5,0,5,0,5,0,5,0,5,0,345,26,22,23,346,347,348,349,350,351,27,0,5,0,482,5,482,5,0,482,5,0,5,0,5,0,5,0,5,0,5,6,5,0,5,0,5,0,5,0,5,0,5,0,5,0,5,0,5,0,5,0,5,0,5,0,5,0,5,0,5,0,55,374,55,374,55,374,55,374,55,374,55,374,55,374,55,374,55,374,55,374,55,374,55,374,55,374,55,374,55,0,55,0,55,0,55,0,55,0,21,0,21,25,0,1,0],O=1,P=2,Q=4,R=8,T=64,U=128,V=256,W=512,X=1024,Y=2048,Z=4096,$=8192,rn=16384; -unicode=unicode||{},unicode.title=function(r){for(var n="",t=!1,e=0;r.length>e;e++){var c=r.charAt(e);n+=t?x(c,r,e):S(c),t=z(c)}return n},unicode.capitalize=function(r){if(r.length==0){return r};var n="";n+=w(r.charAt(0));for(var t=1;r.length>t;t++)n+=x(r.charAt(t),r,t);return n},unicode.casefold=function(r){for(var n="",t=0;r.length>t;t++)n+=b(r.charAt(t));return n},unicode.islower=function(r){if(""==r)return!1;for(var n=!1,t=0;r.length>t;t++){var e=r.charAt(t);if(p(e)||o(e))return!1;n||(n=g(e))}return 
n},unicode.isupper=function(r){if(""==r)return!1;for(var n=!1,t=0;r.length>t;t++){var e=r.charAt(t);if(g(e)||o(e))return!1;n||(n=p(e))}return n},unicode.istitle=function(r){if(""==r)return!1;for(var n=!1,t=!1,e=0;r.length>e;e++){var c=r.charAt(e);if(p(c)||o(c)){if(t)return!1;t=!0,n=!0}else if(g(c)){if(!t)return!1;t=!0,n=!0}else t=!1}return n},unicode.isspace=function(r){if(""==r)return!1;for(var n=0;r.length>n;n++){var t=r.charAt(n);if(!u(t))return!1}return!0},unicode.isalpha=function(r){if(""==r)return!1;for(var n=0;r.length>n;n++){var t=r.charAt(n);if(!j(t))return!1}return!0},unicode.isalnum=function(r){if(""==r)return!1;for(var n=0;r.length>n;n++){var t=r.charAt(n);if(!k(t))return!1}return!0},unicode.isdecimal=function(r){if(""==r)return!1;for(var n=0;r.length>n;n++){var t=r.charAt(n);if(!s(t))return!1}return!0},unicode.isdigit=function(r){if(""==r)return!1;for(var n=0;r.length>n;n++){var t=r.charAt(n);if(!A(t))return!1}return!0},unicode.isnumeric=function(r){if(""==r)return!1;for(var n=0;r.length>n;n++){var t=r.charAt(n);if(!l(t))return!1}return!0},unicode.isidentifier=function(r){if(""==r)return!1;if("_"!=r.charAt(0)&&!f(r.charAt(0)))return!1;for(var n=1;r.length>n;n++)if(!h(r.charAt(n)))return!1;return!0},unicode.isprintable=function(r){for(var n=0;r.length>n;n++)if(!C(r.charAt(n)))return!1;return!0},unicode.lower=function(r){for(var n="",t=0;r.length>t;t++){var e=r.charAt(t);n+=x(e,r,t)}return n},unicode.swapcase=function(r){for(var n="",t=0;r.length>t;t++){var e=r.charAt(t);n+=p(e)?x(e,r,t):g(e)?w(e):e}return n},unicode.upper=function(r){for(var n="",t=0;r.length>t;t++){var e=r.charAt(t);n+=w(e)}return n}}({},function(){return this}()); -; var __BRYTHON__=__BRYTHON__ ||{} +try{ +eval("async function* f(){}")}catch(err){console.warn("Your browser is not fully supported. 
If you are using "+ +"Microsoft Edge, please upgrade to the latest version")} ;(function($B){ $B.isWebWorker=('undefined' !==typeof WorkerGlobalScope)&& ("function"===typeof importScripts)&& (navigator instanceof WorkerNavigator) -var _window=self; +$B.isNode=(typeof process !=='undefined')&&(process.release.name==='node') +var _window +if($B.isNode){_window={location:{href:'',origin:'',pathname:''},navigator:{userLanguage:''}}}else{ +_window=self} +var href=_window.location.href +$B.protocol=href.split(':')[0] +$B.BigInt=_window.BigInt var $path if($B.brython_path===undefined){ var this_url; @@ -35,7 +41,6 @@ $B.async_enabled=false if($B.async_enabled){$B.block={}} $B.imported={} $B.precompiled={} -$B._globals={} $B.frames_stack=[] $B.builtins={} $B.builtins_scope={id:'__builtins__',module:'__builtins__',binding:{}} @@ -58,7 +63,6 @@ $B.lambda_magic=Math.random().toString(36).substr(2,8) $B.set_func_names=function(klass,module){if(klass.$infos){var name=klass.$infos.__name__ klass.$infos.__module__=module klass.$infos.__qualname__=name}else{var name=klass.__name__ -console.log("bizarre",klass) klass.$infos={__name__:name,__module__:module,__qualname__:name}} klass.__module__=module for(var attr in klass){if(typeof klass[attr]=='function'){klass[attr].$infos={__doc__:klass[attr].__doc__ ||"",__module__:module,__qualname__ :name+'.'+attr,__name__:attr} @@ -75,20 +79,32 @@ $B.globals=function(){ return $B.frames_stack[$B.frames_stack.length-1][3]} $B.scripts={} $B.$options={} +$B.update_VFS=function(scripts){$B.VFS=$B.VFS ||{} +var vfs_timestamp=scripts.$timestamp +if(vfs_timestamp !==undefined){delete scripts.$timestamp} +for(var script in scripts){if($B.VFS.hasOwnProperty(script)){console.warn("Virtual File System: duplicate entry "+script)} +$B.VFS[script]=scripts[script] +$B.VFS[script].timestamp=vfs_timestamp}} +$B.add_files=function(files){ +$B.files=$B.files ||{} +for(var file in files){$B.files[file]=files[file]}} $B.python_to_js=function(src,script_id){$B.meta_path=$B.$meta_path.slice() if(!$B.use_VFS){$B.meta_path.shift()} if(script_id===undefined){script_id="__main__"} var root=__BRYTHON__.py2js(src,script_id,script_id),js=root.to_js() js="(function() {\n var $locals_"+script_id+" = {}\n"+js+"\n}())" return js} 
-$B.regexIdentifier=/^(?:[\$A-Z_a-z\xAA\xB5\xBA\xC0-\xD6\xD8-\xF6\xF8-\u02C1\u02C6-\u02D1\u02E0-\u02E4\u02EC\u02EE\u0370-\u0374\u0376\u0377\u037A-\u037D\u037F\u0386\u0388-\u038A\u038C\u038E-\u03A1\u03A3-\u03F5\u03F7-\u0481\u048A-\u052F\u0531-\u0556\u0559\u0561-\u0587\u05D0-\u05EA\u05F0-\u05F2\u0620-\u064A\u066E\u066F\u0671-\u06D3\u06D5\u06E5\u06E6\u06EE\u06EF\u06FA-\u06FC\u06FF\u0710\u0712-\u072F\u074D-\u07A5\u07B1\u07CA-\u07EA\u07F4\u07F5\u07FA\u0800-\u0815\u081A\u0824\u0828\u0840-\u0858\u08A0-\u08B4\u0904-\u0939\u093D\u0950\u0958-\u0961\u0971-\u0980\u0985-\u098C\u098F\u0990\u0993-\u09A8\u09AA-\u09B0\u09B2\u09B6-\u09B9\u09BD\u09CE\u09DC\u09DD\u09DF-\u09E1\u09F0\u09F1\u0A05-\u0A0A\u0A0F\u0A10\u0A13-\u0A28\u0A2A-\u0A30\u0A32\u0A33\u0A35\u0A36\u0A38\u0A39\u0A59-\u0A5C\u0A5E\u0A72-\u0A74\u0A85-\u0A8D\u0A8F-\u0A91\u0A93-\u0AA8\u0AAA-\u0AB0\u0AB2\u0AB3\u0AB5-\u0AB9\u0ABD\u0AD0\u0AE0\u0AE1\u0AF9\u0B05-\u0B0C\u0B0F\u0B10\u0B13-\u0B28\u0B2A-\u0B30\u0B32\u0B33\u0B35-\u0B39\u0B3D\u0B5C\u0B5D\u0B5F-\u0B61\u0B71\u0B83\u0B85-\u0B8A\u0B8E-\u0B90\u0B92-\u0B95\u0B99\u0B9A\u0B9C\u0B9E\u0B9F\u0BA3\u0BA4\u0BA8-\u0BAA\u0BAE-\u0BB9\u0BD0\u0C05-\u0C0C\u0C0E-\u0C10\u0C12-\u0C28\u0C2A-\u0C39\u0C3D\u0C58-\u0C5A\u0C60\u0C61\u0C85-\u0C8C\u0C8E-\u0C90\u0C92-\u0CA8\u0CAA-\u0CB3\u0CB5-\u0CB9\u0CBD\u0CDE\u0CE0\u0CE1\u0CF1\u0CF2\u0D05-\u0D0C\u0D0E-\u0D10\u0D12-\u0D3A\u0D3D\u0D4E\u0D5F-\u0D61\u0D7A-\u0D7F\u0D85-\u0D96\u0D9A-\u0DB1\u0DB3-\u0DBB\u0DBD\u0DC0-\u0DC6\u0E01-\u0E30\u0E32\u0E33\u0E40-\u0E46\u0E81\u0E82\u0E84\u0E87\u0E88\u0E8A\u0E8D\u0E94-\u0E97\u0E99-\u0E9F\u0EA1-\u0EA3\u0EA5\u0EA7\u0EAA\u0EAB\u0EAD-\u0EB0\u0EB2\u0EB3\u0EBD\u0EC0-\u0EC4\u0EC6\u0EDC-\u0EDF\u0F00\u0F40-\u0F47\u0F49-\u0F6C\u0F88-\u0F8C\u1000-\u102A\u103F\u1050-\u1055\u105A-\u105D\u1061\u1065\u1066\u106E-\u1070\u1075-\u1081\u108E\u10A0-\u10C5\u10C7\u10CD\u10D0-\u10FA\u10FC-\u1248\u124A-\u124D\u1250-\u1256\u1258\u125A-\u125D\u1260-\u1288\u128A-\u128D\u1290-\u12B0\u12B2-\u12B5\u12B8-\u12BE\u12C0\u12C2-\u12C5\u12C8-\u12D6\u12D8-\u1310\u1312-\u1315\u1318-\u135A\u1380-\u138F\u13A0-\u13F5\u13F8-\u13FD\u1401-\u166C\u166F-\u167F\u1681-\u169A\u16A0-\u16EA\u16EE-\u16F8\u1700-\u170C\u170E-\u1711\u1720-\u1731\u1740-\u1751\u1760-\u176C\u176E-\u1770\u1780-\u17B3\u17D7\u17DC\u1820-\u1877\u1880-\u18A8\u18AA\u18B0-\u18F5\u1900-\u191E\u1950-\u196D\u1970-\u1974\u1980-\u19AB\u19B0-\u19C9\u1A00-\u1A16\u1A20-\u1A54\u1AA7\u1B05-\u1B33\u1B45-\u1B4B\u1B83-\u1BA0\u1BAE\u1BAF\u1BBA-\u1BE5\u1C00-\u1C23\u1C4D-\u1C4F\u1C5A-\u1C7D\u1CE9-\u1CEC\u1CEE-\u1CF1\u1CF5\u1CF6\u1D00-\u1DBF\u1E00-\u1F15\u1F18-\u1F1D\u1F20-\u1F45\u1F48-\u1F4D\u1F50-\u1F57\u1F59\u1F5B\u1F5D\u1F5F-\u1F7D\u1F80-\u1FB4\u1FB6-\u1FBC\u1FBE\u1FC2-\u1FC4\u1FC6-\u1FCC\u1FD0-\u1FD3\u1FD6-\u1FDB\u1FE0-\u1FEC\u1FF2-\u1FF4\u1FF6-\u1FFC\u2071\u207F\u2090-\u209C\u2102\u2107\u210A-\u2113\u2115\u2118-\u211D\u2124\u2126\u2128\u212A-\u2139\u213C-\u213F\u2145-\u2149\u214E\u2160-\u2188\u2C00-\u2C2E\u2C30-\u2C5E\u2C60-\u2CE4\u2CEB-\u2CEE\u2CF2\u2CF3\u2D00-\u2D25\u2D27\u2D2D\u2D30-\u2D67\u2D6F\u2D80-\u2D96\u2DA0-\u2DA6\u2DA8-\u2DAE\u2DB0-\u2DB6\u2DB8-\u2DBE\u2DC0-\u2DC6\u2DC8-\u2DCE\u2DD0-\u2DD6\u2DD8-\u2DDE\u3005-\u3007\u3021-\u3029\u3031-\u3035\u3038-\u303C\u3041-\u3096\u309B-\u309F\u30A1-\u30FA\u30FC-\u30FF\u3105-\u312D\u3131-\u318E\u31A0-\u31BA\u31F0-\u31FF\u3400-\u4DB5\u4E00-\u9FD5\uA000-\uA48C\uA4D0-\uA4FD\uA500-\uA60C\uA610-\uA61F\uA62A\uA62B\uA640-\uA66E\uA67F-\uA69D\uA6A0-\uA6EF\uA717-\uA71F\uA722-\uA788\uA78B-\uA7AD\uA7B0-\uA7B7\uA7F7-\uA801\uA803-\uA805\uA807-\uA80A\uA80C-\uA822\uA840-\uA873\uA882-\uA8B3\uA8F2-\uA8F7\uA8
FB\uA8FD\uA90A-\uA925\uA930-\uA946\uA960-\uA97C\uA984-\uA9B2\uA9CF\uA9E0-\uA9E4\uA9E6-\uA9EF\uA9FA-\uA9FE\uAA00-\uAA28\uAA40-\uAA42\uAA44-\uAA4B\uAA60-\uAA76\uAA7A\uAA7E-\uAAAF\uAAB1\uAAB5\uAAB6\uAAB9-\uAABD\uAAC0\uAAC2\uAADB-\uAADD\uAAE0-\uAAEA\uAAF2-\uAAF4\uAB01-\uAB06\uAB09-\uAB0E\uAB11-\uAB16\uAB20-\uAB26\uAB28-\uAB2E\uAB30-\uAB5A\uAB5C-\uAB65\uAB70-\uABE2\uAC00-\uD7A3\uD7B0-\uD7C6\uD7CB-\uD7FB\uF900-\uFA6D\uFA70-\uFAD9\uFB00-\uFB06\uFB13-\uFB17\uFB1D\uFB1F-\uFB28\uFB2A-\uFB36\uFB38-\uFB3C\uFB3E\uFB40\uFB41\uFB43\uFB44\uFB46-\uFBB1\uFBD3-\uFD3D\uFD50-\uFD8F\uFD92-\uFDC7\uFDF0-\uFDFB\uFE70-\uFE74\uFE76-\uFEFC\uFF21-\uFF3A\uFF41-\uFF5A\uFF66-\uFFBE\uFFC2-\uFFC7\uFFCA-\uFFCF\uFFD2-\uFFD7\uFFDA-\uFFDC]|\uD800[\uDC00-\uDC0B\uDC0D-\uDC26\uDC28-\uDC3A\uDC3C\uDC3D\uDC3F-\uDC4D\uDC50-\uDC5D\uDC80-\uDCFA\uDD40-\uDD74\uDE80-\uDE9C\uDEA0-\uDED0\uDF00-\uDF1F\uDF30-\uDF4A\uDF50-\uDF75\uDF80-\uDF9D\uDFA0-\uDFC3\uDFC8-\uDFCF\uDFD1-\uDFD5]|\uD801[\uDC00-\uDC9D\uDD00-\uDD27\uDD30-\uDD63\uDE00-\uDF36\uDF40-\uDF55\uDF60-\uDF67]|\uD802[\uDC00-\uDC05\uDC08\uDC0A-\uDC35\uDC37\uDC38\uDC3C\uDC3F-\uDC55\uDC60-\uDC76\uDC80-\uDC9E\uDCE0-\uDCF2\uDCF4\uDCF5\uDD00-\uDD15\uDD20-\uDD39\uDD80-\uDDB7\uDDBE\uDDBF\uDE00\uDE10-\uDE13\uDE15-\uDE17\uDE19-\uDE33\uDE60-\uDE7C\uDE80-\uDE9C\uDEC0-\uDEC7\uDEC9-\uDEE4\uDF00-\uDF35\uDF40-\uDF55\uDF60-\uDF72\uDF80-\uDF91]|\uD803[\uDC00-\uDC48\uDC80-\uDCB2\uDCC0-\uDCF2]|\uD804[\uDC03-\uDC37\uDC83-\uDCAF\uDCD0-\uDCE8\uDD03-\uDD26\uDD50-\uDD72\uDD76\uDD83-\uDDB2\uDDC1-\uDDC4\uDDDA\uDDDC\uDE00-\uDE11\uDE13-\uDE2B\uDE80-\uDE86\uDE88\uDE8A-\uDE8D\uDE8F-\uDE9D\uDE9F-\uDEA8\uDEB0-\uDEDE\uDF05-\uDF0C\uDF0F\uDF10\uDF13-\uDF28\uDF2A-\uDF30\uDF32\uDF33\uDF35-\uDF39\uDF3D\uDF50\uDF5D-\uDF61]|\uD805[\uDC80-\uDCAF\uDCC4\uDCC5\uDCC7\uDD80-\uDDAE\uDDD8-\uDDDB\uDE00-\uDE2F\uDE44\uDE80-\uDEAA\uDF00-\uDF19]|\uD806[\uDCA0-\uDCDF\uDCFF\uDEC0-\uDEF8]|\uD808[\uDC00-\uDF99]|\uD809[\uDC00-\uDC6E\uDC80-\uDD43]|[\uD80C\uD840-\uD868\uD86A-\uD86C\uD86F-\uD872][\uDC00-\uDFFF]|\uD80D[\uDC00-\uDC2E]|\uD811[\uDC00-\uDE46]|\uD81A[\uDC00-\uDE38\uDE40-\uDE5E\uDED0-\uDEED\uDF00-\uDF2F\uDF40-\uDF43\uDF63-\uDF77\uDF7D-\uDF8F]|\uD81B[\uDF00-\uDF44\uDF50\uDF93-\uDF9F]|\uD82C[\uDC00\uDC01]|\uD82F[\uDC00-\uDC6A\uDC70-\uDC7C\uDC80-\uDC88\uDC90-\uDC99]|\uD835[\uDC00-\uDC54\uDC56-\uDC9C\uDC9E\uDC9F\uDCA2\uDCA5\uDCA6\uDCA9-\uDCAC\uDCAE-\uDCB9\uDCBB\uDCBD-\uDCC3\uDCC5-\uDD05\uDD07-\uDD0A\uDD0D-\uDD14\uDD16-\uDD1C\uDD1E-\uDD39\uDD3B-\uDD3E\uDD40-\uDD44\uDD46\uDD4A-\uDD50\uDD52-\uDEA5\uDEA8-\uDEC0\uDEC2-\uDEDA\uDEDC-\uDEFA\uDEFC-\uDF14\uDF16-\uDF34\uDF36-\uDF4E\uDF50-\uDF6E\uDF70-\uDF88\uDF8A-\uDFA8\uDFAA-\uDFC2\uDFC4-\uDFCB]|\uD83A[\uDC00-\uDCC4]|\uD83B[\uDE00-\uDE03\uDE05-\uDE1F\uDE21\uDE22\uDE24\uDE27\uDE29-\uDE32\uDE34-\uDE37\uDE39\uDE3B\uDE42\uDE47\uDE49\uDE4B\uDE4D-\uDE4F\uDE51\uDE52\uDE54\uDE57\uDE59\uDE5B\uDE5D\uDE5F\uDE61\uDE62\uDE64\uDE67-\uDE6A\uDE6C-\uDE72\uDE74-\uDE77\uDE79-\uDE7C\uDE7E\uDE80-\uDE89\uDE8B-\uDE9B\uDEA1-\uDEA3\uDEA5-\uDEA9\uDEAB-\uDEBB]|\uD869[\uDC00-\uDED6\uDF00-\uDFFF]|\uD86D[\uDC00-\uDF34\uDF40-\uDFFF]|\uD86E[\uDC00-\uDC1D\uDC20-\uDFFF]|\uD873[\uDC00-\uDEA1]|\uD87E[\uDC00-\uDE1D])(?:[\$0-9A-Z_a-z\xAA\xB5\xB7\xBA\xC0-\xD6\xD8-\xF6\xF8-\u02C1\u02C6-\u02D1\u02E0-\u02E4\u02EC\u02EE\u0300-\u0374\u0376\u0377\u037A-\u037D\u037F\u0386-\u038A\u038C\u038E-\u03A1\u03A3-\u03F5\u03F7-\u0481\u0483-\u0487\u048A-\u052F\u0531-\u0556\u0559\u0561-\u0587\u0591-\u05BD\u05BF\u05C1\u05C2\u05C4\u05C5\u05C7\u05D0-\u05EA\u05F0-\u05F2\u0610-\u061A\u0620-\u0669\u066E-\u06D3\u06D5-\u06DC\u06DF-\u06E8\u06EA-\u06FC\u06FF\u0710-\u074A
\u074D-\u07B1\u07C0-\u07F5\u07FA\u0800-\u082D\u0840-\u085B\u08A0-\u08B4\u08E3-\u0963\u0966-\u096F\u0971-\u0983\u0985-\u098C\u098F\u0990\u0993-\u09A8\u09AA-\u09B0\u09B2\u09B6-\u09B9\u09BC-\u09C4\u09C7\u09C8\u09CB-\u09CE\u09D7\u09DC\u09DD\u09DF-\u09E3\u09E6-\u09F1\u0A01-\u0A03\u0A05-\u0A0A\u0A0F\u0A10\u0A13-\u0A28\u0A2A-\u0A30\u0A32\u0A33\u0A35\u0A36\u0A38\u0A39\u0A3C\u0A3E-\u0A42\u0A47\u0A48\u0A4B-\u0A4D\u0A51\u0A59-\u0A5C\u0A5E\u0A66-\u0A75\u0A81-\u0A83\u0A85-\u0A8D\u0A8F-\u0A91\u0A93-\u0AA8\u0AAA-\u0AB0\u0AB2\u0AB3\u0AB5-\u0AB9\u0ABC-\u0AC5\u0AC7-\u0AC9\u0ACB-\u0ACD\u0AD0\u0AE0-\u0AE3\u0AE6-\u0AEF\u0AF9\u0B01-\u0B03\u0B05-\u0B0C\u0B0F\u0B10\u0B13-\u0B28\u0B2A-\u0B30\u0B32\u0B33\u0B35-\u0B39\u0B3C-\u0B44\u0B47\u0B48\u0B4B-\u0B4D\u0B56\u0B57\u0B5C\u0B5D\u0B5F-\u0B63\u0B66-\u0B6F\u0B71\u0B82\u0B83\u0B85-\u0B8A\u0B8E-\u0B90\u0B92-\u0B95\u0B99\u0B9A\u0B9C\u0B9E\u0B9F\u0BA3\u0BA4\u0BA8-\u0BAA\u0BAE-\u0BB9\u0BBE-\u0BC2\u0BC6-\u0BC8\u0BCA-\u0BCD\u0BD0\u0BD7\u0BE6-\u0BEF\u0C00-\u0C03\u0C05-\u0C0C\u0C0E-\u0C10\u0C12-\u0C28\u0C2A-\u0C39\u0C3D-\u0C44\u0C46-\u0C48\u0C4A-\u0C4D\u0C55\u0C56\u0C58-\u0C5A\u0C60-\u0C63\u0C66-\u0C6F\u0C81-\u0C83\u0C85-\u0C8C\u0C8E-\u0C90\u0C92-\u0CA8\u0CAA-\u0CB3\u0CB5-\u0CB9\u0CBC-\u0CC4\u0CC6-\u0CC8\u0CCA-\u0CCD\u0CD5\u0CD6\u0CDE\u0CE0-\u0CE3\u0CE6-\u0CEF\u0CF1\u0CF2\u0D01-\u0D03\u0D05-\u0D0C\u0D0E-\u0D10\u0D12-\u0D3A\u0D3D-\u0D44\u0D46-\u0D48\u0D4A-\u0D4E\u0D57\u0D5F-\u0D63\u0D66-\u0D6F\u0D7A-\u0D7F\u0D82\u0D83\u0D85-\u0D96\u0D9A-\u0DB1\u0DB3-\u0DBB\u0DBD\u0DC0-\u0DC6\u0DCA\u0DCF-\u0DD4\u0DD6\u0DD8-\u0DDF\u0DE6-\u0DEF\u0DF2\u0DF3\u0E01-\u0E3A\u0E40-\u0E4E\u0E50-\u0E59\u0E81\u0E82\u0E84\u0E87\u0E88\u0E8A\u0E8D\u0E94-\u0E97\u0E99-\u0E9F\u0EA1-\u0EA3\u0EA5\u0EA7\u0EAA\u0EAB\u0EAD-\u0EB9\u0EBB-\u0EBD\u0EC0-\u0EC4\u0EC6\u0EC8-\u0ECD\u0ED0-\u0ED9\u0EDC-\u0EDF\u0F00\u0F18\u0F19\u0F20-\u0F29\u0F35\u0F37\u0F39\u0F3E-\u0F47\u0F49-\u0F6C\u0F71-\u0F84\u0F86-\u0F97\u0F99-\u0FBC\u0FC6\u1000-\u1049\u1050-\u109D\u10A0-\u10C5\u10C7\u10CD\u10D0-\u10FA\u10FC-\u1248\u124A-\u124D\u1250-\u1256\u1258\u125A-\u125D\u1260-\u1288\u128A-\u128D\u1290-\u12B0\u12B2-\u12B5\u12B8-\u12BE\u12C0\u12C2-\u12C5\u12C8-\u12D6\u12D8-\u1310\u1312-\u1315\u1318-\u135A\u135D-\u135F\u1369-\u1371\u1380-\u138F\u13A0-\u13F5\u13F8-\u13FD\u1401-\u166C\u166F-\u167F\u1681-\u169A\u16A0-\u16EA\u16EE-\u16F8\u1700-\u170C\u170E-\u1714\u1720-\u1734\u1740-\u1753\u1760-\u176C\u176E-\u1770\u1772\u1773\u1780-\u17D3\u17D7\u17DC\u17DD\u17E0-\u17E9\u180B-\u180D\u1810-\u1819\u1820-\u1877\u1880-\u18AA\u18B0-\u18F5\u1900-\u191E\u1920-\u192B\u1930-\u193B\u1946-\u196D\u1970-\u1974\u1980-\u19AB\u19B0-\u19C9\u19D0-\u19DA\u1A00-\u1A1B\u1A20-\u1A5E\u1A60-\u1A7C\u1A7F-\u1A89\u1A90-\u1A99\u1AA7\u1AB0-\u1ABD\u1B00-\u1B4B\u1B50-\u1B59\u1B6B-\u1B73\u1B80-\u1BF3\u1C00-\u1C37\u1C40-\u1C49\u1C4D-\u1C7D\u1CD0-\u1CD2\u1CD4-\u1CF6\u1CF8\u1CF9\u1D00-\u1DF5\u1DFC-\u1F15\u1F18-\u1F1D\u1F20-\u1F45\u1F48-\u1F4D\u1F50-\u1F57\u1F59\u1F5B\u1F5D\u1F5F-\u1F7D\u1F80-\u1FB4\u1FB6-\u1FBC\u1FBE\u1FC2-\u1FC4\u1FC6-\u1FCC\u1FD0-\u1FD3\u1FD6-\u1FDB\u1FE0-\u1FEC\u1FF2-\u1FF4\u1FF6-\u1FFC\u200C\u200D\u203F\u2040\u2054\u2071\u207F\u2090-\u209C\u20D0-\u20DC\u20E1\u20E5-\u20F0\u2102\u2107\u210A-\u2113\u2115\u2118-\u211D\u2124\u2126\u2128\u212A-\u2139\u213C-\u213F\u2145-\u2149\u214E\u2160-\u2188\u2C00-\u2C2E\u2C30-\u2C5E\u2C60-\u2CE4\u2CEB-\u2CF3\u2D00-\u2D25\u2D27\u2D2D\u2D30-\u2D67\u2D6F\u2D7F-\u2D96\u2DA0-\u2DA6\u2DA8-\u2DAE\u2DB0-\u2DB6\u2DB8-\u2DBE\u2DC0-\u2DC6\u2DC8-\u2DCE\u2DD0-\u2DD6\u2DD8-\u2DDE\u2DE0-\u2DFF\u3005-\u3007\u3021-\u302F\u3031-\u3035\u3038-\u303C\u3041-\u309
6\u3099-\u309F\u30A1-\u30FA\u30FC-\u30FF\u3105-\u312D\u3131-\u318E\u31A0-\u31BA\u31F0-\u31FF\u3400-\u4DB5\u4E00-\u9FD5\uA000-\uA48C\uA4D0-\uA4FD\uA500-\uA60C\uA610-\uA62B\uA640-\uA66F\uA674-\uA67D\uA67F-\uA6F1\uA717-\uA71F\uA722-\uA788\uA78B-\uA7AD\uA7B0-\uA7B7\uA7F7-\uA827\uA840-\uA873\uA880-\uA8C4\uA8D0-\uA8D9\uA8E0-\uA8F7\uA8FB\uA8FD\uA900-\uA92D\uA930-\uA953\uA960-\uA97C\uA980-\uA9C0\uA9CF-\uA9D9\uA9E0-\uA9FE\uAA00-\uAA36\uAA40-\uAA4D\uAA50-\uAA59\uAA60-\uAA76\uAA7A-\uAAC2\uAADB-\uAADD\uAAE0-\uAAEF\uAAF2-\uAAF6\uAB01-\uAB06\uAB09-\uAB0E\uAB11-\uAB16\uAB20-\uAB26\uAB28-\uAB2E\uAB30-\uAB5A\uAB5C-\uAB65\uAB70-\uABEA\uABEC\uABED\uABF0-\uABF9\uAC00-\uD7A3\uD7B0-\uD7C6\uD7CB-\uD7FB\uF900-\uFA6D\uFA70-\uFAD9\uFB00-\uFB06\uFB13-\uFB17\uFB1D-\uFB28\uFB2A-\uFB36\uFB38-\uFB3C\uFB3E\uFB40\uFB41\uFB43\uFB44\uFB46-\uFBB1\uFBD3-\uFD3D\uFD50-\uFD8F\uFD92-\uFDC7\uFDF0-\uFDFB\uFE00-\uFE0F\uFE20-\uFE2F\uFE33\uFE34\uFE4D-\uFE4F\uFE70-\uFE74\uFE76-\uFEFC\uFF10-\uFF19\uFF21-\uFF3A\uFF3F\uFF41-\uFF5A\uFF66-\uFFBE\uFFC2-\uFFC7\uFFCA-\uFFCF\uFFD2-\uFFD7\uFFDA-\uFFDC]|\uD800[\uDC00-\uDC0B\uDC0D-\uDC26\uDC28-\uDC3A\uDC3C\uDC3D\uDC3F-\uDC4D\uDC50-\uDC5D\uDC80-\uDCFA\uDD40-\uDD74\uDDFD\uDE80-\uDE9C\uDEA0-\uDED0\uDEE0\uDF00-\uDF1F\uDF30-\uDF4A\uDF50-\uDF7A\uDF80-\uDF9D\uDFA0-\uDFC3\uDFC8-\uDFCF\uDFD1-\uDFD5]|\uD801[\uDC00-\uDC9D\uDCA0-\uDCA9\uDD00-\uDD27\uDD30-\uDD63\uDE00-\uDF36\uDF40-\uDF55\uDF60-\uDF67]|\uD802[\uDC00-\uDC05\uDC08\uDC0A-\uDC35\uDC37\uDC38\uDC3C\uDC3F-\uDC55\uDC60-\uDC76\uDC80-\uDC9E\uDCE0-\uDCF2\uDCF4\uDCF5\uDD00-\uDD15\uDD20-\uDD39\uDD80-\uDDB7\uDDBE\uDDBF\uDE00-\uDE03\uDE05\uDE06\uDE0C-\uDE13\uDE15-\uDE17\uDE19-\uDE33\uDE38-\uDE3A\uDE3F\uDE60-\uDE7C\uDE80-\uDE9C\uDEC0-\uDEC7\uDEC9-\uDEE6\uDF00-\uDF35\uDF40-\uDF55\uDF60-\uDF72\uDF80-\uDF91]|\uD803[\uDC00-\uDC48\uDC80-\uDCB2\uDCC0-\uDCF2]|\uD804[\uDC00-\uDC46\uDC66-\uDC6F\uDC7F-\uDCBA\uDCD0-\uDCE8\uDCF0-\uDCF9\uDD00-\uDD34\uDD36-\uDD3F\uDD50-\uDD73\uDD76\uDD80-\uDDC4\uDDCA-\uDDCC\uDDD0-\uDDDA\uDDDC\uDE00-\uDE11\uDE13-\uDE37\uDE80-\uDE86\uDE88\uDE8A-\uDE8D\uDE8F-\uDE9D\uDE9F-\uDEA8\uDEB0-\uDEEA\uDEF0-\uDEF9\uDF00-\uDF03\uDF05-\uDF0C\uDF0F\uDF10\uDF13-\uDF28\uDF2A-\uDF30\uDF32\uDF33\uDF35-\uDF39\uDF3C-\uDF44\uDF47\uDF48\uDF4B-\uDF4D\uDF50\uDF57\uDF5D-\uDF63\uDF66-\uDF6C\uDF70-\uDF74]|\uD805[\uDC80-\uDCC5\uDCC7\uDCD0-\uDCD9\uDD80-\uDDB5\uDDB8-\uDDC0\uDDD8-\uDDDD\uDE00-\uDE40\uDE44\uDE50-\uDE59\uDE80-\uDEB7\uDEC0-\uDEC9\uDF00-\uDF19\uDF1D-\uDF2B\uDF30-\uDF39]|\uD806[\uDCA0-\uDCE9\uDCFF\uDEC0-\uDEF8]|\uD808[\uDC00-\uDF99]|\uD809[\uDC00-\uDC6E\uDC80-\uDD43]|[\uD80C\uD840-\uD868\uD86A-\uD86C\uD86F-\uD872][\uDC00-\uDFFF]|\uD80D[\uDC00-\uDC2E]|\uD811[\uDC00-\uDE46]|\uD81A[\uDC00-\uDE38\uDE40-\uDE5E\uDE60-\uDE69\uDED0-\uDEED\uDEF0-\uDEF4\uDF00-\uDF36\uDF40-\uDF43\uDF50-\uDF59\uDF63-\uDF77\uDF7D-\uDF8F]|\uD81B[\uDF00-\uDF44\uDF50-\uDF7E\uDF8F-\uDF9F]|\uD82C[\uDC00\uDC01]|\uD82F[\uDC00-\uDC6A\uDC70-\uDC7C\uDC80-\uDC88\uDC90-\uDC99\uDC9D\uDC9E]|\uD834[\uDD65-\uDD69\uDD6D-\uDD72\uDD7B-\uDD82\uDD85-\uDD8B\uDDAA-\uDDAD\uDE42-\uDE44]|\uD835[\uDC00-\uDC54\uDC56-\uDC9C\uDC9E\uDC9F\uDCA2\uDCA5\uDCA6\uDCA9-\uDCAC\uDCAE-\uDCB9\uDCBB\uDCBD-\uDCC3\uDCC5-\uDD05\uDD07-\uDD0A\uDD0D-\uDD14\uDD16-\uDD1C\uDD1E-\uDD39\uDD3B-\uDD3E\uDD40-\uDD44\uDD46\uDD4A-\uDD50\uDD52-\uDEA5\uDEA8-\uDEC0\uDEC2-\uDEDA\uDEDC-\uDEFA\uDEFC-\uDF14\uDF16-\uDF34\uDF36-\uDF4E\uDF50-\uDF6E\uDF70-\uDF88\uDF8A-\uDFA8\uDFAA-\uDFC2\uDFC4-\uDFCB\uDFCE-\uDFFF]|\uD836[\uDE00-\uDE36\uDE3B-\uDE6C\uDE75\uDE84\uDE9B-\uDE9F\uDEA1-\uDEAF]|\uD83A[\uDC00-\uDCC4\uDCD0-\uDCD6]|\uD83B[\uDE00-\uDE03\uDE05-\uDE1F\uDE21\uDE
22\uDE24\uDE27\uDE29-\uDE32\uDE34-\uDE37\uDE39\uDE3B\uDE42\uDE47\uDE49\uDE4B\uDE4D-\uDE4F\uDE51\uDE52\uDE54\uDE57\uDE59\uDE5B\uDE5D\uDE5F\uDE61\uDE62\uDE64\uDE67-\uDE6A\uDE6C-\uDE72\uDE74-\uDE77\uDE79-\uDE7C\uDE7E\uDE80-\uDE89\uDE8B-\uDE9B\uDEA1-\uDEA3\uDEA5-\uDEA9\uDEAB-\uDEBB]|\uD869[\uDC00-\uDED6\uDF00-\uDFFF]|\uD86D[\uDC00-\uDF34\uDF40-\uDFFF]|\uD86E[\uDC00-\uDC1D\uDC20-\uDFFF]|\uD873[\uDC00-\uDEA1]|\uD87E[\uDC00-\uDE1D]|\uDB40[\uDD00-\uDDEF])*$/})(__BRYTHON__) +_window.py=function(src){ +var root=$B.py2js(src[0],"script","script"),js=root.to_js() +$B.set_import_paths() +new Function("$locals_script",js)({})}})(__BRYTHON__) ; -__BRYTHON__.implementation=[3,7,4,'dev',0] -__BRYTHON__.__MAGIC__="3.7.4" -__BRYTHON__.version_info=[3,7,0,'final',0] -__BRYTHON__.compiled_date="2019-07-10 12:11:32.046640" -__BRYTHON__.timestamp=1562753492046 -__BRYTHON__.builtin_module_names=["_aio","_ajax","_base64","_binascii","_jsre","_locale","_multiprocessing","_posixsubprocess","_profile","_sre_utils","_string","_strptime","_svg","_warnings","_webworker","_zlib_utils","array","builtins","dis","hashlib","long_int","marshal","math","modulefinder","posix","random","unicodedata"] +__BRYTHON__.implementation=[3,9,0,'final',0] +__BRYTHON__.__MAGIC__="3.9.0" +__BRYTHON__.version_info=[3,9,0,'final',0] +__BRYTHON__.compiled_date="2020-11-03 21:34:58.908604" +__BRYTHON__.timestamp=1604435698908 +__BRYTHON__.builtin_module_names=["_aio","_ajax","_base64","_binascii","_cmath","_io_classes","_json","_jsre","_locale","_multiprocessing","_posixsubprocess","_profile","_sre_utils","_string","_strptime","_svg","_warnings","_webcomponent","_webworker","_zlib_utils","array","builtins","dis","hashlib","long_int","marshal","math","math1","modulefinder","posix","random","unicodedata"] ; ;(function($B){Number.isInteger=Number.isInteger ||function(value){return typeof value==='number' && @@ -97,16 +113,15 @@ Math.floor(value)===value}; Number.isSafeInteger=Number.isSafeInteger ||function(value){return Number.isInteger(value)&& Math.abs(value)<=Number.MAX_SAFE_INTEGER;}; var js,$pos,res,$op var _b_=$B.builtins -var _window=self -var isWebWorker=$B.isa_web_worker= -('undefined' !==typeof WorkerGlobalScope)&& -("function"===typeof importScripts)&& -(navigator instanceof WorkerNavigator) +var _window +if($B.isNode){_window={location:{href:'',origin:'',pathname:''}}}else{ +_window=self} $B.parser={} var clone=$B.clone=function(obj){var res={} for(var attr in obj){res[attr]=obj[attr]} return res} -$B.last=function(table){return table[table.length-1]} +$B.last=function(table){if(table===undefined){console.log($B.frames_stack.slice())} +return table[table.length-1]} $B.list2obj=function(list,value){var res={},i=list.length if(value===undefined){value=true} while(i--> 0){res[list[i]]=value} @@ -149,62 +164,6 @@ else parent.insert(insert_at,new_node) assign.tree[1]=val return new_node} -var $add_yield_from_code=$B.parser.$add_yield_from_code=function(yield_ctx){var pnode=$get_node(yield_ctx) -var generator=$get_scope(yield_ctx).C.tree[0] -pnode.yield_atoms.splice(pnode.yield_atoms.indexOf(this),1) -generator.yields.splice(generator.yields.indexOf(this),1) -var INDENT=" ".repeat(pnode.indent) -var replace_with= -INDENT+"import sys"+"\n"+ -INDENT+"try:"+"\n"+ -INDENT+" _y = next(_i)"+"\n"+ -INDENT+"except StopIteration as _e:"+"\n"+ -INDENT+" _r = _e.value"+"\n"+ -INDENT+"else:"+"\n"+ -INDENT+" while 1:"+"\n"+ -INDENT+" try:"+"\n"+ -INDENT+" _s = yield _y"+"\n"+ -INDENT+" except GeneratorExit as _e:"+"\n"+ -INDENT+" try:"+"\n"+ -INDENT+" 
_m = _i.close"+"\n"+ -INDENT+" except AttributeError:"+"\n"+ -INDENT+" pass"+"\n"+ -INDENT+" else:"+"\n"+ -INDENT+" _m()"+"\n"+ -INDENT+" raise _e"+"\n"+ -INDENT+" except BaseException as _e:"+"\n"+ -INDENT+" _x = sys.exc_info()"+"\n"+ -INDENT+" try:"+"\n"+ -INDENT+" _m = _i.throw"+"\n"+ -INDENT+" except AttributeError:"+"\n"+ -INDENT+" raise _e"+"\n"+ -INDENT+" else:"+"\n"+ -INDENT+" try:"+"\n"+ -INDENT+" _y = _m(*_x)"+"\n"+ -INDENT+" except StopIteration as _e:"+"\n"+ -INDENT+" _r = _e.value"+"\n"+ -INDENT+" break"+"\n"+ -INDENT+" else:"+"\n"+ -INDENT+" try:"+"\n"+ -INDENT+" if _s is None:"+"\n"+ -INDENT+" _y = next(_i)"+"\n"+ -INDENT+" else:"+"\n"+ -INDENT+" _y = _i.send(_s)"+"\n"+ -INDENT+" except StopIteration as _e:"+"\n"+ -INDENT+" _r = _e.value"+"\n"+ -INDENT+" break"+"\n"; -var repl={_i :create_temp_name('__i'),_y :create_temp_name('__y'),_r :create_temp_name('__r'),_e :create_temp_name('__e'),_s :create_temp_name('__s'),_m :create_temp_name('__m'),} -pnode.bindings=pnode.bindings ||{} -for(attr in repl){replace_with=replace_with.replace(new RegExp("\\b"+attr+"\\b",'g'),repl[attr]) -pnode.bindings[repl[attr]]=true} -$tokenize(pnode,replace_with) -var params={iter_name:repl._i,result_var_name:repl._r,yield_expr:yield_ctx,} -if(yield_ctx.parent.type==='assign'){params.save_result=true -params.assign_ctx=yield_ctx.parent -params.save_result_rank=pnode.parent.children.length- -pnode.parent.children.indexOf(pnode)} -var new_node=new $YieldFromMarkerNode(params) -replace_node(pnode,new_node)} var chained_comp_num=0 var $_SyntaxError=$B.parser.$_SyntaxError=function(C,msg,indent){ var ctx_node=C @@ -214,24 +173,31 @@ while(root.parent !==undefined){root=root.parent} var module=tree_node.module,src=root.src,line_num=tree_node.line_num if(src){line_num=src.substr(0,$pos).split("\n").length} if(root.line_info){line_num=root.line_info} -if(indent !==undefined){line_num++} -if(indent===undefined){if(Array.isArray(msg)){$B.$SyntaxError(module,msg[0],src,$pos,line_num)} +if(indent===undefined ||typeof indent !="number"){if(Array.isArray(msg)){$B.$SyntaxError(module,msg[0],src,$pos,line_num,root)} if(msg==="Triple string end not found"){ $B.$SyntaxError(module,'invalid syntax : triple string end not found',src,$pos,line_num,root)} -$B.$SyntaxError(module,'invalid syntax',src,$pos,line_num,root)}else{throw $B.$IndentationError(module,msg,src,$pos,line_num,root)}} +var message='invalid syntax' +if(!(msg.startsWith("token "))){message+=' ('+msg+')'} +$B.$SyntaxError(module,message,src,$pos,line_num,root)}else{throw $B.$IndentationError(module,msg,src,$pos,line_num,root)}} +function SyntaxWarning(C,msg){var node=$get_node(C),module=$get_module(C),src=module.src,lines=src.split("\n"),message=`Module ${module.module}line ${node.line_num}:${msg}\n`+ +' '+lines[node.line_num-1] +$B.$getattr($B.stderr,"write")(message)} +function check_assignment(C){var ctx=C,forbidden=['assert','del','import','raise','return'] +while(ctx){if(forbidden.indexOf(ctx.type)>-1){$_SyntaxError(C,'invalid syntax - assign')}else if(ctx.type=="expr" && +ctx.tree[0].type=="op"){if($B.op2method.comparisons[ctx.tree[0].op]!==undefined){$_SyntaxError(C,["cannot assign to comparison"])}else{$_SyntaxError(C,["cannot assign to operator"])}} +ctx=ctx.parent}} var $Node=$B.parser.$Node=function(type){this.type=type -this.children=[] -this.yield_atoms=[] -this.add=function(child){ +this.children=[]} +$Node.prototype.add=function(child){ this.children[this.children.length]=child child.parent=this child.module=this.module} 
-this.insert=function(pos,child){ +$Node.prototype.insert=function(pos,child){ this.children.splice(pos,0,child) child.parent=this child.module=this.module} -this.toString=function(){return ""} -this.show=function(indent){ +$Node.prototype.toString=function(){return ""} +$Node.prototype.show=function(indent){ var res='' if(this.type==='module'){this.children.forEach(function(child){res+=child.show(indent)}) return res} @@ -244,7 +210,7 @@ this.children.forEach(function(child){res+='['+i+'] '+child.show(indent+4)}) if(this.children.length > 0){res+=' '.repeat(indent) res+='}\n'} return res} -this.to_js=function(indent){ +$Node.prototype.to_js=function(indent){ if(this.js !==undefined){return this.js} this.res=[] this.unbound=[] @@ -263,32 +229,76 @@ if(this.children.length > 0){this.res.push(' '.repeat(indent)) this.res.push('}\n')}} this.js=this.res.join('') return this.js} -this.transform=function(rank){ -if(this.yield_atoms.length > 0){ -this.parent.children.splice(rank,1) -var offset=0 -this.yield_atoms.forEach(function(atom){ -var temp_node=new $Node() -var js='var $yield_value'+$loop_num -js+=' = '+(atom.to_js()||'None') -new $NodeJSCtx(temp_node,js) -this.parent.insert(rank+offset,temp_node) -var yield_node=new $Node() -this.parent.insert(rank+offset+1,yield_node) -var yield_expr=new $YieldCtx(new $NodeCtx(yield_node)) -new $StringCtx(yield_expr,'$yield_value'+$loop_num) -var set_yield=new $Node() -set_yield.is_set_yield_value=true -set_yield.after_yield=true -js=$loop_num -new $NodeJSCtx(set_yield,js) -this.parent.insert(rank+offset+2,set_yield) -atom.to_js=(function(x){return function(){return '$yield_value'+x}})($loop_num) -$loop_num++ -offset+=3},this) -this.parent.insert(rank+offset,this) -this.yield_atoms=[] -return offset} +$Node.prototype.transform=function(rank){ +if(this.has_await){this.parent.insert(rank,$NodeJS("var save_stack = $B.save_stack()")) +this.parent.insert(rank+2,$NodeJS("$B.restore_stack(save_stack, $locals)")) +this.has_await=false +return 1} +if(this.has_yield && ! this.has_yield.transformed){ +var parent=this.parent +if(this.has_yield.from){var new_node=new $Node() +var new_ctx=new $NodeCtx(new_node) +var new_expr=new $ExprCtx(new_ctx,'js',false) +var _id=new $RawJSCtx(new_expr,`var _i${this.has_yield.from_num}`) +var assign=new $AssignCtx(new_expr) +var right=new $ExprCtx(assign) +right.tree=this.has_yield.tree +parent.insert(rank,new_node) +var pnode=$get_node(this.has_yield) +var n=this.has_yield.from_num +var replace_with=`$B.$import("sys",[],{}) +_i${n}=_b_.iter(_i${n}) +var $failed${n}=false +try{var _y${n}=_b_.next(_i${n})}catch(_e){$B.set_exc(_e) +$failed${n}=true +$B.pmframe=$B.last($B.frames_stack) +_e=$B.exception(_e) +if(_e.__class__===_b_.StopIteration){var _r${n}=$B.$getattr(_e,"value")}else{throw _e}} +if(! $failed${n}){while(true){var $failed1${n}=false +try{$B.leave_frame({$locals}) +var _s${n}=yield _y${n} +$B.frames_stack.push($top_frame)}catch(_e){if(_e.__class__===_b_.GeneratorExit){var $failed2${n}=false +try{var _m${n}=$B.$geatttr(_i${n},"close")}catch(_e1){$failed2${n}=true +if(_e1.__class__ !==_b_.AttributeError){throw _e1}} +if(! $failed2${n}){$B.$call(_m${n})()} +throw _e}else if($B.is_exc(_e,[_b_.BaseException])){var _x=$B.$call($B.$getattr($locals.sys,"exc_info"))() +var $failed3${n}=false +try{var _m${n}=$B.$getattr(_i${n},"throw")}catch(err){$failed3${n}=true +if($B.is_exc(err,[_b_.AttributeError])){throw err}} +if(! 
$failed3${n}){try{_y${n}=$B.$call(_m${n}).apply(null,_b_.list.$factory(_x${n}))}catch(err){if($B.$is_exc(err,[_b_.StopIteration])){_r${n}=$B.$getattr(err,"value") +break} +throw err}}}} +if(! $failed1${n}){try{if(_s${n}===_b_.None){_y${n}=_b_.next(_i${n})}else{_y${n}=$B.$call($B.$getattr(_i${n},"send"))(_s${n})}}catch(err){if($B.is_exc(err,[_b_.StopIteration])){_r${n}=$B.$getattr(err,"value") +break} +throw err}}}}` +parent.insert(rank+1,$NodeJS(replace_with)) +return 3} +parent.children.splice(rank,1) +if(this.has_yield.tree[0].type==='abstract_expr'){new_node=$NodeJS("var result = _b_.None")}else{var new_node=new $Node() +var new_ctx=new $NodeCtx(new_node) +var new_expr=new $ExprCtx(new_ctx,'js',false) +var _id=new $RawJSCtx(new_expr,'var result') +var assign=new $AssignCtx(new_expr) +assign.tree[1]=this.has_yield.tree[0] +_id.parent=assign} +new_node.line_num=this.line_num +parent.insert(rank,new_node) +var try_node=new $NodeJS("try") +try_node.add($NodeJS("$B.leave_frame({$locals})")) +try_node.add(this) +parent.insert(rank+1,try_node) +this.has_yield.to_js=function(){return 'yield result'} +this.has_yield.transformed=true +var i=0 +while(i < try_node.children.length){var offset=try_node.children[i].transform(i) +if(offset===undefined){offset=1} +i+=offset} +var catch_node=$NodeJS(`catch(err${this.line_num})`) +catch_node.add($NodeJS("$B.frames_stack.push($top_frame)")) +catch_node.add($NodeJS(`throw err${this.line_num}`)) +parent.insert(rank+2,catch_node) +parent.insert(rank+3,$NodeJS("$B.frames_stack.push($top_frame)")) +return 2} if(this.type==='module'){ this.__doc__=$get_docstring(this) var i=0 @@ -302,109 +312,190 @@ while(i < this.children.length){var offset=this.children[i].transform(i) if(offset===undefined){offset=1} i+=offset} if(ctx_offset===undefined){ctx_offset=1} -if(this.C && this.C.tree !==undefined && -this.C.tree[0].type=="generator"){var def_node=this,def_ctx=def_node.C.tree[0] -var blocks=[],node=def_node.parent_block,is_comp=node.is_comp -while(true){var node_id=node.id.replace(/\./g,'_'),block='"$locals_'+node_id+'": ' -if(is_comp){block+='$B.clone($locals_'+node_id+')'}else{block+='$locals_'+node_id} -blocks.push(block) -node=node.parent_block -if(node===undefined ||node.id=='__builtins__'){break}} -blocks='{'+blocks+'}' -var parent=this.parent -while(parent !==undefined && parent.id===undefined){parent=parent.parent} -var g=$B.$BRgenerator(def_ctx.name,blocks,def_ctx.id,def_node),block_id=parent.id.replace(/\./g,'_'),name=def_ctx.decorated ? 
def_ctx.alias : -def_ctx.name+def_ctx.num,res='var '+def_ctx.name+def_ctx.num+' = '+ -'$locals_'+block_id+'["'+def_ctx.name+ -'"] = $B.genfunc("'+ -def_ctx.name+'", '+blocks+',['+g+'],'+ -def_ctx.default_str+')' -var new_node=$NodeJS(res) -new_node.bindings=this.bindings -this.parent.children.splice(rank,1) -this.parent.insert(rank+offset-1,new_node)} return ctx_offset}} -this.clone=function(){var res=new $Node(this.type) +$Node.prototype.clone=function(){var res=new $Node(this.type) for(var attr in this){res[attr]=this[attr]} -return res}} -var $YieldFromMarkerNode=$B.parser.$YieldFromMarkerNode=function(params){$Node.apply(this,['marker']) -new $NodeCtx(this) -this.params=params -this.tree -this.transform=function(rank){add_identnode(this.parent,rank,params.iter_name,new $JSCode('$B.$iter('+params.yield_expr.tree[0].to_js()+ -')') -) -if(params.save_result){var assign_ctx=params.assign_ctx -assign_ctx.tree.pop() -var expr_ctx=new $ExprCtx(assign_ctx,'id',true) -var idctx=new $IdCtx(expr_ctx,params.result_var_name) -assign_ctx.tree[1]=expr_ctx -var node=new $Node() -node.C=assign_ctx -assign_ctx.node=node -var new_rank=params.save_result_rank+rank+1 -this.parent.insert(new_rank,node) -assign_ctx.transform(node,new_rank)} -return 2}} -var $MarkerNode=$B.parser.$MarkerNode=function(name){$Node.apply(this,['marker']) -new $NodeCtx(this) -this._name=name -this.transform=function(rank){return 1} -this.to_js=function(){return ''}} +return res} +$Node.prototype.clone_tree=function(){var res=new $Node(this.type) +for(var attr in this){res[attr]=this[attr]} +res.children=[] +for(var i=0,len=this.children.length;i < len;i++){res.add(this.children[i].clone_tree())} +return res} var $AbstractExprCtx=$B.parser.$AbstractExprCtx=function(C,with_commas){this.type='abstract_expr' this.with_commas=with_commas this.parent=C this.tree=[] -C.tree[C.tree.length]=this -this.toString=function(){return '(abstract_expr '+with_commas+') '+this.tree} -this.to_js=function(){this.js_processed=true +C.tree[C.tree.length]=this} +$AbstractExprCtx.prototype.toString=function(){return '(abstract_expr '+this.with_commas+') '+this.tree} +$AbstractExprCtx.prototype.transition=function(token,value){var C=this +var packed=C.packed,is_await=C.is_await,assign=C.assign +if(! 
assign){switch(token){case 'id': +case 'imaginary': +case 'int': +case 'float': +case 'str': +case 'bytes': +case '[': +case '(': +case '{': +case '.': +case 'not': +case 'lambda': +case 'yield': +C.parent.tree.pop() +var commas=C.with_commas +C=C.parent +C.packed=packed +C.is_await=is_await +if(assign){console.log("set assign to parent",C) +C.assign=assign}}} +switch(token){case 'await': +return new $AwaitCtx(C) +case 'id': +return new $IdCtx(new $ExprCtx(C,'id',commas),value) +case 'str': +return new $StringCtx(new $ExprCtx(C,'str',commas),value) +case 'bytes': +return new $StringCtx(new $ExprCtx(C,'bytes',commas),value) +case 'int': +return new $NumberCtx('int',new $ExprCtx(C,'int',commas),value) +case 'float': +return new $NumberCtx('float',new $ExprCtx(C,'float',commas),value) +case 'imaginary': +return new $NumberCtx('imaginary',new $ExprCtx(C,'imaginary',commas),value) +case '(': +return new $ListOrTupleCtx( +new $ExprCtx(C,'tuple',commas),'tuple') +case '[': +return new $ListOrTupleCtx( +new $ExprCtx(C,'list',commas),'list') +case '{': +return new $DictOrSetCtx( +new $ExprCtx(C,'dict_or_set',commas)) +case '.': +return new $EllipsisCtx( +new $ExprCtx(C,'ellipsis',commas)) +case 'not': +if(C.type=='op' && C.op=='is'){ +C.op='is_not' +return C} +return new $NotCtx(new $ExprCtx(C,'not',commas)) +case 'lambda': +return new $LambdaCtx(new $ExprCtx(C,'lambda',commas)) +case 'op': +var tg=value +switch(tg){case '*': +C.parent.tree.pop() +var commas=C.with_commas +C=C.parent +return new $PackedCtx( +new $ExprCtx(C,'expr',commas)) +case '-': +case '~': +case '+': +C.parent.tree.pop() +var left=new $UnaryCtx(C.parent,tg) +if(tg=='-'){var op_expr=new $OpCtx(left,'unary_neg')}else if(tg=='+'){var op_expr=new $OpCtx(left,'unary_pos')}else{var op_expr=new $OpCtx(left,'unary_inv')} +return new $AbstractExprCtx(op_expr,false) +case 'not': +C.parent.tree.pop() +var commas=C.with_commas +C=C.parent +return new $NotCtx( +new $ExprCtx(C,'not',commas))} +$_SyntaxError(C,'token '+token+' after '+ +C) +case '=': +$_SyntaxError(C,'token '+token+' after '+ +C) +case 'yield': +return new $AbstractExprCtx(new $YieldCtx(C),true) +case ':': +if(C.parent.type=="sub" || +(C.parent.type=="list_or_tuple" && +C.parent.parent.type=="sub")){return new $AbstractExprCtx(new $SliceCtx(C.parent),false)} +return $transition(C.parent,token,value) +case ')': +case ',': +switch(C.parent.type){case 'list_or_tuple': +case 'slice': +case 'call_arg': +case 'op': +case 'yield': +break +case 'annotation': +$_SyntaxError(C,"empty annotation") +default: +$_SyntaxError(C,token)}} +return $transition(C.parent,token,value)} +$AbstractExprCtx.prototype.to_js=function(){this.js_processed=true if(this.type==='list')return '['+$to_js(this.tree)+']' -return $to_js(this.tree)}} +return $to_js(this.tree)} var $AliasCtx=$B.parser.$AliasCtx=function(C){ this.type='ctx_manager_alias' this.parent=C this.tree=[] C.tree[C.tree.length-1].alias=this} +$AliasCtx.prototype.transition=function(token,value){var C=this +switch(token){case ',': +case ':': +C.parent.set_alias(C.tree[0].tree[0]) +return $transition(C.parent,token,value)} +$_SyntaxError(C,'token '+token+' after '+C)} var $AnnotationCtx=$B.parser.$AnnotationCtx=function(C){ this.type='annotation' this.parent=C this.tree=[] C.annotation=this var scope=$get_scope(C) +if(scope.binding.__annotations__===undefined){ +scope.binding.__annotations__=true +C.create_annotations=true} if(scope.ntype=="def" && C.tree && C.tree.length > 0 && C.tree[0].type=="id"){var name=C.tree[0].value if(scope.globals 
&& scope.globals.has(name)>-1){$_SyntaxError(C,["annotated name '"+name+ "' can't be global"])} scope.annotations=scope.annotations ||new Set() scope.annotations.add(name) -scope.binding=scope.binding ||{} -scope.binding[name]=true} -this.toString=function(){return '(annotation) '+this.tree} -this.to_js=function(){return $to_js(this.tree)}} +if(! C.$in_parens){scope.binding=scope.binding ||{} +scope.binding[name]=true}}} +$AnnotationCtx.prototype.toString=function(){return '(annotation) '+this.tree} +$AnnotationCtx.prototype.transition=function(token,value){var C=this +if(token=="eol" && C.tree.length==1 && +C.tree[0].tree.length==0){$_SyntaxError(C,"empty annotation")}else if(token==':' && C.parent.type !="def"){$_SyntaxError(C,"more than one annotation")}else if(token=="augm_assign"){$_SyntaxError(C,"augmented assign as annotation")}else if(token=="op"){$_SyntaxError(C,"operator as annotation")} +return $transition(C.parent,token)} +$AnnotationCtx.prototype.to_js=function(){return $to_js(this.tree)} var $AssertCtx=$B.parser.$AssertCtx=function(C){ this.type='assert' this.parent=C this.tree=[] -C.tree[C.tree.length]=this -this.toString=function(){return '(assert) '+this.tree} -this.transform=function(node,rank){if(this.tree[0].type=='list_or_tuple'){ -var condition=this.tree[0].tree[0] -var message=this.tree[0].tree[1]}else{var condition=this.tree[0] +C.tree[C.tree.length]=this} +$AssertCtx.prototype.toString=function(){return '(assert) '+this.tree} +$AssertCtx.prototype.transition=function(token,value){var C=this +if(token==","){if(this.tree.length > 1){$_SyntaxError(C,"too many commas after assert")} +return new $AbstractExprCtx(this,false)} +if(token=='eol'){return $transition(C.parent,token)} +$_SyntaxError(C,token)} +$AssertCtx.prototype.transform=function(node,rank){if(this.tree.length > 1){ +var condition=this.tree[0] +var message=this.tree[1]}else{var condition=this.tree[0] var message=null} +if(this.tree[0].type=="expr" && this.tree[0].name=="tuple" && +this.tree[0].tree[0].tree.length > 1){SyntaxWarning(this,"assertion is always true, perhaps "+ +"remove parentheses?")} var new_ctx=new $ConditionCtx(node.C,'if') var not_ctx=new $NotCtx(new_ctx) not_ctx.tree=[condition] node.C=new_ctx var new_node=new $Node() -var js='throw AssertionError.$factory("AssertionError")' -if(message !==null){js='throw AssertionError.$factory(str.$factory('+ +var js='throw _b_.AssertionError.$factory()' +if(message !==null){js='throw _b_.AssertionError.$factory(_b_.str.$factory('+ message.to_js()+'))'} new $NodeJSCtx(new_node,js) -node.add(new_node)}} +node.add(new_node)} var $AssignCtx=$B.parser.$AssignCtx=function(C,expression){ -var ctx=C -while(ctx){if(ctx.type=='assert'){$_SyntaxError(C,'invalid syntax - assign')} -ctx=ctx.parent} +check_assignment(C) +if(C.type=="expr" && C.tree[0].type=="lambda"){$_SyntaxError(C,["cannot assign to lambda"])} this.type='assign' if(expression=='expression'){this.expression=true console.log("parent of assign expr",C.parent)} @@ -413,22 +504,28 @@ C.parent.tree[C.parent.tree.length]=this this.parent=C.parent this.tree=[C] var scope=$get_scope(this) -if(C.type=='expr' && C.tree[0].type=='call'){$_SyntaxError(C,["can't assign to function call "])}else if(C.type=='list_or_tuple' || +if(C.type=='expr' && C.tree[0].type=='call'){$_SyntaxError(C,["cannot assign to function call "])}else if(C.type=='list_or_tuple' || (C.type=='expr' && C.tree[0].type=='list_or_tuple')){if(C.type=='expr'){C=C.tree[0]} C.bind_ids(scope)}else 
if(C.type=='assign'){C.tree.forEach(function(elt){var assigned=elt.tree[0] if(assigned.type=='id'){$bind(assigned.value,scope,this)}},this)}else{var assigned=C.tree[0] -if(assigned && assigned.type=='id'){if(noassign[assigned.value]===true){$_SyntaxError(C,["can't assign to keyword"])} +if(assigned && assigned.type=='id'){if(noassign[assigned.value]===true){$_SyntaxError(C,["cannot assign to keyword"])} assigned.bound=true -if(!$B._globals[scope.id]|| -$B._globals[scope.id][assigned.value]===undefined){ +if(!scope.globals ||!scope.globals.has(assigned.value)){ var node=$get_node(this) node.bound_before=Object.keys(scope.binding) $bind(assigned.value,scope,this)}else{ var module=$get_module(C) -$bind(assigned.value,module,this)}}else if(["str","int","float","complex"].indexOf(assigned.type)>-1){$_SyntaxError(C,["can't assign to literal"])}else if(assigned.type=="unary"){$_SyntaxError(C,["can't assign to operator"])}} -this.guess_type=function(){return} -this.toString=function(){return '(assign) '+this.tree[0]+'='+this.tree[1]} -this.transform=function(node,rank){ +assigned.global_module=module.module +$bind(assigned.value,module,this)}}else if(["str","int","float","complex"].indexOf(assigned.type)>-1){$_SyntaxError(C,["cannot assign to literal"])}else if(assigned.type=="unary"){$_SyntaxError(C,["cannot assign to operator"])}}} +$AssignCtx.prototype.guess_type=function(){return} +$AssignCtx.prototype.toString=function(){return '(assign) '+this.tree[0]+'='+this.tree[1]} +$AssignCtx.prototype.transition=function(token,value){var C=this +if(token=='eol'){if(C.tree[1].type=='abstract_expr'){$_SyntaxError(C,'token '+token+' after '+ +C)} +C.guess_type() +return $transition(C.parent,'eol')} +$_SyntaxError(C,'token '+token+' after '+C)} +$AssignCtx.prototype.transform=function(node,rank){ var scope=$get_scope(this) var left=this.tree[0],right=this.tree[1],assigned=[] while(left.type=='assign'){assigned.push(left.tree[1]) @@ -440,7 +537,11 @@ var nleft=new $RawJSCtx(ctx,'var $temp'+$loop_num) nleft.tree=ctx.tree var nassign=new $AssignCtx(nleft) nassign.tree[1]=right -assigned.forEach(function(elt){var new_node=new $Node(),node_ctx=new $NodeCtx(new_node) +assigned.forEach(function(elt){if(elt.type=="expr" && elt.tree[0].type=="list_or_tuple" && +elt.tree[0].real=="tuple" && +elt.tree[0].tree.length==1){ +elt=elt.tree[0].tree[0]} +var new_node=new $Node(),node_ctx=new $NodeCtx(new_node) new_node.locals=node.locals new_node.line_num=node.line_num node.parent.insert(rank+1,new_node) @@ -448,14 +549,14 @@ elt.parent=node_ctx var assign=new $AssignCtx(elt) new $RawJSCtx(assign,'$temp'+$loop_num)}) $loop_num++ -return assigned.length-1} +this.tree[0]=left +return} var left_items=null switch(left.type){case 'expr': if(left.tree.length > 1){left_items=left.tree}else if(left.tree[0].type=='list_or_tuple' || left.tree[0].type=='target_list'){left_items=left.tree[0].tree}else if(left.tree[0].type=='id'){ var name=left.tree[0].value -if($B._globals && $B._globals[scope.id] -&& $B._globals[scope.id][name]){}else{left.tree[0].bound=true}} +if(scope.globals && scope.globals.has(name)){}else{left.tree[0].bound=true}} break case 'target_list': case 'list_or_tuple': @@ -524,14 +625,14 @@ if(expr.type=='packed' || min_length-- break}} node.parent.insert(rank++,$NodeJS('if('+rlname+'.length<'+min_length+'){'+ -'throw ValueError.$factory('+ +'throw _b_.ValueError.$factory('+ '"need more than " +'+rlname+ '.length + " value" + ('+rlname+ '.length > 1 ?'+' "s" : "") + " to unpack")}' ) ) 
if(packed==null){node.parent.insert(rank++,$NodeJS('if('+rlname+'.length>'+min_length+'){'+ -'throw ValueError.$factory('+ +'throw _b_.ValueError.$factory('+ '"too many values to unpack '+ '(expected '+left_items.length+')"'+ ')'+ @@ -549,12 +650,12 @@ if(packed==null ||i < packed){js+='['+i+']'}else if(i==packed){js+='.slice('+i+' (left_items.length-i-1)+')'}else{js+='['+rlname+'.length-'+(left_items.length-i)+']'} assign.tree[1]=new $JSCode(js)}) $loop_num++}} -this.to_js=function(){this.js_processed=true +$AssignCtx.prototype.to_js=function(){this.js_processed=true if(this.parent.type=='call'){ return '{$nat:"kw",name:'+this.tree[0].to_js()+ ',value:'+this.tree[1].to_js()+'}'} var left=this.tree[0] -while(left.type=='expr'){left=left.tree[0]} +while(left.type=='expr' && ! left.assign){left=left.tree[0]} var right=this.tree[1] if(left.type=='attribute' ||left.type=='sub'){ var right_js=right.to_js() @@ -574,7 +675,7 @@ left.func='getattr' if(left.assign_self){return res+left_to_js[0]+rvar+left_to_js[1]+rvar+')'} res+=left_to_js res=res.substr(0,res.length-1) -return res+','+rvar+');None;'} +return res+','+rvar+');_b_.None;'} if(left.type=='sub'){ var seq=left.value.to_js(),temp='$temp'+$loop_num,type if(left.value.type=='id'){type=$get_node(this).locals[left.value.value]} @@ -595,18 +696,26 @@ right.to_js()+')'} if(type=='list'){return res} res+='\n}else{' if(left.tree.length==1){res+='$B.$setitem('+left.value.to_js()+ -','+left.tree[0].to_js()+','+right_js+')};None;'}else{left.func='setitem' +','+left.tree[0].to_js()+','+right_js+')};_b_.None;'}else{left.func='setitem' res+=left.to_js() res=res.substr(0,res.length-1) left.func='getitem' -res+=','+right_js+')};None;'} +res+=','+right_js+')};_b_.None;'} return res}} -return left.to_js()+' = '+right.to_js()}} +return left.to_js()+' = '+right.to_js()} var $AsyncCtx=$B.parser.$AsyncCtx=function(C){ this.type='async' this.parent=C -C.async=true -this.toString=function(){return '(async)'}} +C.async=true} +$AsyncCtx.prototype.toString=function(){return '(async)'} +$AsyncCtx.prototype.transition=function(token,value){var C=this +if(token=="def"){return $transition(C.parent,token,value)}else if(token=="for" ||token=="with"){var ntype=$get_scope(C).ntype +if(ntype !=="def" && ntype !="generator"){$_SyntaxError(C,["'async "+token+ +"' outside async function"])} +var ctx=$transition(C.parent,token,value) +ctx.parent.async=true +return ctx} +$_SyntaxError(C,'token '+token+' after '+C)} var $AttrCtx=$B.parser.$AttrCtx=function(C){ this.type='attribute' this.value=C.tree[0] @@ -614,25 +723,34 @@ this.parent=C C.tree.pop() C.tree[C.tree.length]=this this.tree=[] -this.func='getattr' -this.toString=function(){return '(attr) '+this.value+'.'+this.name} -this.to_js=function(){this.js_processed=true +this.func='getattr' } +$AttrCtx.prototype.toString=function(){return '(attr) '+this.value+'.'+this.name} +$AttrCtx.prototype.transition=function(token,value){var C=this +if(token==='id'){var name=value +if(noassign[name]===true){$_SyntaxError(C,["cannot assign to "+name])} +name=$mangle(name,C) +C.name=name +return C.parent} +$_SyntaxError(C,token)} +$AttrCtx.prototype.to_js=function(){this.js_processed=true var js=this.value.to_js() if(this.func=="setattr" && this.value.type=="id"){var scope=$get_scope(this),parent=scope.parent if(scope.ntype=="def"){if(parent.ntype=="class"){var params=scope.C.tree[0].positional_list if(this.value.value==params[0]&& parent.C && parent.C.tree[0].args===undefined){ this.assign_self=true -return[js+".__class__ && !"+ 
+return[js+".__class__ && "+js+".__dict__ && !"+ js+".__class__.$has_setattr && ! "+js+ -".$is_class ? "+js+ -".__dict__.$string_dict['"+this.name+ -"'] = "," : $B.$setattr("+js+ +".$is_class ? _b_.dict.$setitem("+js+ +".__dict__, '"+$B.from_alias(this.name)+ +"', ",") : $B.$setattr("+js+ ', "'+this.name+'", ']}}}} if(this.func=='setattr'){ -return '$B.$setattr('+js+',"'+this.name+'")'}else{return '$B.$getattr('+js+',"'+this.name+'")'}}} +return '$B.$setattr('+js+',"'+this.name+'")'}else{return '$B.$getattr('+js+',"'+this.name+'")'}} var $AugmentedAssignCtx=$B.parser.$AugmentedAssignCtx=function(C,op){ +check_assignment(C) this.type='augm_assign' +this.C=C this.parent=C.parent C.parent.tree.pop() C.parent.tree[C.parent.tree.length]=this @@ -641,14 +759,19 @@ this.tree=[C] var scope=this.scope=$get_scope(this) if(C.type=='expr'){var assigned=C.tree[0] if(assigned.type=='id'){var name=assigned.value -if(noassign[name]===true){$_SyntaxError(C,["can't assign to keyword"])}else if((scope.ntype=='def' ||scope.ntype=='generator')&& +if(noassign[name]===true){$_SyntaxError(C,["cannot assign to keyword"])}else if((scope.ntype=='def' ||scope.ntype=='generator')&& (scope.binding[name]===undefined)){if(scope.globals===undefined || ! scope.globals.has(name)){ -assigned.unbound=true}}}else if(['str','int','float','complex'].indexOf(assigned.type)>-1){$_SyntaxError(C,["can't assign to literal"])}} +assigned.unbound=true}}}else if(['str','int','float','complex'].indexOf(assigned.type)>-1){$_SyntaxError(C,["cannot assign to literal"])}} $get_node(this).bound_before=Object.keys(scope.binding) -this.module=scope.module -this.toString=function(){return '(augm assign) '+this.tree} -this.transform=function(node,rank){var func='__'+$operators[op]+'__',offset=0,parent=node.parent,line_num=node.line_num,lnum_set=false +this.module=scope.module} +$AugmentedAssignCtx.prototype.toString=function(){return '(augm assign) '+this.tree} +$AugmentedAssignCtx.prototype.transition=function(token,value){var C=this +if(token=='eol'){if(C.tree[1].type=='abstract_expr'){$_SyntaxError(C,'token '+token+' after '+ +C)} +return $transition(C.parent,'eol')} +$_SyntaxError(C,'token '+token+' after '+C)} +$AugmentedAssignCtx.prototype.transform=function(node,rank){var C=this.C,op=this.op,func='__'+$operators[op]+'__',offset=0,parent=node.parent,line_num=node.line_num,lnum_set=false parent.children.splice(rank,1) var left_is_id=(this.tree[0].type=='expr' && this.tree[0].tree[0].type=='id') @@ -656,14 +779,15 @@ if(left_is_id){var left_bound_to_int= this.tree[0].tree[0].bindingType(this.scope)=="int" this.tree[0].tree[0].augm_assign=true if($B.debug > 0){var check_node=$NodeJS('if('+this.tree[0].to_js()+ -' === undefined){throw NameError.$factory("name \'' + - this.tree[0].tree[0].value + '\' is not defined")}') -check_node.forced_line_num=node.line_num +' === undefined){throw _b_.NameError.$factory("name \'' + + this.tree[0].tree[0].value + '\' is not defined")}') node.parent.insert(rank,check_node) offset++} var left_id=this.tree[0].tree[0].value,was_bound=this.scope.binding[left_id]!==undefined,left_id_unbound=this.tree[0].tree[0].unbound} var right_is_int=(this.tree[1].type=='expr' && this.tree[1].tree[0].type=='int') +if(right_is_int){var value=this.tree[1].tree[0].value,to_int=parseInt(value[1],value[0]) +right_is_int=(to_int > $B.min_int)&&(to_int < $B.max_int)} var right=right_is_int ? 
this.tree[1].tree[0].to_js():'$temp' if(!right_is_int){ var new_node=new $Node() @@ -706,7 +830,12 @@ parent.insert(rank+offset,new_node) in_class=true offset++}}} var left=C.tree[0].to_js() -if(left_bound_to_int && right_is_int){parent.insert(rank+offset,$NodeJS(left+" "+op+" "+right)) +if(C.tree[0].type=="id"){var binding_scope=C.tree[0].firstBindingScopeId(),left_value=C.tree[0].value +if(binding_scope){left="$locals_"+binding_scope.replace(/\./g,'_')+ +'["'+left_value+'"]'}else{left='$locals["'+left_value+'"]'}} +if(left_bound_to_int && right_is_int && +op !="//="){ +parent.insert(rank+offset,$NodeJS(left+" "+op+" "+right)) return offset++} prefix=prefix && !C.tree[0].unknown_binding && !left_id_unbound var op1=op.charAt(0) @@ -729,7 +858,7 @@ if(C.tree[0].type=='sub' && ('+='==op ||'-='==op ||'*='==op)&& C.tree[0].tree.length==1){var js1='$B.augm_item_'+aaops[op]+'('+ C.tree[0].value.to_js()+','+ -C.tree[0].tree[0].to_js()+','+right+');None;' +C.tree[0].tree[0].to_js()+','+right+');_b_.None;' var new_node=new $Node() if(!lnum_set){new_node.line_num=line_num;lnum_set=true} new $NodeJSCtx(new_node,js1) @@ -740,7 +869,7 @@ var new_node=new $Node() if(!lnum_set){new_node.line_num=line_num;lnum_set=true} var js='' if(prefix){js+='else '} -js+='if(!hasattr('+C.to_js()+',"'+func+'"))' +js+='if(! _b_.hasattr('+C.to_js()+',"'+func+'"))' new $NodeJSCtx(new_node,js) parent.insert(rank+offset,new_node) offset++ @@ -750,7 +879,7 @@ aa1.line_num=node.line_num new_node.add(aa1) var ctx1=new $NodeCtx(aa1) var expr1=new $ExprCtx(ctx1,'clone',false) -if(left_id_unbound){new $RawJSCtx(expr1,'$locals["'+left_id+'"]')}else{expr1.tree=C.tree +if(left_id_unbound){new $RawJSCtx(expr1,left)}else{expr1.tree=C.tree expr1.tree.forEach(function(elt){elt.parent=expr1})} var assign1=new $AssignCtx(expr1) var new_op=new $OpCtx(expr1,op.substr(0,op.length-1)) @@ -766,7 +895,9 @@ aa2.line_num=node.line_num else_node.add(aa2) var ctx2=new $NodeCtx(aa2) var expr2=new $ExprCtx(ctx2,'clone',false) -if(left_id_unbound){new $RawJSCtx(expr2,'$locals["'+left_id+'"]')}else{expr2.tree=C.tree +if(left_id_unbound){var js=left +if(! 
binding_scope){js='$B.$local_search("'+left_value+'");'+left} +new $RawJSCtx(expr2,js)}else{expr2.tree=C.tree expr2.tree.forEach(function(elt){elt.parent=expr2})} var assign2=new $AssignCtx(expr2) assign2.tree.push($NodeJS('$B.$getattr('+C.to_js()+',"'+ @@ -775,13 +906,21 @@ expr2.parent.tree.pop() expr2.parent.tree.push(assign2) if(left_is_id && !was_bound && !this.scope.blurred){this.scope.binding[left_id]=undefined} return offset} -this.to_js=function(){return ''}} +$AugmentedAssignCtx.prototype.to_js=function(){return ''} var $AwaitCtx=$B.parser.$AwaitCtx=function(C){ this.type='await' this.parent=C this.tree=[] C.tree.push(this) -this.to_js=function(){return 'await $B.promise('+$to_js(this.tree)+')'}} +var p=C +while(p){if(p.type=="list_or_tuple"){p.is_await=true} +p=p.parent}} +$AwaitCtx.prototype.transition=function(token,value){var C=this +C.parent.is_await=true +return $transition(C.parent,token,value)} +$AwaitCtx.prototype.to_js=function(){return 'var save_stack = $B.save_stack();'+ +'await ($B.promise('+$to_js(this.tree)+'));'+ +'$B.restore_stack(save_stack, $locals); '} var $BodyCtx=$B.parser.$BodyCtx=function(C){ var ctx_node=C.parent while(ctx_node.type !=='node'){ctx_node=ctx_node.parent} @@ -818,25 +957,93 @@ var $BreakCtx=$B.parser.$BreakCtx=function(C){ this.type='break' this.parent=C C.tree[C.tree.length]=this -set_loop_context.apply(this,[C,'break']) -this.toString=function(){return 'break '} -this.to_js=function(){this.js_processed=true +set_loop_context.apply(this,[C,'break'])} +$BreakCtx.prototype.toString=function(){return 'break '} +$BreakCtx.prototype.transition=function(token,value){var C=this +if(token=='eol'){return $transition(C.parent,'eol')} +$_SyntaxError(C,token)} +$BreakCtx.prototype.to_js=function(){this.js_processed=true var scope=$get_scope(this) var res=';$locals_'+scope.id.replace(/\./g,'_')+ '["$no_break'+this.loop_ctx.loop_num+'"] = false' -if(this.loop_ctx.type !='asyncfor'){res+=';break'}else{res+=';throw StopIteration.$factory('+ +if(this.loop_ctx.type !='asyncfor'){res+=';break'}else{res+=';throw _b_.StopIteration.$factory('+ this.loop_ctx.loop_num+')'} -return res}} +return res} var $CallArgCtx=$B.parser.$CallArgCtx=function(C){ this.type='call_arg' this.parent=C this.start=$pos this.tree=[] C.tree.push(this) -this.expect='id' -this.toString=function(){return 'call_arg '+this.tree} -this.to_js=function(){this.js_processed=true -return $to_js(this.tree)}} +this.expect='id'} +$CallArgCtx.prototype.toString=function(){return 'call_arg '+this.tree} +$CallArgCtx.prototype.transition=function(token,value){var C=this +switch(token){case 'await': +case 'id': +case 'imaginary': +case 'int': +case 'float': +case 'str': +case 'bytes': +case '[': +case '(': +case '{': +case '.': +case 'not': +case 'lambda': +if(C.expect=='id'){C.expect=',' +var expr=new $AbstractExprCtx(C,false) +return $transition(expr,token,value)} +break +case '=': +if(C.expect==','){return new $ExprCtx(new $KwArgCtx(C),'kw_value',false)} +break +case 'for': +if(this.parent.tree.length > 1){$_SyntaxError(C,"non-parenthesized generator expression")} +var lst=new $ListOrTupleCtx(C,'gen_expr') +lst.vars=C.vars +lst.locals=C.locals +lst.intervals=[C.start] +C.tree.pop() +lst.expression=C.tree +C.tree=[lst] +lst.tree=[] +var comp=new $ComprehensionCtx(lst) +return new $TargetListCtx(new $CompForCtx(comp)) +case 'op': +if(C.expect=='id'){var op=value +C.expect=',' +switch(op){case '+': +case '-': +case '~': +return $transition(new $AbstractExprCtx(C,false),token,op) +case '*': +return new 
$StarArgCtx(C) +case '**': +return new $DoubleStarArgCtx(C)}} +$_SyntaxError(C,'token '+token+' after '+C) +case ')': +if(C.parent.kwargs && +$B.last(C.parent.tree).tree[0]&& +['kwarg','star_arg','double_star_arg']. +indexOf($B.last(C.parent.tree).tree[0].type)==-1){$_SyntaxError(C,['non-keyword argument after keyword argument'])} +if(C.tree.length > 0){var son=C.tree[C.tree.length-1] +if(son.type=='list_or_tuple' && +son.real=='gen_expr'){son.intervals.push($pos)}} +return $transition(C.parent,token) +case ':': +if(C.expect==',' && +C.parent.parent.type=='lambda'){return $transition(C.parent.parent,token)} +break +case ',': +if(C.expect==','){if(C.parent.kwargs && +['kwarg','star_arg','double_star_arg']. +indexOf($B.last(C.parent.tree).tree[0].type)==-1){$_SyntaxError(C,['non-keyword argument after keyword argument'])} +return $transition(C.parent,token,value)}} +console.log('token ',token,' after ' ,C) +$_SyntaxError(C,'token '+token+' after '+C)} +$CallArgCtx.prototype.to_js=function(){this.js_processed=true +return $to_js(this.tree)} var $CallCtx=$B.parser.$CallCtx=function(C){ this.type='call' this.func=C.tree[0] @@ -849,43 +1056,72 @@ C.args=this} this.expect='id' this.tree=[] this.start=$pos -this.toString=function(){return '(call) '+this.func+'('+this.tree+')'} if(this.func && this.func.type=="attribute" && this.func.name=="wait" && this.func.value.type=="id" && this.func.value.value=="time"){console.log('call',this.func) $get_node(this).blocking={'type':'wait','call':this}} -if(this.func && this.func.value=='input'){$get_node(this).blocking={'type':'input'}} -this.to_js=function(){this.js_processed=true -if(this.tree.length > 0){if(this.tree[this.tree.length-1].tree.length==0){ -this.tree.pop()}} -var func_js=this.func.to_js() -if(this.func !==undefined){switch(this.func.value){case 'classmethod': -return 'classmethod.$factory('+$to_js(this.tree)+')' -case '$$super': -if(this.tree.length==0){ -var scope=$get_scope(this) -if(scope.ntype=='def' ||scope.ntype=='generator'){var def_scope=$get_scope(scope.C.tree[0]) -if(def_scope.ntype=='class'){new $IdCtx(this,def_scope.C.tree[0].name)}}} -if(this.tree.length==1){ -var scope=$get_scope(this) -if(scope.ntype=='def' ||scope.ntype=='generator'){var args=scope.C.tree[0].args -if(args.length > 0){var missing_id=new $IdCtx(this,args[0]) -missing_id.to_js=function(){return "[$locals['"+args[0]+"']]"}}}} -break -default: -if(this.func.type=='unary'){ -var res='$B.$getattr('+$to_js(this.tree) -switch(this.func.op){case '+': -return res+',"__pos__")()' -case '-': -return res+',"__neg__")()' -case '~': -return res+',"__invert__")()'}}} -var _block=false -var positional=[],kw_args=[],star_args=false,dstar_args=[] -this.tree.forEach(function(arg){var type -switch(arg.type){case 'star_arg': -star_args=true -positional.push([arg.tree[0].tree[0].to_js(),'*']) +if(this.func && this.func.value=='input'){$get_node(this).blocking={'type':'input'}}} +$CallCtx.prototype.toString=function(){return '(call) '+this.func+'('+this.tree+')'} +$CallCtx.prototype.transition=function(token,value){var C=this +switch(token){case ',': +if(C.expect=='id'){$_SyntaxError(C,token)} +C.expect='id' +return C +case 'await': +case 'id': +case 'imaginary': +case 'int': +case 'float': +case 'str': +case 'bytes': +case '[': +case '(': +case '{': +case '.': +case 'not': +case 'lambda': +C.expect=',' +return $transition(new $CallArgCtx(C),token,value) +case ')': +C.end=$pos +return C.parent +case 'op': +C.expect=',' +switch(value){case '-': +case '~': +case '+': +C.expect=',' 
+return $transition(new $CallArgCtx(C),token,value) +case '*': +C.has_star=true +return new $StarArgCtx(C) +case '**': +C.has_dstar=true +return new $DoubleStarArgCtx(C)} +$_SyntaxError(C,token) +case 'yield': +$_SyntaxError(C,token)} +return $transition(C.parent,token,value)} +$CallCtx.prototype.to_js=function(){this.js_processed=true +if(this.tree.length > 0){if(this.tree[this.tree.length-1].tree.length==0){ +this.tree.pop()}} +var func_js=this.func.to_js() +if(this.func !==undefined){switch(this.func.value){case 'classmethod': +return '_b_.classmethod.$factory('+$to_js(this.tree)+')' +default: +if(this.func.type=='unary'){ +var res='$B.$getattr('+$to_js(this.tree) +switch(this.func.op){case '+': +return res+',"__pos__")()' +case '-': +return res+',"__neg__")()' +case '~': +return res+',"__invert__")()'}}} +var _block=false +var positional=[],kw_args=[],star_args=false,dstar_args=[] +this.tree.forEach(function(arg){var type +switch(arg.type){case 'star_arg': +star_args=true +positional.push([arg.tree[0].tree[0].to_js(),'*']) break case 'double_star_arg': dstar_args.push(arg.tree[0].tree[0].to_js()) @@ -945,7 +1181,7 @@ var classes=["complex","bytes","bytearray","object","memoryview","int","float"," if($B.builtin_funcs[this.func.value]!==undefined){if(classes.indexOf(this.func.value)==-1){ return func_js+args_str}else{ return func_js+".$factory"+args_str}}} -return default_res}}} +return default_res}} var $ClassCtx=$B.parser.$ClassCtx=function(C){ this.type='class' this.parent=C @@ -955,47 +1191,71 @@ this.expect='id' var scope=this.scope=$get_scope(this) this.parent.node.parent_block=scope this.parent.node.bound={} -this.parent.node.binding={__annotations__:true} -this.toString=function(){return '(class) '+this.name+' '+this.tree+' args '+this.args} -this.set_name=function(name){this.random=$B.UUID() +this.parent.node.binding={__annotations__:true}} +$ClassCtx.prototype.toString=function(){return '(class) '+this.name+' '+this.tree+' args '+this.args} +$ClassCtx.prototype.transition=function(token,value){var C=this +switch(token){case 'id': +if(C.expect=='id'){C.set_name(value) +C.expect='(:' +return C} +break +case '(': +return new $CallCtx(C) +case ':': +return $BodyCtx(C)} +$_SyntaxError(C,'token '+token+' after '+C)} +$ClassCtx.prototype.set_name=function(name){var C=this.parent +this.random=$B.UUID() this.name=name this.id=C.node.module+'_'+name+'_'+this.random this.binding={} this.parent.node.id=this.id -var parent_block=scope +var scope=this.scope,parent_block=scope +var block=scope,parent_classes=[] +while(block.ntype=="class"){parent_classes.splice(0,0,block.C.tree[0].name) +block=block.parent} +this.qualname=parent_classes.concat([name]).join(".") while(parent_block.C && parent_block.C.tree[0].type=='class'){parent_block=parent_block.parent} while(parent_block.C && 'def' !=parent_block.C.tree[0].type && 'generator' !=parent_block.C.tree[0].type){parent_block=parent_block.parent} this.parent.node.parent_block=parent_block -$bind(name,this.scope,this) +$bind(name,scope,this) if(scope.is_function){if(scope.C.tree[0].locals.indexOf(name)==-1){scope.C.tree[0].locals.push(name)}}} -this.transform=function(node,rank){ +$ClassCtx.prototype.transform=function(node,rank){ this.doc_string=$get_docstring(node) +this.module=$get_module(this).module.replace(/\./g,'_') var indent='\n'+' '.repeat(node.indent+12),instance_decl=new $Node(),local_ns='$locals_'+this.id.replace(/\./g,'_'),js='var '+local_ns+' = {'+ -'__annotations__: _b_.dict.$factory()}, '+ -indent+'$locals = '+local_ns+', '+ 
-indent+'$local_name = "'+local_ns+'",' +'__annotations__: $B.empty_dict()}, '+ +indent+'$locals = '+local_ns new $NodeJSCtx(instance_decl,js) node.insert(0,instance_decl) var global_scope=this.scope while(global_scope.parent_block.id !=='__builtins__'){global_scope=global_scope.parent_block} var global_ns='$locals_'+global_scope.id.replace(/\./g,'_') var js=' '.repeat(node.indent+4)+ -'$top_frame = [$local_name, $locals,'+'"'+ +'$locals.$name = "'+this.name+'"'+indent+ +'$locals.$qualname = "'+this.qualname+'"'+indent+ +'$locals.$is_class = true; '+indent+ +'$locals.$line_info = "'+node.line_num+','+ +this.module+'";'+indent+ +'var $top_frame = ["'+local_ns+'", $locals,'+'"'+ global_scope.id+'", '+global_ns+']'+ -indent+'$B.frames_stack.push($top_frame)' +indent+'$locals.$f_trace = $B.enter_frame($top_frame);'+ +indent+'if($locals.$f_trace !== _b_.None){'+ +'$locals.$f_trace = $B.trace_line()}' node.insert(1,$NodeJS(js)) -node.add($NodeJS('$B.leave_frame()')) +node.add($NodeJS('if($locals.$f_trace !== _b_.None){'+ +'$B.trace_return(_b_.None)}')) +node.add($NodeJS('$B.leave_frame({$locals})')) var ret_obj=new $Node() new $NodeJSCtx(ret_obj,'return '+local_ns+';') node.insert(node.children.length,ret_obj) var run_func=new $Node() new $NodeJSCtx(run_func,')();') node.parent.insert(rank+1,run_func) -var module_name='$locals_'+ -$get_module(this).module.replace(/\./g,'_')+'.__name__' +var module_name='$locals_'+this.module+'.__name__' rank++ node.parent.insert(rank+1,$NodeJS('$'+this.name+'_'+this.random+".__module__ = "+ module_name)) @@ -1008,7 +1268,7 @@ if(this.args !==undefined){ var arg_tree=this.args.tree,args=[],kw=[] arg_tree.forEach(function(_tmp){if(_tmp.tree[0].type=='kwarg'){kw.push(_tmp.tree[0])} else{args.push(_tmp.to_js())}}) -js[pos++]=',tuple.$factory(['+args.join(',')+']),[' +js[pos++]=', _b_.tuple.$factory(['+args.join(',')+']),[' var _re=new RegExp('"','g'),_r=[],rpos=0 args.forEach(function(arg){_r[rpos++]='"'+arg.replace(_re,'\\"')+'"'}) js[pos++]=_r.join(',')+']' @@ -1017,7 +1277,7 @@ rpos=0 kw.forEach(function(_tmp){_r[rpos++]='["'+_tmp.tree[0].value+'",'+ _tmp.tree[1].to_js()+']'}) js[pos++]=',['+_r.join(',')+']'}else{ -js[pos++]=',tuple.$factory([]),[],[]'} +js[pos++]=', _b_.tuple.$factory([]),[],[]'} js[pos++]=')' var cl_cons=new $Node() new $NodeJSCtx(cl_cons,js.join('')) @@ -1025,99 +1285,137 @@ rank++ node.parent.insert(rank+1,cl_cons) rank++ var ds_node=new $Node() -js=name_ref+'.__doc__ = '+(this.doc_string ||'None')+';' +js=name_ref+'.__doc__ = '+(this.doc_string ||'_b_.None')+';' new $NodeJSCtx(ds_node,js) node.parent.insert(rank+1,ds_node) if(scope.ntype=='module'){var w_decl=new $Node() new $NodeJSCtx(w_decl,'$locals["'+this.name+'"] = '+ this.name)} -node.parent.insert(rank+2,$NodeJS("None;")) +node.parent.insert(rank+2,$NodeJS("_b_.None;")) this.transformed=true} -this.to_js=function(){this.js_processed=true -return 'var $'+this.name+'_'+this.random+' = (function()'}} +$ClassCtx.prototype.to_js=function(){this.js_processed=true +return 'var $'+this.name+'_'+this.random+' = (function()'} var $CompIfCtx=$B.parser.$CompIfCtx=function(C){ this.type='comp_if' C.parent.intervals.push($pos) this.parent=C this.tree=[] -C.tree[C.tree.length]=this -this.toString=function(){return '(comp if) '+this.tree} -this.to_js=function(){this.js_processed=true -return $to_js(this.tree)}} +C.tree[C.tree.length]=this} +$CompIfCtx.prototype.toString=function(){return '(comp if) '+this.tree} +$CompIfCtx.prototype.transition=function(token,value){var C=this +return 
$transition(C.parent,token,value)} +$CompIfCtx.prototype.to_js=function(){this.js_processed=true +return $to_js(this.tree)} var $ComprehensionCtx=$B.parser.$ComprehensionCtx=function(C){ this.type='comprehension' this.parent=C this.tree=[] -C.tree[C.tree.length]=this -this.toString=function(){return '(comprehension) '+this.tree} -this.to_js=function(){this.js_processed=true +C.tree[C.tree.length]=this} +$ComprehensionCtx.prototype.toString=function(){return '(comprehension) '+this.tree} +$ComprehensionCtx.prototype.transition=function(token,value){var C=this +switch(token){case 'if': +return new $AbstractExprCtx(new $CompIfCtx(C),false) +case 'for': +return new $TargetListCtx(new $CompForCtx(C))} +return $transition(C.parent,token,value)} +$ComprehensionCtx.prototype.to_js=function(){this.js_processed=true var intervals=[] this.tree.forEach(function(elt){intervals.push(elt.start)}) -return intervals}} +return intervals} var $CompForCtx=$B.parser.$CompForCtx=function(C){ this.type='comp_for' C.parent.intervals.push($pos) this.parent=C this.tree=[] this.expect='in' -C.tree[C.tree.length]=this -this.toString=function(){return '(comp for) '+this.tree} -this.to_js=function(){this.js_processed=true -return $to_js(this.tree)}} +C.tree[C.tree.length]=this} +$CompForCtx.prototype.toString=function(){return '(comp for) '+this.tree} +$CompForCtx.prototype.transition=function(token,value){var C=this +if(token=='in' && C.expect=='in'){C.expect=null +return new $AbstractExprCtx(new $CompIterableCtx(C),true)} +if(C.expect===null){ +return $transition(C.parent,token,value)} +$_SyntaxError(C,'token '+token+' after '+C)} +$CompForCtx.prototype.to_js=function(){this.js_processed=true +return $to_js(this.tree)} var $CompIterableCtx=$B.parser.$CompIterableCtx=function(C){ this.type='comp_iterable' this.parent=C this.tree=[] -C.tree[C.tree.length]=this -this.toString=function(){return '(comp iter) '+this.tree} -this.to_js=function(){this.js_processed=true -return $to_js(this.tree)}} +C.tree[C.tree.length]=this} +$CompIterableCtx.prototype.toString=function(){return '(comp iter) '+this.tree} +$CompIterableCtx.prototype.transition=function(token,value){var C=this +return $transition(C.parent,token,value)} +$CompIterableCtx.prototype.to_js=function(){this.js_processed=true +return $to_js(this.tree)} var $ConditionCtx=$B.parser.$ConditionCtx=function(C,token){ this.type='condition' this.token=token this.parent=C this.tree=[] +this.scope=$get_scope(this) if(token=='while'){this.loop_num=$loop_num++} -C.tree[C.tree.length]=this -this.toString=function(){return this.token+' '+this.tree} -this.transform=function(node,rank){var scope=$get_scope(this) -if(this.token=="while"){if(scope.ntype=="generator"){this.parent.node.loop_start=this.loop_num} -node.parent.insert(rank,$NodeJS('$locals["$no_break'+this.loop_num+'"] = true')) +C.tree[C.tree.length]=this} +$ConditionCtx.prototype.toString=function(){return this.token+' '+this.tree} +$ConditionCtx.prototype.transition=function(token,value){var C=this +if(token==':'){if(C.tree[0].type=="abstract_expr" && +C.tree[0].tree.length==0){ +$_SyntaxError(C,'token '+token+' after '+C)} +return $BodyCtx(C)} +$_SyntaxError(C,'token '+token+' after '+C)} +$ConditionCtx.prototype.transform=function(node,rank){var scope=$get_scope(this) +if(this.token=="while"){node.parent.insert(rank,$NodeJS('$locals["$no_break'+this.loop_num+'"] = true')) +var module=$get_module(this).module +if($B.last(node.children).C.tree[0].type !="return"){var js='$locals.$line_info = "'+node.line_num+ 
+','+module+'";if($locals.$f_trace !== _b_.None){'+ +'$B.trace_line()};_b_.None;' +node.add($NodeJS(js))} return 2}} -this.to_js=function(){this.js_processed=true +$ConditionCtx.prototype.to_js=function(){this.js_processed=true var tok=this.token if(tok=='elif'){tok='else if'} var res=[tok+'($B.$bool('] if(tok=='while'){res.push('$locals["$no_break'+this.loop_num+'"] && ')}else if(tok=='else if'){var line_info=$get_node(this).line_num+','+ $get_scope(this).id -res.push('($locals.$line_info = "'+line_info+'") && ')} +res.push('($B.set_line("'+line_info+'")) && ')} if(this.tree.length==1){res.push($to_js(this.tree)+'))')}else{ res.push(this.tree[0].to_js()+'))') if(this.tree[1].tree.length > 0){res.push('{'+this.tree[1].to_js()+'}')}} -return res.join('')}} +return res.join('')} var $ContinueCtx=$B.parser.$ContinueCtx=function(C){ this.type='continue' this.parent=C $get_node(this).is_continue=true C.tree[C.tree.length]=this -set_loop_context.apply(this,[C,'continue']) -this.toString=function(){return '(continue)'} -this.to_js=function(){this.js_processed=true -return 'continue'}} +set_loop_context.apply(this,[C,'continue'])} +$ContinueCtx.prototype.toString=function(){return '(continue)'} +$ContinueCtx.prototype.transition=function(token,value){var C=this +if(token=='eol'){return C.parent} +$_SyntaxError(C,'token '+token+' after '+C)} +$ContinueCtx.prototype.to_js=function(){this.js_processed=true +var js='continue' +if(this.loop_ctx.has_break){ +js=`$locals["$no_break${this.loop_ctx.loop_num}"]=true;${js}`} +return js} var $DebuggerCtx=$B.parser.$DebuggerCtx=function(C){ this.type='continue' this.parent=C -C.tree[C.tree.length]=this -this.toString=function(){return '(debugger)'} -this.to_js=function(){this.js_processed=true -return 'debugger'}} +C.tree[C.tree.length]=this} +$DebuggerCtx.prototype.toString=function(){return '(debugger)'} +$DebuggerCtx.prototype.transition=function(token,value){var C=this} +$DebuggerCtx.prototype.to_js=function(){this.js_processed=true +return 'debugger'} var $DecoratorCtx=$B.parser.$DecoratorCtx=function(C){ this.type='decorator' this.parent=C C.tree[C.tree.length]=this -this.tree=[] -this.toString=function(){return '(decorator) '+this.tree} -this.transform=function(node,rank){var func_rank=rank+1,children=node.parent.children,decorators=[this.tree] +this.tree=[]} +$DecoratorCtx.prototype.toString=function(){return '(decorator) '+this.tree} +$DecoratorCtx.prototype.transition=function(token,value){var C=this +if(token=='id' && C.tree.length==0){return $transition(new $AbstractExprCtx(C,false),token,value)} +if(token=='eol'){return $transition(C.parent,token)} +$_SyntaxError(C,'token '+token+' after '+C)} +$DecoratorCtx.prototype.transform=function(node,rank){var func_rank=rank+1,children=node.parent.children,decorators=[this.tree] while(1){if(func_rank >=children.length){$_SyntaxError(C,['decorator expects function'])} else if(children[func_rank].C.type=='node_js'){func_rank++}else if(children[func_rank].C.tree[0].type== 'decorator'){decorators.push(children[func_rank].C.tree[0].tree) @@ -1129,7 +1427,7 @@ var obj=children[func_rank].C.tree[0] if(obj.type=='def'){obj.decorated=true obj.alias='$dec'+$B.UUID()} var tail='',scope=$get_scope(this),ref='$locals["' -if($B._globals[scope.id]&& $B._globals[scope.id][obj.name]){var module=$get_module(this) +if(scope.globals && scope.globals.has(obj.name)){var module=$get_module(this) ref='$locals_'+module.id+'["'} ref+=obj.name+'"]' var res=ref+' = ' @@ -1139,34 +1437,17 @@ res+=(obj.decorated ? 
obj.alias :ref)+tail+';' $bind(obj.name,scope,this) node.parent.insert(func_rank+1,$NodeJS(res)) this.decorators=decorators} -this.to_js=function(){this.js_processed=true +$DecoratorCtx.prototype.to_js=function(){this.js_processed=true var res=[] this.decorators.forEach(function(decorator,i){res.push('var '+this.dec_ids[i]+' = '+ $to_js(decorator)+';')},this) -return res.join('')}} -var $DecoratorExprCtx=$B.parser.$DecoratorExprCtx=function(C){ -this.type='decorator_expression' -this.parent=C -C.tree[C.tree.length]=this -this.names=[] -this.tree=[] -this.is_call=false -this.toString=function(){return '(decorator expression)'} -this.to_js=function(){this.js_processed=true -var func=new $IdCtx(this,this.names[0]) -var obj=func.to_js() -this.names.slice(1).forEach(function(name){obj="_b_.getattr("+obj+", '"+name+"')"}) -if(this.tree.length > 1){ -this.tree[0].func={to_js:function(){return obj}} -return this.tree[0].to_js()} -return obj}} +return res.join('')} var $DefCtx=$B.parser.$DefCtx=function(C){this.type='def' this.name=null this.parent=C this.tree=[] this.async=C.async this.locals=[] -this.yields=[] C.tree[C.tree.length]=this this.enclosing=[] var scope=this.scope=$get_scope(this) @@ -1176,10 +1457,10 @@ var parent_block=scope while(parent_block.C && parent_block.C.tree[0].type=='class'){parent_block=parent_block.parent} while(parent_block.C && -'def' !=parent_block.C.tree[0].type && -'generator' !=parent_block.C.tree[0].type){parent_block=parent_block.parent} +'def' !=parent_block.C.tree[0].type){parent_block=parent_block.parent} this.parent.node.parent_block=parent_block var pb=parent_block +this.is_comp=pb.is_comp while(pb && pb.C){if(pb.C.tree[0].type=='def'){this.inside_function=true break} pb=pb.parent_block} @@ -1191,8 +1472,8 @@ this.positional_list=[] this.default_list=[] this.other_args=null this.other_kw=null -this.after_star=[] -this.set_name=function(name){try{name=$mangle(name,this.parent.tree[0])}catch(err){console.log(err) +this.after_star=[]} +$DefCtx.prototype.set_name=function(name){try{name=$mangle(name,this.parent.tree[0])}catch(err){console.log(err) console.log('parent',this.parent) throw err} var id_ctx=new $IdCtx(this,name) @@ -1203,13 +1484,30 @@ this.id+='_'+$B.UUID() this.parent.node.id=this.id this.parent.node.module=this.module this.binding={} -if($B._globals[this.scope.id]!==undefined && -$B._globals[this.scope.id][name]!==undefined){ -$bind(name,this.root,this)}else{$bind(name,this.scope,this)} +var scope=this.scope +if(scope.globals !==undefined && +scope.globals.has(name)){ +$bind(name,this.root,this)}else{$bind(name,scope,this)} id_ctx.bound=true if(scope.is_function){if(scope.C.tree[0].locals.indexOf(name)==-1){scope.C.tree[0].locals.push(name)}}} -this.toString=function(){return 'def '+this.name+'('+this.tree+')'} -this.transform=function(node,rank){ +$DefCtx.prototype.toString=function(){return 'def '+this.name+'('+this.tree+')'} +$DefCtx.prototype.transition=function(token,value){var C=this +switch(token){case 'id': +if(C.name){$_SyntaxError(C,'token '+token+' after '+C)} +C.set_name(value) +return C +case '(': +if(C.name==null){$_SyntaxError(C,"missing name in function definition")} +C.has_args=true; +return new $FuncArgs(C) +case 'annotation': +return new $AbstractExprCtx(new $AnnotationCtx(C),true) +case ':': +if(C.has_args){return $BodyCtx(C)}else{$_SyntaxError(C,"missing function parameters")} +case 'eol': +if(C.has_args){$_SyntaxError(C,"missing colon")}} +$_SyntaxError(C,'token '+token+' after '+C)} 
+$DefCtx.prototype.transform=function(node,rank){if(this.is_comp){$get_node(this).is_comp=true} if(this.transformed !==undefined){return} var scope=this.scope this.doc_string=$get_docstring(node) @@ -1220,7 +1518,7 @@ if(pblock.C && pblock.C.tree[0].type=="def"){this.enclosing.push(pblock)}} var pnode=this.parent.node while(pnode.parent && pnode.parent.is_def_func){this.enclosing.push(pnode.parent.parent) pnode=pnode.parent.parent} -var defaults=[],defs1=[] +var defaults=[],defs1=[],has_end_pos=false this.argcount=0 this.kwonlyargcount=0 this.kwonlyargsdefaults=[] @@ -1236,8 +1534,10 @@ var func_name1=this.func_name if(this.decorated){this.func_name='var '+this.alias func_name1=this.alias} var func_args=this.tree[1].tree -func_args.forEach(function(arg){this.args.push(arg.name) -this.varnames[arg.name]=true +func_args.forEach(function(arg){if(arg.type=='end_positional'){this.args.push("/") +slot_list.push('"/"') +has_end_pos=true}else{this.args.push(arg.name) +this.varnames[arg.name]=true} if(arg.type=='func_arg_id'){if(this.star_arg){this.kwonlyargcount++ if(arg.has_default){this.kwonlyargsdefaults.push(arg.name)}} else{this.argcount++ @@ -1249,7 +1549,8 @@ if(arg.tree.length > 0){defaults.push('"'+arg.name+'"') defs1.push(arg.name+':'+$to_js(arg.tree)) this.__defaults__.push($to_js(arg.tree))}}else if(arg.type=='func_star_arg'){if(arg.op=='*'){this.star_arg=arg.name} else if(arg.op=='**'){this.kw_arg=arg.name}} -if(arg.annotation){annotations.push(arg.name+': '+arg.annotation.to_js())}},this) +if(arg.annotation){var name=$mangle(arg.name,this) +annotations.push(name+': '+arg.annotation.to_js())}},this) slot_init='{'+slot_init.join(", ")+'}' var flags=67 if(this.star_arg){flags |=4} @@ -1261,42 +1562,36 @@ var global_scope=scope while(global_scope.parent_block && global_scope.parent_block.id !=='__builtins__'){global_scope=global_scope.parent_block} var global_ns='$locals_'+global_scope.id.replace(/\./g,'_') -var prefix=this.tree[0].to_js() -if(this.decorated){prefix=this.alias} var name=this.name+this.num var local_ns='$locals_'+this.id,h='\n'+' '.repeat(indent) js='var '+local_ns+' = {},'+ -h+'$local_name = "'+this.id+ -'",'+h+'$locals = '+local_ns+';' +h+'$locals = '+local_ns+';' var new_node=new $Node() new_node.locals_def=true new_node.func_node=node new $NodeJSCtx(new_node,js) nodes.push(new_node) -var enter_frame_nodes=[$NodeJS('var $top_frame = [$local_name, $locals,'+ -'"'+global_scope.id+'", '+global_ns+', '+name+']'),$NodeJS('$B.frames_stack.push($top_frame)'),$NodeJS('var $stack_length = $B.frames_stack.length') +var enter_frame_nodes=[$NodeJS('$locals.$line_info = "'+node.line_num+','+ +this.module+'"'),$NodeJS(`var $top_frame=["${this.id}",$locals,`+ +'"'+global_scope.id+'", '+global_ns+', '+ +(this.is_comp ? this.name :name)+']'),$NodeJS('$locals.$f_trace = $B.enter_frame($top_frame)'),$NodeJS('var $stack_length = $B.frames_stack.length;') ] -if(this.async){enter_frame_nodes.push($NodeJS("var $stack = "+ -"$B.frames_stack.slice()"))} +if(this.type=="generator"){enter_frame_nodes.push($NodeJS("$locals.$is_generator = true"))} +if(this.async){enter_frame_nodes.splice(1,0,$NodeJS(`$locals.$async="${this.id}"`))} enter_frame_nodes.forEach(function(node){node.enter_frame=true}) -nodes.push($NodeJS("var $nb_defaults = Object.keys($defaults).length,")) -nodes.push($NodeJS(" $parent = $locals.$parent")) +if(this.is_comp){nodes.push($NodeJS("var $defaults = {}"))} this.env=[] var make_args_nodes=[] -var js=this.type=='def' ? 
local_ns+' = $locals' :'var $ns' -js+=' = $B.args("'+this.name+'", '+ +var js=local_ns+' = $locals = $B.args("'+this.name+'", '+ this.argcount+', {'+this.slots.join(', ')+'}, '+ '['+slot_list.join(', ')+'], arguments, $defaults, '+ this.other_args+', '+this.other_kw+');' var new_node=new $Node() new $NodeJSCtx(new_node,js) make_args_nodes.push(new_node) -if(this.type=='generator'){ -js='for(var $var in $ns){$locals[$var] = $ns[$var]};' -make_args_nodes.push($NodeJS(js))} var only_positional=false if(this.other_args===null && this.other_kw===null && -this.after_star.length==0){ +this.after_star.length==0 && !has_end_pos){ only_positional=true nodes.push($NodeJS('var $len = arguments.length;')) var new_node=new $Node() @@ -1310,40 +1605,28 @@ nodes.push(else_node) var pos_len=this.slots.length var test_node=$NodeJS('if($len == '+pos_len+')') else_node.add(test_node) -if(this.type=='generator'){if(this.args.length==0){test_node.add($NodeJS('//'))} -else{this.args.forEach(function(arg){test_node.add($NodeJS('$locals["'+arg+'"] = '+ -arg))})}}else{test_node.add($NodeJS(local_ns+ -' = $locals = '+slot_init))} +test_node.add($NodeJS(local_ns+' = $locals = '+slot_init)) else_node.add($NodeJS('else if($len > '+pos_len+ '){$B.wrong_nb_args("'+this.name+'", $len, '+ pos_len+', ['+slot_list+'])}')) if(pos_len > 0){ -else_node.add($NodeJS('else if($len + $nb_defaults < '+ +else_node.add($NodeJS('else if($len + Object.keys($defaults).length < '+ pos_len+'){$B.wrong_nb_args("'+this.name+ '", $len, '+pos_len+', ['+slot_list+'])}')) subelse_node=$NodeJS("else") else_node.add(subelse_node) -if(this.type=='generator'){this.args.forEach(function(arg){subelse_node.add($NodeJS('$locals["'+arg+'"] = '+ -arg))})}else{subelse_node.add($NodeJS(local_ns+ -' = $locals = '+slot_init))} +subelse_node.add($NodeJS(local_ns+' = $locals = '+slot_init)) subelse_node.add($NodeJS("var defparams = ["+slot_list+"]")) -subelse_node.add($NodeJS("for(var i=$len; i < defparams.length"+ -";i++){$locals[defparams[i]] = $defaults[defparams[i]]}"))}}else{nodes.push(make_args_nodes[0]) +subelse_node.add($NodeJS("for(var i = $len; i < defparams.length"+ +"; i++){$locals[defparams[i]] = $defaults[defparams[i]]}"))}}else{nodes.push(make_args_nodes[0]) if(make_args_nodes.length > 1){nodes.push(make_args_nodes[1])}} nodes=nodes.concat(enter_frame_nodes) -nodes.push($NodeJS('$locals.__annotations__ = _b_.dict.$factory()')) -nodes.push($NodeJS('$top_frame[1] = $locals')) -nodes.push($NodeJS('$locals.$parent = $parent')) var is_method=scope.ntype=="class" -if(is_method){var class_name=scope.C.tree[0].name,class_block=scope.parent_block,class_ref="$locals_"+class_block.id.replace(/\./g,'_')+ -'["'+class_name+'"]' +if(is_method){class_ref="$locals_"+scope.parent_block.id.replace(/\./g,'_')+ +'.'+scope.C.tree[0].qualname this.parent.node.binding["__class__"]=true nodes.push($NodeJS("$locals.__class__ = "+class_ref))} nodes.push($NodeJS('$B.js_this = this;')) -if(this.type=="generator"){var suspension_node=$NodeJS("// suspension") -suspension_node.is_set_yield_value=true -suspension_node.num=node.num -nodes.push(suspension_node)} for(var i=nodes.length-1;i >=0;i--){node.children.splice(0,0,nodes[i])} var def_func_node=new $Node() this.params='' @@ -1351,14 +1634,22 @@ if(only_positional){this.params=Object.keys(this.varnames).join(', ')} new $NodeJSCtx(def_func_node,'') def_func_node.is_def_func=true def_func_node.module=this.module -var last_instr=node.children[node.children.length-1].C.tree[0] -if(last_instr.type !='return' && this.type 
!='generator'){ -var js='$B.leave_frame' +var last_node=node.children[node.children.length-1],indent=last_node.indent,last_instr=last_node.C.tree[0] +if(last_instr.type !='return'){ +js='if($locals.$f_trace !== _b_.None){\n'+ +' '.repeat(indent)+'$B.trace_return(_b_.None)\n'+ +' '.repeat(indent)+'}\n'+' '.repeat(indent) +js+='$B.leave_frame' if(this.id.substr(0,5)=='$exec'){js+='_exec'} -node.add($NodeJS(js+'();return None'))} +js+='({$locals});return _b_.None' +node.add($NodeJS(js))} +var free_vars=[] +if(this.parent.node.referenced){for(var attr in this.parent.node.referenced){if(! this.parent.node.binding[attr]){free_vars.push('"'+attr+'"')}}} node.add(def_func_node) var offset=1,indent=node.indent +if(! this.is_comp){ node.parent.insert(rank+offset++,$NodeJS(name+'.$is_func = true')) +if(this.$has_yield_in_cm){node.parent.insert(rank+offset++,$NodeJS(name+'.$has_yield_in_cm = true'))} node.parent.insert(rank+offset++,$NodeJS(name+'.$infos = {')) var __name__=this.name if(this.name.substr(0,2)=="$$"){__name__=__name__.substr(2)} @@ -1369,7 +1660,6 @@ var __qualname__=__name__ if(this.class_name){__qualname__=this.class_name+'.'+__name__} js=' __qualname__:"'+__qualname__+'",' node.parent.insert(rank+offset++,$NodeJS(js)) -if(this.type !="generator"){ if(this.otherdefaults.length > 0){var def_names=[] this.otherdefaults.forEach(function(_default){def_names.push('$defaults.'+_default)}) node.parent.insert(rank+offset++,$NodeJS(' __defaults__ : '+ @@ -1379,50 +1669,48 @@ if(this.kwonlyargsdefaults.lengh > 0){var def_names=[] this.kwonlyargsdefaults.forEach(function(_default){def_names.push('$defaults.'+_default)}) node.parent.insert(rank+offset++,$NodeJS(' __kwdefaults__ : '+ '$B.fast_tuple(['+def_names.join(', ')+']),'))}else{node.parent.insert(rank+offset++,$NodeJS(' __kwdefaults__ : '+ -'_b_.None,'))}} +'_b_.None,'))} node.parent.insert(rank+offset++,$NodeJS(' __annotations__: {'+annotations.join(',')+'},')) -node.parent.insert(rank+offset++,$NodeJS(' __dict__: {__class__: _b_.dict, $string_dict: {},'+ -'$str_hash: {}, $numeric_dict: {}, $object_dict:{}},')) -node.parent.insert(rank+offset++,$NodeJS(' __doc__: '+(this.doc_string ||'None')+',')) +node.parent.insert(rank+offset++,$NodeJS(' __dict__: $B.empty_dict(),')) +node.parent.insert(rank+offset++,$NodeJS(' __doc__: '+(this.doc_string ||'_b_.None')+',')) var root=$get_module(this) node.parent.insert(rank+offset++,$NodeJS(' __module__ : "'+root.module+'",')) for(var attr in this.binding){this.varnames[attr]=true} var co_varnames=[] -for(var attr in this.varnames){co_varnames.push('"'+attr+'"')} -var free_vars=[] -if(this.parent.node.referenced){for(var attr in this.parent.node.referenced){if(! 
this.parent.node.binding[attr]){free_vars.push('"'+attr+'"')}}} +for(var attr in this.varnames){co_varnames.push('"'+$B.from_alias(attr)+'"')} var CODE_MARKER='___%%%-CODE-%%%___'+this.name+this.num; var h='\n'+' '.repeat(indent+8) js=' __code__:{'+h+' co_argcount:'+this.argcount var h1=','+h+' '.repeat(4) var module=$get_module(this).module js+=h1+'co_filename:$locals_'+module.replace(/\./g,'_')+ -'["__file__"]'+ +'["__file__"] || ""'+ h1+'co_firstlineno:'+node.line_num+ h1+'co_flags:'+flags+ h1+'co_freevars: ['+free_vars+']'+ h1+'co_kwonlyargcount:'+this.kwonlyargcount+ h1+'co_name: "'+this.name+'"'+ h1+'co_nlocals: '+co_varnames.length+ -h1+'co_varnames: ['+co_varnames.join(', ')+']'+ +h1+'co_posonlyargcount: '+(this.pos_only ||0)+ +h1+'co_varnames: $B.fast_tuple(['+co_varnames.join(', ')+'])'+ h+'}\n'+' '.repeat(indent+4)+'};' -js+='None;' -node.parent.insert(rank+offset++,$NodeJS(js)) +js+='_b_.None;' +node.parent.insert(rank+offset++,$NodeJS(js))} this.default_str='{'+defs1.join(', ')+'}' -if(this.type=="def"){ -node.parent.insert(rank+offset++,new $MarkerNode('func_end:'+ -CODE_MARKER)) -var res='return '+name -if(this.async){res='return $B.make_async('+name+')'} +if(! this.is_comp){var name1=name +if(this.type=="generator"){name1=`$B.generator.$factory(${name})`} +var res='return '+name1 +if(this.async){if(this.type=="generator"){res=`return $B.async_generator.$factory(${name})`}else{res='return $B.make_async('+name1+')'}} node.parent.insert(rank+offset++,$NodeJS(res+'}')) node.parent.insert(rank+offset++,$NodeJS( -func_name1+" = "+this.name+'$'+this.num+ +this.func_name+" = "+this.name+'$'+this.num+ '('+this.default_str+')')) node.parent.insert(rank+offset++,$NodeJS( func_name1+".$set_defaults = function(value){return "+ func_name1+" = "+this.name+"$"+this.num+ -"(value)}"))} -if(this.type=='def'){var parent=node +"(value)}")) +if(this.$has_yield_in_cm){node.parent.insert(rank+offset++,$NodeJS(`${func_name1}.$has_yield_in_cm=true`))}} +var parent=node for(var pos=0;pos < parent.children.length && parent.children[pos]!==$B.last(enter_frame_nodes);pos++){} var try_node=$NodeJS('try'),children=parent.children.slice(pos+1) @@ -1430,25 +1718,35 @@ parent.insert(pos+1,try_node) children.forEach(function(child){if(child.is_def_func){child.children.forEach(function(grand_child){try_node.add(grand_child)})}else{try_node.add(child)}}) parent.children.splice(pos+2,parent.children.length) var except_node=$NodeJS('catch(err)') -if(this.async){except_node.add($NodeJS('err.$stack = $stack'))} -except_node.add($NodeJS('$B.leave_frame();throw err')) -parent.add(except_node)} +except_node.add($NodeJS('$B.set_exc(err)')) +except_node.add($NodeJS('if($locals.$f_trace !== _b_.None){'+ +'$locals.$f_trace = $B.trace_exception()}')) +except_node.add($NodeJS('$B.leave_frame({$locals});throw err')) +parent.add(except_node) this.transformed=true return offset} -this.to_js=function(func_name){this.js_processed=true +$DefCtx.prototype.to_js=function(func_name){this.js_processed=true +if(this.is_comp){return "var "+this.name+" = "+ +(this.async ? ' async ' :'')+ +"function* (expr)"} func_name=func_name ||this.tree[0].to_js() if(this.decorated){func_name='var '+this.alias} return "var "+this.name+'$'+this.num+ ' = function($defaults){'+ -(this.async ? 'async ' :'')+'function '+ -this.name+this.num+'('+this.params+')'}} +(this.async ? 'async ' :'')+'function'+ +(this.type=='generator' ? 
"* " :" ")+ +this.name+this.num+'('+this.params+')'} var $DelCtx=$B.parser.$DelCtx=function(C){ this.type='del' this.parent=C C.tree[C.tree.length]=this -this.tree=[] -this.toString=function(){return 'del '+this.tree} -this.to_js=function(){this.js_processed=true +this.tree=[]} +$DelCtx.prototype.toString=function(){return 'del '+this.tree} +$DelCtx.prototype.transition=function(token,value){var C=this +if(token=='eol'){return $transition(C.parent,token)} +$_SyntaxError(C,'token '+token+' after '+C)} +$DelCtx.prototype.to_js=function(){this.js_processed=true +var C=this.parent if(this.tree[0].type=='list_or_tuple'){ var res=[] this.tree[0].tree.forEach(function(elt){var subdel=new $DelCtx(C) @@ -1482,14 +1780,14 @@ js=expr.to_js() expr.func='getitem' return js case 'op': -$_SyntaxError(this,["can't delete operator"]) +$_SyntaxError(this,["cannot delete operator"]) case 'call': -$_SyntaxError(this,["can't delete function call"]) +$_SyntaxError(this,["cannot delete function call"]) case 'attribute': -return 'delattr('+expr.value.to_js()+',"'+ +return '_b_.delattr('+expr.value.to_js()+',"'+ expr.name+'")' default: -$_SyntaxError(this,["can't delete "+expr.type])}}}} +$_SyntaxError(this,["cannot delete "+expr.type])}}} var $DictOrSetCtx=$B.parser.$DictOrSetCtx=function(C){ this.type='dict_or_set' this.real='dict_or_set' @@ -1498,20 +1796,115 @@ this.closed=false this.start=$pos this.parent=C this.tree=[] -C.tree[C.tree.length]=this -this.toString=function(){switch(this.real){case 'dict': +C.tree[C.tree.length]=this} +$DictOrSetCtx.prototype.toString=function(){switch(this.real){case 'dict': return '(dict) {'+this.items+'}' case 'set': return '(set) {'+this.tree+'}'} return '(dict_or_set) {'+this.tree+'}'} -this.nb_dict_items=function(){var nb=0 +$DictOrSetCtx.prototype.transition=function(token,value){var C=this +if(C.closed){switch(token){case '[': +return new $AbstractExprCtx(new $SubCtx(C.parent),false) +case '(': +return new $CallArgCtx(new $CallCtx(C.parent))} +return $transition(C.parent,token,value)}else{if(C.expect==','){switch(token){case '}': +switch(C.real){case 'dict_or_set': +if(C.tree.length !=1){break} +C.real='set' +case 'set': +case 'set_comp': +case 'dict_comp': +C.items=C.tree +C.tree=[] +C.closed=true +return C +case 'dict': +if(C.nb_dict_items()% 2==0){C.items=C.tree +C.tree=[] +C.closed=true +return C}} +$_SyntaxError(C,'token '+token+ +' after '+C) +case ',': +if(C.real=='dict_or_set'){C.real='set'} +if(C.real=='dict' && +C.nb_dict_items()% 2){$_SyntaxError(C,'token '+token+ +' after '+C)} +C.expect='id' +return C +case ':': +if(C.real=='dict_or_set'){C.real='dict'} +if(C.real=='dict'){C.expect=',' +return new $AbstractExprCtx(C,false)}else{$_SyntaxError(C,'token '+token+ +' after '+C)} +case 'for': +if(C.real=='dict_or_set'){C.real='set_comp'}else{C.real='dict_comp'} +var lst=new $ListOrTupleCtx(C,'dict_or_set_comp') +lst.intervals=[C.start+1] +lst.vars=C.vars +C.tree.pop() +lst.expression=C.tree +if(C.yields){lst.expression.yields=C.yields +delete C.yields} +C.tree=[lst] +lst.tree=[] +var comp=new $ComprehensionCtx(lst) +return new $TargetListCtx(new $CompForCtx(comp))} +$_SyntaxError(C,'token '+token+' after '+C)}else if(C.expect=='id'){switch(token){case '}': +if(C.tree.length==0){ +C.items=[] +C.real='dict'}else{ +C.items=C.tree} +C.tree=[] +C.closed=true +return C +case 'id': +case 'imaginary': +case 'int': +case 'float': +case 'str': +case 'bytes': +case '[': +case '(': +case '{': +case '.': +case 'not': +case 'lambda': +C.expect=',' +var expr=new 
$AbstractExprCtx(C,false) +return $transition(expr,token,value) +case 'op': +switch(value){case '*': +case '**': +C.expect="," +var expr=new $AbstractExprCtx(C,false) +expr.packed=value.length +if(C.real=="dict_or_set"){C.real=value=="*" ? "set" : +"dict"}else if( +(C.real=="set" && value=="**")|| +(C.real=="dict" && value=="*")){$_SyntaxError(C,'token '+token+ +' after '+C)} +return expr +case '+': +return C +case '-': +case '~': +C.expect=',' +var left=new $UnaryCtx(C,value) +if(value=='-'){var op_expr=new $OpCtx(left,'unary_neg')}else if(value=='+'){var op_expr=new $OpCtx(left,'unary_pos')}else{var op_expr=new $OpCtx(left,'unary_inv')} +return new $AbstractExprCtx(op_expr,false)} +$_SyntaxError(C,'token '+token+ +' after '+C)} +$_SyntaxError(C,'token '+token+' after '+C)} +return $transition(C.parent,token,value)}} +$DictOrSetCtx.prototype.nb_dict_items=function(){var nb=0 this.tree.forEach(function(item){if(item.packed){nb+=2} else{nb++}}) return nb} -this.packed_indices=function(){var ixs=[] +$DictOrSetCtx.prototype.packed_indices=function(){var ixs=[] this.items.forEach(function(t,i){if(t.type=="expr" && t.packed){ixs.push(i)}}) return ixs} -this.unpack_dict=function(packed){var js="",res,first,i=0,item,elts=[] +$DictOrSetCtx.prototype.unpack_dict=function(packed){var js="",res,first,i=0,item,elts=[] while(i < this.items.length){item=this.items[i] first=i==0 if(item.type=="expr" && item.packed){res="_b_.list.$factory(_b_.dict.items("+item.to_js()+"))" @@ -1521,12 +1914,12 @@ i+=2} if(! first){res=".concat("+res+")"} js+=res} return js} -this.unpack_set=function(packed){var js="",res +$DictOrSetCtx.prototype.unpack_set=function(packed){var js="",res this.items.forEach(function(t,i){if(packed.indexOf(i)>-1){res="_b_.list.$factory("+t.to_js()+")"}else{res="["+t.to_js()+"]"} if(i > 0){res=".concat("+res+")"} js+=res}) return js} -this.to_js=function(){this.js_processed=true +$DictOrSetCtx.prototype.to_js=function(){this.js_processed=true switch(this.real){case 'dict': var packed=this.packed_indices() if(packed.length > 0){return '_b_.dict.$factory('+this.unpack_dict(packed)+ @@ -1543,41 +1936,121 @@ case 'dict_comp': return '_b_.dict.$factory('+$to_js(this.items)+')'+ $to_js(this.tree)} var packed=this.packed_indices() -if(packed.length > 0){return 'set.$factory('+this.unpack_set(packed)+')'} -return 'set.$factory(['+$to_js(this.items)+'])'+$to_js(this.tree)}} +if(packed.length > 0){return '_b_.set.$factory('+this.unpack_set(packed)+')'} +return '_b_.set.$factory(['+$to_js(this.items)+'])'+$to_js(this.tree)} var $DoubleStarArgCtx=$B.parser.$DoubleStarArgCtx=function(C){ this.type='double_star_arg' this.parent=C this.tree=[] -C.tree[C.tree.length]=this -this.toString=function(){return '**'+this.tree} -this.to_js=function(){this.js_processed=true -return '{$nat:"pdict",arg:'+$to_js(this.tree)+'}'}} +C.tree[C.tree.length]=this} +$DoubleStarArgCtx.prototype.toString=function(){return '**'+this.tree} +$DoubleStarArgCtx.prototype.transition=function(token,value){var C=this +switch(token){case 'id': +case 'imaginary': +case 'int': +case 'float': +case 'str': +case 'bytes': +case '[': +case '(': +case '{': +case '.': +case 'not': +case 'lambda': +return $transition(new $AbstractExprCtx(C,false),token,value) +case ',': +case ')': +return $transition(C.parent,token) +case ':': +if(C.parent.parent.type=='lambda'){return $transition(C.parent.parent,token)}} +$_SyntaxError(C,'token '+token+' after '+C)} +$DoubleStarArgCtx.prototype.to_js=function(){this.js_processed=true +return 
'{$nat:"pdict",arg:'+$to_js(this.tree)+'}'} var $EllipsisCtx=$B.parser.$EllipsisCtx=function(C){ this.type='ellipsis' this.parent=C this.nbdots=1 this.start=$pos -C.tree[C.tree.length]=this -this.toString=function(){return 'ellipsis'} -this.to_js=function(){this.js_processed=true -return '$B.builtins["Ellipsis"]'}} -var $ExceptCtx=$B.parser.$ExceptCtx=function(C){ +C.tree[C.tree.length]=this} +$EllipsisCtx.prototype.toString=function(){return 'ellipsis'} +$EllipsisCtx.prototype.transition=function(token,value){var C=this +if(token=='.'){C.nbdots++ +if(C.nbdots==3 && $pos-C.start==2){C.$complete=true} +return C}else{if(! C.$complete){$pos-- +$_SyntaxError(C,'token '+token+' after '+ +C)}else{return $transition(C.parent,token,value)}}} +$EllipsisCtx.prototype.to_js=function(){this.js_processed=true +return '$B.builtins["Ellipsis"]'} +var $EndOfPositionalCtx=$B.parser.$EndOfConditionalCtx=function(C){ +this.type="end_positional" +this.parent=C +C.has_end_positional=true +C.parent.pos_only=C.tree.length +C.tree.push(this)} +$EndOfPositionalCtx.prototype.transition=function(token,value){var C=this +if(token=="," ||token==")"){return $transition(C.parent,token,value)} +$_SyntaxError(C,'token '+token+' after '+C)} +$EndOfPositionalCtx.prototype.to_js=function(){return "/"} +var $ExceptCtx=$B.parser.$ExceptCtx=function(C){ this.type='except' this.parent=C C.tree[C.tree.length]=this this.tree=[] this.expect='id' -this.scope=$get_scope(this) -this.toString=function(){return '(except) '} -this.set_alias=function(alias){this.tree[0].alias=$mangle(alias,this) +this.scope=$get_scope(this)} +$ExceptCtx.prototype.toString=function(){return '(except) '} +$ExceptCtx.prototype.transition=function(token,value){var C=this +switch(token){case 'id': +case 'imaginary': +case 'int': +case 'float': +case 'str': +case 'bytes': +case '[': +case '(': +case '{': +case 'not': +case 'lambda': +if(C.expect=='id'){C.expect='as' +return $transition(new $AbstractExprCtx(C,false),token,value)} +case 'as': +if(C.expect=='as' && +C.has_alias===undefined){C.expect='alias' +C.has_alias=true +return C} +case 'id': +if(C.expect=='alias'){C.expect=':' +C.set_alias(value) +return C} +break +case ':': +var _ce=C.expect +if(_ce=='id' ||_ce=='as' ||_ce==':'){return $BodyCtx(C)} +break +case '(': +if(C.expect=='id' && C.tree.length==0){C.parenth=true +return C} +break +case ')': +if(C.expect==',' ||C.expect=='as'){C.expect='as' +return C} +case ',': +if(C.parenth !==undefined && +C.has_alias===undefined && +(C.expect=='as' ||C.expect==',')){C.expect='id' +return C}} +$_SyntaxError(C,'token '+token+' after '+C.expect)} +$ExceptCtx.prototype.set_alias=function(alias){this.tree[0].alias=$mangle(alias,this) $bind(alias,this.scope,this)} -this.transform=function(node,rank){ +$ExceptCtx.prototype.transform=function(node,rank){ +var linenum_node=$NodeJS("void(0)") +linenum_node.line_num=node.line_num +node.insert(0,linenum_node) var last_child=$B.last(node.children) if(last_child.C.tree && last_child.C.tree[0]&& last_child.C.tree[0].type=="return"){} else{node.add($NodeJS("$B.del_exc()"))}} -this.to_js=function(){ +$ExceptCtx.prototype.to_js=function(){ this.js_processed=true switch(this.tree.length){case 0: return 'else' @@ -1590,7 +2063,7 @@ if($B.debug > 0){var module=$get_module(this) lnum='($locals.$line_info = "'+$get_node(this).line_num+ ','+module.id+'") && '} return 'else if('+lnum+'$B.is_exc('+this.error_name+ -',['+res.join(',')+']))'}} +',['+res.join(',')+']))'} var $ExprCtx=$B.parser.$ExprCtx=function(C,name,with_commas){ 
this.type='expr' this.name=name @@ -1598,50 +2071,271 @@ this.with_commas=with_commas this.expect=',' this.parent=C if(C.packed){this.packed=C.packed} -if(C.is_await){this.is_await=C.is_await} +if(C.is_await){var node=$get_node(this) +node.has_await=node.has_await ||[] +this.is_await=C.is_await +node.has_await.push(this)} if(C.assign){ this.assign=C.assign} this.tree=[] -C.tree[C.tree.length]=this -this.toString=function(){return '(expr '+with_commas+') '+this.tree} -this.to_js=function(arg){var res +C.tree[C.tree.length]=this} +$ExprCtx.prototype.toString=function(){return '(expr '+this.with_commas+') '+this.tree} +$ExprCtx.prototype.transition=function(token,value){var C=this +switch(token){case 'bytes': +case 'float': +case 'id': +case 'imaginary': +case 'int': +case 'lambda': +case 'pass': +case 'str': +$_SyntaxError(C,'token '+token+' after '+ +C) +break +case '{': +if(C.tree[0].type !="id" || +["print","exec"].indexOf(C.tree[0].value)==-1){$_SyntaxError(C,'token '+token+' after '+ +C)} +return new $DictOrSetCtx(C) +case '[': +case '(': +case '.': +case 'not': +if(C.expect=='expr'){C.expect=',' +return $transition(new $AbstractExprCtx(C,false),token,value)}} +switch(token){case 'not': +if(C.expect==','){return new $ExprNot(C)} +break +case 'in': +if(C.parent.type=='target_list'){ +return $transition(C.parent,token)} +if(C.expect==','){return $transition(C,'op','in')} +break +case ',': +if(C.expect==','){if(C.with_commas || +["assign","return"].indexOf(C.parent.type)>-1){if($parent_match(C,{type:"yield","from":true})){$_SyntaxError(C,"no implicit tuple for yield from")} +C.parent.tree.pop() +var tuple=new $ListOrTupleCtx(C.parent,'tuple') +tuple.implicit=true +tuple.has_comma=true +tuple.tree=[C] +C.parent=tuple +return tuple}} +return $transition(C.parent,token) +case '.': +return new $AttrCtx(C) +case '[': +return new $AbstractExprCtx(new $SubCtx(C),true) +case '(': +return new $CallCtx(C) +case 'op': +var op_parent=C.parent,op=value +if(op_parent.type=='ternary' && op_parent.in_else){var new_op=new $OpCtx(C,op) +return new $AbstractExprCtx(new_op,false)} +var op1=C.parent,repl=null +while(1){if(op1.type=='expr'){op1=op1.parent} +else if(op1.type=='op' && +$op_weight[op1.op]>=$op_weight[op]&& +!(op1.op=='**' && op=='**')){ +repl=op1 +op1=op1.parent}else if(op1.type=="not" && +$op_weight['not']> $op_weight[op]){repl=op1 +op1=op1.parent}else{break}} +if(repl===null){while(1){if(C.parent !==op1){C=C.parent +op_parent=C.parent}else{break}} +C.parent.tree.pop() +var expr=new $ExprCtx(op_parent,'operand',C.with_commas) +expr.expect=',' +C.parent=expr +var new_op=new $OpCtx(C,op) +return new $AbstractExprCtx(new_op,false)}else{ +if(op==='and' ||op==='or'){while(repl.parent.type=='not'|| +(repl.parent.type=='expr' && +repl.parent.parent.type=='not')){ +repl=repl.parent +op_parent=repl.parent}}} +if(repl.type=='op'){var _flag=false +switch(repl.op){case '<': +case '<=': +case '==': +case '!=': +case 'is': +case '>=': +case '>': +_flag=true} +if(_flag){switch(op){case '<': +case '<=': +case '==': +case '!=': +case 'is': +case '>=': +case '>': +var c2=repl.tree[1], +c2js=c2.to_js() +var c2_clone=new Object() +for(var attr in c2){c2_clone[attr]=c2[attr]} +var vname="$c"+chained_comp_num +c2.to_js=function(){return vname} +c2_clone.to_js=function(){return vname} +chained_comp_num++ +while(repl.parent && repl.parent.type=='op'){if($op_weight[repl.parent.op]< +$op_weight[repl.op]){repl=repl.parent}else{break}} +repl.parent.tree.pop() +var and_expr=new $OpCtx(repl,'and') 
+and_expr.wrap={'name':vname,'js':c2js} +c2_clone.parent=and_expr +and_expr.tree.push('xxx') +var new_op=new $OpCtx(c2_clone,op) +return new $AbstractExprCtx(new_op,false)}}} +repl.parent.tree.pop() +var expr=new $ExprCtx(repl.parent,'operand',false) +expr.tree=[op1] +repl.parent=expr +var new_op=new $OpCtx(repl,op) +return new $AbstractExprCtx(new_op,false) +case 'augm_assign': +var parent=C.parent +while(parent){if(parent.type=="assign" ||parent.type=="augm_assign"){$_SyntaxError(C,"augmented assignment inside assignment")}else if(parent.type=="op"){$_SyntaxError(C,["cannot assign to operator"])} +parent=parent.parent} +if(C.expect==','){return new $AbstractExprCtx( +new $AugmentedAssignCtx(C,value),true)} +return $transition(C.parent,token,value) +case ":": +if(C.parent.type=="sub" || +(C.parent.type=="list_or_tuple" && +C.parent.parent.type=="sub")){return new $AbstractExprCtx(new $SliceCtx(C.parent),false)}else if(C.parent.type=="slice"){return $transition(C.parent,token,value)}else if(C.parent.type=="node"){ +if(C.tree.length==1){var child=C.tree[0] +if(["id","sub","attribute"].indexOf(child.type)>-1){return new $AbstractExprCtx(new $AnnotationCtx(C),false)}else if(child.real=="tuple" && child.expect=="," && +child.tree.length==1){return new $AbstractExprCtx(new $AnnotationCtx(child.tree[0]),false)}} +$_SyntaxError(C,"invalid target for annotation")} +break +case '=': +function has_parent(ctx,type){ +while(ctx.parent){if(ctx.parent.type==type){return ctx.parent} +ctx=ctx.parent} +return false} +var annotation +if(C.expect==','){if(C.parent.type=="call_arg"){ +if(C.tree[0].type !="id"){$_SyntaxError(C,['expression cannot contain'+ +' assignment, perhaps you meant "=="?'])} +return new $AbstractExprCtx(new $KwArgCtx(C),true)}else if(annotation=has_parent(C,"annotation")){return $transition(annotation,token,value)}else if(C.parent.type=="op"){ +$_SyntaxError(C,["cannot assign to operator"])}else if(C.parent.type=="not"){ +$_SyntaxError(C,["cannot assign to operator"])}else if(C.parent.type=="list_or_tuple"){ +for(var i=0;i < C.parent.tree.length;i++){var item=C.parent.tree[i] +if(item.type=="expr" && item.name=="operand"){$_SyntaxError(C,["cannot assign to operator"])}}}else if(C.tree.length > 0 && C.tree[0].assign){ +$_SyntaxError(C,["cannot assign to named expression"])}else if(C.parent.type=="expr" && +C.parent.name=="target list"){$_SyntaxError(C,'token '+token+' after ' ++C)}else if(C.parent.type=="lambda"){if(C.parent.parent.parent.type !="node"){$_SyntaxError(C,['expression cannot contain'+ +' assignment, perhaps you meant "=="?'])}} +while(C.parent !==undefined){C=C.parent +if(C.type=="condition"){$_SyntaxError(C,'token '+token+' after ' ++C)}else if(C.type=="augm_assign"){$_SyntaxError(C,"assignment inside augmented assignment")}} +C=C.tree[0] +return new $AbstractExprCtx(new $AssignCtx(C),true)} +break +case ':=': +var ptype=C.parent.type +if(["node","assign","kwarg","annotation"]. 
+indexOf(ptype)>-1){$_SyntaxError(C,':= invalid, parent '+ptype)}else if(ptype=="func_arg_id" && +C.parent.tree.length > 0){ +$_SyntaxError(C,':= invalid, parent '+ptype)}else if(ptype=="call_arg" && +C.parent.parent.type=="call" && +C.parent.parent.parent.type=="lambda"){ +$_SyntaxError(C,':= invalid inside function arguments' )} +if(C.tree.length==1 && +C.tree[0].type=="id"){var scope=$get_scope(C),name=C.tree[0].value +while(scope.is_comp){scope=scope.parent_block} +$bind(name,scope,C) +var parent=C.parent +parent.tree.pop() +var assign_expr=new $AbstractExprCtx(parent,false) +assign_expr.assign=C.tree[0] +return assign_expr} +$_SyntaxError(C,'token '+token+' after '+C) +case 'if': +var in_comp=false,ctx=C.parent +while(true){if(ctx.type=="list_or_tuple"){ +break}else if(ctx.type=='comp_for' ||ctx.type=="comp_if"){in_comp=true +break}else if(ctx.type=='call_arg'){break} +if(ctx.parent !==undefined){ctx=ctx.parent} +else{break}} +if(in_comp){break} +var ctx=C +while(ctx.parent && +(ctx.parent.type=='op' || +(ctx.parent.type=="expr" && ctx.parent.name=="operand"))){ctx=ctx.parent} +return new $AbstractExprCtx(new $TernaryCtx(ctx),true) +case 'eol': +if(C.tree.length==2 && +C.tree[0].type=="id" && +["print","exec"].indexOf(C.tree[0].value)>-1){$_SyntaxError(C,["Missing parentheses in call "+ +"to '"+C.tree[0].value+"'."])} +if(["dict_or_set","list_or_tuple"].indexOf(C.parent.type)==-1){var t=C.tree[0] +if(t.type=="packed" || +(t.type=="call" && t.func.type=="packed")){$_SyntaxError(C,["can't use starred expression here"])}}} +return $transition(C.parent,token)} +$ExprCtx.prototype.to_js=function(arg){var res this.js_processed=true -if(this.type=='list'){res='['+$to_js(this.tree)+']'} -else if(this.tree.length==1){res=this.tree[0].to_js(arg)} -else{res='_b_.tuple.$factory(['+$to_js(this.tree)+'])'} -if(this.is_await){res="await $B.promise("+res+")"} -if(this.assign){console.log("expr to js, is assign",this) +if(this.type=='list'){res='['+$to_js(this.tree)+']'}else if(this.tree.length==1){res=this.tree[0].to_js(arg)}else{res='_b_.tuple.$factory(['+$to_js(this.tree)+'])'} +if(this.is_await){res="await ($B.promise("+res+"))"} +if(this.assign){ var scope=$get_scope(this) +while(scope.is_comp){scope=scope.parent_block} +if(scope.globals && scope.globals.has(this.assign.value)){ +while(scope.parent_block && +scope.parent_block.id !=="__builtins__"){scope=scope.parent_block}}else if(scope.nonlocals && +scope.nonlocals[this.assign.value]){ +scope=scope.parent_block} res="($locals_"+scope.id.replace(/\./g,'_')+'["'+ this.assign.value+'"] = '+res+')'} -return res}} +return res} var $ExprNot=$B.parser.$ExprNot=function(C){ this.type='expr_not' this.parent=C this.tree=[] -C.tree[C.tree.length]=this -this.toString=function(){return '(expr_not)'}} -var $FloatCtx=$B.parser.$FloatCtx=function(C,value){ -this.type='float' -this.value=value -this.parent=C -this.tree=[] -C.tree[C.tree.length]=this -this.toString=function(){return 'float '+this.value} -this.to_js=function(){this.js_processed=true -if(/^\d+$/.exec(this.value)|| -/^\d+\.\d*$/.exec(this.value)){return '(new Number('+this.value+'))'} -return 'float.$factory('+this.value+')'}} +C.tree[C.tree.length]=this} +$ExprNot.prototype.transition=function(token,value){var C=this +if(token=='in'){ +C.parent.tree.pop() +return new $AbstractExprCtx( +new $OpCtx(C.parent,'not_in'),false)} +$_SyntaxError(C,'token '+token+' after '+C)} +$ExprNot.prototype.toString=function(){return '(expr_not)'} var $ForExpr=$B.parser.$ForExpr=function(C){ 
+if(C.node.parent.is_comp){ +C.node.parent.first_for=this} this.type='for' this.parent=C this.tree=[] C.tree[C.tree.length]=this this.loop_num=$loop_num -this.module=$get_scope(this).module -$loop_num++ -this.toString=function(){return '(for) '+this.tree} -this.transform=function(node,rank){if(this.async){return this.transform_async(node,rank)} +this.scope=$get_scope(this) +if(this.scope.is_comp){} +this.module=this.scope.module +$loop_num++} +$ForExpr.prototype.toString=function(){return '(for) '+this.tree} +$ForExpr.prototype.transition=function(token,value){var C=this +switch(token){case 'in': +if(C.tree[0].tree.length==0){ +$_SyntaxError(C,"missing target between 'for' and 'in'")} +return new $AbstractExprCtx( +new $ExprCtx(C,'target list',true),false) +case ':': +if(C.tree.length < 2 +||C.tree[1].tree[0].type=="abstract_expr"){$_SyntaxError(C,'token '+token+' after '+ +C)} +return $BodyCtx(C)} +$_SyntaxError(C,'token '+token+' after '+C)} +$ForExpr.prototype.transform=function(node,rank){ +var pnode=this.parent.node.parent +while(pnode){if(pnode.is_comp){var module=$get_module(this) +if(module.outermost_expr===undefined){pnode.outermost_expr=this.tree[1] +module.outermost_expr=this.tree[1] +this.tree.pop() +new $RawJSCtx(this,"expr")} +break} +pnode=pnode.parent} +if(this.async){return this.transform_async(node,rank)} var scope=$get_scope(this),target=this.tree[0],target_is_1_tuple=target.tree.length==1 && target.expect=='id',iterable=this.tree[1],num=this.loop_num,local_ns='$locals_'+scope.id.replace(/\./g,'_'),h='\n'+' '.repeat(node.indent+4) var $range=false if(target.tree.length==1 && @@ -1657,7 +2351,7 @@ $range=call}}} var new_nodes=[],pos=0 var children=node.children var offset=1 -if($range && scope.ntype !='generator'){if(this.has_break){ +if($range){if(this.has_break){ new_node=new $Node() new $NodeJSCtx(new_node,local_ns+'["$no_break'+num+'"] = true') new_nodes[pos++]=new_node} @@ -1668,7 +2362,8 @@ else{break}} range_is_builtin=found.length==1 && found[0]=="__builtins__" var test_range_node=new $Node() -if(range_is_builtin){new $NodeJSCtx(test_range_node,'if(1)')}else{new $NodeJSCtx(test_range_node,'if('+call.func.to_js()+' === $B.builtins.range)')} +test_range_node.module=node.parent.module +if(range_is_builtin){new $NodeJSCtx(test_range_node,'if(1)')}else{new $NodeJSCtx(test_range_node,'if('+call.func.to_js()+' === _b_.range)')} new_nodes[pos++]=test_range_node var idt=target.to_js(),shortcut=false if($range.tree.length==1){var stop=$range.tree[0].tree[0] @@ -1695,9 +2390,11 @@ for_node.add($NodeJS('if($safe'+num+'){$next'+num+ ' += 1}')) for_node.add($NodeJS('else{$next'+num+' = $B.add($next'+ num+',1)}'))} -children.forEach(function(child){for_node.add(child)}) -for_node.add($NodeJS('$locals.$line_info = "'+node.line_num+ -','+scope.id+'"; None;')) +children.forEach(function(child){for_node.add(child.clone_tree())}) +if($B.last(node.children).C.tree[0].type !="return"){var js='$locals.$line_info = "'+node.line_num+ +','+this.module+'";if($locals.$f_trace !== _b_.None){'+ +'$B.trace_line()};_b_.None;' +for_node.add($NodeJS(js))} var in_loop=false if(scope.ntype=='module'){var pnode=node.parent while(pnode){if(pnode.for_wrapper){in_loop=true;break} @@ -1741,13 +2438,10 @@ if(this.has_break){ new_nodes[pos++]=$NodeJS(local_ns+'["$no_break'+num+ '"] = true;')} var while_node=new $Node() -if(this.has_break){js='while('+local_ns+'["$no_break'+num+'"])'}else{js='while(1)'} +if(this.has_break){js='while('+local_ns+'["$no_break'+num+'"])'}else{js='while(true)'} new 
$NodeJSCtx(while_node,js) while_node.C.loop_num=num while_node.C.type='for' -while_node.line_num=node.line_num -if(scope.ntype=='generator'){ -while_node.loop_start=num} new_nodes[pos++]=while_node node.parent.children.splice(rank,1) if(this.test_range){for(var i=new_nodes.length-1;i >=0;i--){else_node.insert(0,new_nodes[i])}}else{for(var i=new_nodes.length-1;i >=0;i--){node.parent.insert(rank,new_nodes[i]) @@ -1767,12 +2461,18 @@ var assign=new $AssignCtx(target_expr) assign.tree[1]=new $JSCode('$locals["$next'+num+'"]()') try_node.add(iter_node) while_node.add( -$NodeJS('catch($err){if($B.is_exc($err, [StopIteration]))'+ +$NodeJS('catch($err){if($B.is_exc($err, [_b_.StopIteration]))'+ '{break;}else{throw($err)}}')) -children.forEach(function(child){while_node.add(child)}) +children.forEach(function(child){ +while_node.add(child.clone())}) +if(node.children.length==0){console.log("bizarre",this)} +if($B.last(node.children).C.tree[0].type !="return"){var js='$locals.$line_info = "'+node.line_num+ +','+this.module+'";if($locals.$f_trace !== _b_.None){'+ +'$B.trace_line()};_b_.None;' +while_node.add($NodeJS(js))} node.children=[] return 0} -this.transform_async=function(node,rank){ +$ForExpr.prototype.transform_async=function(node,rank){ var scope=$get_scope(this),target=this.tree[0],target_is_1_tuple=target.tree.length==1 && target.expect=='id',iterable=this.tree[1],num=this.loop_num,local_ns='$locals_'+scope.id.replace(/\./g,'_'),h='\n'+' '.repeat(node.indent+4) var new_nodes=[] var it_js=iterable.to_js(),iterable_name='$iter'+num,type_name='$type'+num,running_name='$running'+num,anext_name='$anext'+num,target_name='$target'+num,js='var '+iterable_name+' = '+it_js @@ -1789,14 +2489,14 @@ var while_node=$NodeJS('while('+running_name+')') new_nodes.push(while_node) var try_node=$NodeJS('try') while_node.add(try_node) -if(target.tree.length==1){var js=target.to_js()+' = await $B.promise('+ -anext_name+'('+iterable_name+'))' +if(target.tree.length==1){var js=target.to_js()+' = await ($B.promise('+ +anext_name+'('+iterable_name+')))' try_node.add($NodeJS(js))}else{var new_node=new $Node(),ctx=new $NodeCtx(new_node),expr=new $ExprCtx(ctx,"left",false) expr.tree.push(target) target.parent=expr var assign=new $AssignCtx(expr) -new $RawJSCtx(assign,'await $B.promise('+ -anext_name+'('+iterable_name+'))') +new $RawJSCtx(assign,'await ($B.promise('+ +anext_name+'('+iterable_name+')))') try_node.add(new_node)} var catch_node=$NodeJS('catch(err)') while_node.add(catch_node) @@ -1808,57 +2508,105 @@ node.parent.children.splice(rank,1) for(var i=new_nodes.length-1;i >=0;i--){node.parent.insert(rank,new_nodes[i])} node.children=[] return 0} -this.to_js=function(){this.js_processed=true +$ForExpr.prototype.to_js=function(){this.js_processed=true var iterable=this.tree.pop() -return 'for ('+$to_js(this.tree)+' in '+iterable.to_js()+')'}} +return 'for ('+$to_js(this.tree)+' in '+iterable.to_js()+')'} var $FromCtx=$B.parser.$FromCtx=function(C){ this.type='from' this.parent=C this.module='' this.names=[] -this.aliases={} C.tree[C.tree.length]=this this.expect='module' -this.scope=$get_scope(this) -this.add_name=function(name){this.names[this.names.length]=name +this.scope=$get_scope(this)} +$FromCtx.prototype.add_name=function(name){this.names[this.names.length]=name if(name=='*'){this.scope.blurred=true}} -this.bind_names=function(){ +$FromCtx.prototype.bind_names=function(){ var scope=$get_scope(this) -this.names.forEach(function(name){name=this.aliases[name]||name 
+this.names.forEach(function(name){if(Array.isArray(name)){name=name[1]} $bind(name,scope,this)},this)} -this.toString=function(){return '(from) '+this.module+' (import) '+this.names+ -'(as)'+this.aliases} -this.to_js=function(){this.js_processed=true -var scope=$get_scope(this),module=$get_module(this),mod=module.module,res=[],pos=0,indent=$get_node(this).indent,head=' '.repeat(indent) -var mod_elts=this.module.split(".") -for(var i=0;i < mod_elts.length;i++){module.imports[mod_elts.slice(0,i+1).join(".")]=true} -var _mod=this.module.replace(/\$/g,''),$package,packages=[] -while(_mod.length > 0){if(_mod.charAt(0)=='.'){if($package===undefined){if($B.imported[mod]!==undefined){$package=$B.imported[mod].__package__ -packages=$package.split('.')}}else{$package=$B.imported[$package] -packages.pop()} -if($package===undefined){return 'throw SystemError.$factory("Parent module \'\' '+ -'not loaded, cannot perform relative import")'}else if($package=='None'){console.log('package is None !')} -_mod=_mod.substr(1)}else{break}} -if(_mod){packages.push(_mod)} -this.module=packages.join('.') -var mod_name=this.module.replace(/\$/g,'') -res[pos++]='$B.$import("' -res[pos++]=mod_name+'",["' -res[pos++]=this.names.join('","')+'"], {' -var sep='' -for(var attr in this.aliases){res[pos++]=sep+'"'+attr+'": "'+this.aliases[attr]+'"' -sep=','} -res[pos++]='}, {}, true);' -if(this.names[0]=='*'){ -scope.blurred=true -res[pos++]='\n'+head+'for(var $attr in $B.imported["'+ -mod_name+'"]){if($attr.charAt(0) !== "_" && $attr.charAt(0) !== "$")'+ -'{$locals[$attr] = $B.imported["'+mod_name+'"][$attr]}};'}else{this.names.forEach(function(name){module.imports[this.module+'.'+name]=true +$FromCtx.prototype.transition=function(token,value){var C=this +switch(token){case 'id': +if(C.expect=='id'){C.add_name(value) +C.expect=',' +return C} +if(C.expect=='alias'){C.names[C.names.length-1]= +[$B.last(C.names),value] +C.expect=',' +return C} +case '.': +if(C.expect=='module'){if(token=='id'){C.module+=value} +else{C.module+='.'} +return C} +case 'import': +if(C.expect=='module'){C.expect='id' +return C} +case 'op': +if(value=='*' && C.expect=='id' +&& C.names.length==0){if($get_scope(C).ntype !=='module'){$_SyntaxError(C,["import * only allowed at module level"])} +C.add_name('*') +C.expect='eol' +return C} +case ',': +if(C.expect==','){C.expect='id' +return C} +case 'eol': +switch(C.expect){case ',': +case 'eol': +C.bind_names() +return $transition(C.parent,token) +case 'id': +$_SyntaxError(C,['trailing comma not allowed without '+ +'surrounding parentheses']) +default: +$_SyntaxError(C,['invalid syntax'])} +case 'as': +if(C.expect==',' ||C.expect=='eol'){C.expect='alias' +return C} +case '(': +if(C.expect=='id'){C.expect='id' +return C} +case ')': +if(C.expect==',' ||C.expect=='id'){C.expect='eol' +return C}} +$_SyntaxError(C,'token '+token+' after '+C)} +$FromCtx.prototype.toString=function(){return '(from) '+this.module+' (import) '+this.names} +$FromCtx.prototype.to_js=function(){this.js_processed=true +var scope=$get_scope(this),module=$get_module(this),mod=module.module,res=[],pos=0,indent=$get_node(this).indent,head=' '.repeat(indent) +if(mod.startsWith("$exec")){var frame=$B.last($B.frames_stack)[1] +if(frame.module && frame.module.__name__){mod=frame.module.__name__}} +var mod_elts=this.module.split(".") +for(var i=0;i < mod_elts.length;i++){module.imports[mod_elts.slice(0,i+1).join(".")]=true} +var _mod=this.module.replace(/\$/g,''),$package,packages=[] +while(_mod.length > 
0){if(_mod.charAt(0)=='.'){if($package===undefined){if($B.imported[mod]!==undefined){$package=$B.imported[mod].__package__ +packages=$package.split('.')}}else{$package=$B.imported[$package] +packages.pop()} +if($package===undefined){return 'throw _b_.SystemError.$factory("Parent module \'\' '+ +'not loaded, cannot perform relative import")'}else if($package=='None'){console.log('package is None !')} +_mod=_mod.substr(1)}else{break}} +if(_mod){packages.push(_mod)} +this.module=packages.join('.') +var mod_name=this.module.replace(/\$/g,'') +res[pos++]='var module = $B.$import("' +res[pos++]=mod_name+'",["' +var names=[] +for(var i=0,len=this.names.length;i < len;i++){if(Array.isArray(this.names[i])){names.push(this.names[i][0])}else{names.push(this.names[i])}} +res[pos++]=names.join('","')+'"], {' +var sep='' +for(var attr in this.aliases){res[pos++]=sep+'"'+attr+'": "'+this.aliases[attr]+'"' +sep=','} +res[pos++]='}, {}, true);' +if(this.names[0]=='*'){ +scope.blurred=true +res[pos++]='\n'+head+'$B.import_all($locals, module);'}else{this.names.forEach(function(name){var alias=name +if(Array.isArray(name)){alias=name[1] +name=name[0]} +module.imports[this.module+'.'+name]=true res[pos++]='\n'+head+'$locals["'+ -(this.aliases[name]||name)+'"] = $B.$getattr($B.imported["'+ +alias+'"] = $B.$getattr($B.imported["'+ mod_name+'"], "'+name+'");'},this)} -res[pos++]='\n'+head+'None;' -return res.join('');}} +res[pos++]='\n'+head+'_b_.None;' +return res.join('');} var $FuncArgs=$B.parser.$FuncArgs=function(C){ this.type='func_args' this.parent=C @@ -1868,10 +2616,39 @@ C.tree[C.tree.length]=this this.expect='id' this.has_default=false this.has_star_arg=false -this.has_kw_arg=false -this.toString=function(){return 'func args '+this.tree} -this.to_js=function(){this.js_processed=true -return $to_js(this.tree)}} +this.has_kw_arg=false} +$FuncArgs.prototype.toString=function(){return 'func args '+this.tree} +$FuncArgs.prototype.transition=function(token,value){var C=this +switch(token){case 'id': +if(C.has_kw_arg){$_SyntaxError(C,'duplicate keyword argument')} +if(C.expect=='id'){C.expect=',' +if(C.names.indexOf(value)>-1){$_SyntaxError(C,['duplicate argument '+value+ +' in function definition'])}} +return new $FuncArgIdCtx(C,value) +case ',': +if(C.expect==','){C.expect='id' +return C} +$_SyntaxError(C,'token '+token+' after '+ +C) +case ')': +var last=$B.last(C.tree) +if(last && last.type=="func_star_arg"){if(last.name=="*"){if(C.op=='*'){ +$_SyntaxError(C,['named arguments must follow bare *'])}else{$_SyntaxError(C,'invalid syntax')}}} +return C.parent +case 'op': +if(C.has_kw_arg){$_SyntaxError(C,'duplicate keyword argument')} +var op=value +C.expect=',' +if(op=='*'){if(C.has_star_arg){$_SyntaxError(C,'duplicate star argument')} +return new $FuncStarArgCtx(C,'*')}else if(op=='**'){return new $FuncStarArgCtx(C,'**')}else if(op=='/'){ +if(C.has_end_positional){$_SyntaxError(C,['duplicate / in function parameters'])}else if(C.has_star_arg){$_SyntaxError(C,['/ after * in function parameters'])} +return new $EndOfPositionalCtx(C)} +$_SyntaxError(C,'token '+op+' after '+C) +case ':': +if(C.parent.type=="lambda"){return $transition(C.parent,token)}} +$_SyntaxError(C,'token '+token+' after '+C)} +$FuncArgs.prototype.to_js=function(){this.js_processed=true +return $to_js(this.tree)} var $FuncArgIdCtx=$B.parser.$FuncArgIdCtx=function(C,name){ this.type='func_arg_id' this.name=name @@ -1886,10 +2663,30 @@ var ctx=C while(ctx.parent !==undefined){if(ctx.type=='def'){ctx.locals.push(name) break} ctx=ctx.parent} 
-this.expect='=' -this.toString=function(){return 'func arg id '+this.name+'='+this.tree} -this.to_js=function(){this.js_processed=true -return this.name+$to_js(this.tree)}} +this.expect='='} +$FuncArgIdCtx.prototype.toString=function(){return 'func arg id '+this.name+'='+this.tree} +$FuncArgIdCtx.prototype.transition=function(token,value){var C=this +switch(token){case '=': +if(C.expect=='='){C.has_default=true +var def_ctx=C.parent.parent +if(C.parent.has_star_arg){def_ctx.default_list.push(def_ctx.after_star.pop())}else{def_ctx.default_list.push(def_ctx.positional_list.pop())} +return new $AbstractExprCtx(C,false)} +break +case ',': +case ')': +if(C.parent.has_default && C.tree.length==0 && +C.parent.has_star_arg===undefined){$pos-=C.name.length +$_SyntaxError(C,['non-default argument follows default argument'])}else{return $transition(C.parent,token)} +case ':': +if(C.parent.parent.type=="lambda"){ +return $transition(C.parent.parent,":")} +if(C.has_default){ +$_SyntaxError(C,'token '+token+' after '+ +C)} +return new $AbstractExprCtx(new $AnnotationCtx(C),false)} +$_SyntaxError(C,'token '+token+' after '+C)} +$FuncArgIdCtx.prototype.to_js=function(){this.js_processed=true +return this.name+$to_js(this.tree)} var $FuncStarArgCtx=$B.parser.$FuncStarArgCtx=function(C,op){ this.type='func_star_arg' this.op=op @@ -1897,17 +2694,37 @@ this.parent=C this.node=$get_node(this) C.has_star_arg=op=='*' C.has_kw_arg=op=='**' -C.tree[C.tree.length]=this -this.toString=function(){return '(func star arg '+this.op+') '+this.name} -this.set_name=function(name){this.name=name +C.tree[C.tree.length]=this} +$FuncStarArgCtx.prototype.toString=function(){return '(func star arg '+this.op+') '+this.name} +$FuncStarArgCtx.prototype.transition=function(token,value){var C=this +switch(token){case 'id': +if(C.name===undefined){if(C.parent.names.indexOf(value)>-1){$_SyntaxError(C,['duplicate argument '+value+ +' in function definition'])}} +C.set_name(value) +C.parent.names.push(value) +return C +case ',': +case ')': +if(C.name===undefined){ +C.set_name('*') +C.parent.names.push('*')} +return $transition(C.parent,token) +case ':': +if(C.parent.parent.type=="lambda"){ +return $transition(C.parent.parent,":")} +if(C.name===undefined){$_SyntaxError(C,'annotation on an unnamed parameter')} +return new $AbstractExprCtx( +new $AnnotationCtx(C),false)} +$_SyntaxError(C,'token '+token+' after '+C)} +$FuncStarArgCtx.prototype.set_name=function(name){this.name=name if(this.node.binding[name]){$_SyntaxError(C,["duplicate argument '"+name+"' in function definition"])} $bind(name,this.node,this) -var ctx=C +var ctx=this.parent while(ctx.parent !==undefined){if(ctx.type=='def'){ctx.locals.push(name) break} ctx=ctx.parent} -if(op=='*'){ctx.other_args='"'+name+'"'} -else{ctx.other_kw='"'+name+'"'}}} +if(this.op=='*'){ctx.other_args='"'+name+'"'} +else{ctx.other_kw='"'+name+'"'}} var $GlobalCtx=$B.parser.$GlobalCtx=function(C){ this.type='global' this.parent=C @@ -1915,13 +2732,38 @@ this.tree=[] C.tree[C.tree.length]=this this.expect='id' this.scope=$get_scope(this) -$B._globals[this.scope.id]=$B._globals[this.scope.id]||{} -this.toString=function(){return 'global '+this.tree} -this.add=function(name){if(this.scope.annotations && this.scope.annotations.has(name)){$_SyntaxError(C,["annotated name '"+name+ +this.scope.globals=this.scope.globals ||new Set() +this.module=$get_module(this) +while(this.module.module !=this.module.id){this.module=this.module.parent_block} +this.module.binding=this.module.binding ||{}} 
+$GlobalCtx.prototype.toString=function(){return 'global '+this.tree} +$GlobalCtx.prototype.transition=function(token,value){var C=this +switch(token){case 'id': +if(C.expect=='id'){new $IdCtx(C,value) +C.add(value) +C.expect=',' +return C} +break +case ',': +if(C.expect==','){C.expect='id' +return C} +break +case 'eol': +if(C.expect==','){return $transition(C.parent,token)} +break} +$_SyntaxError(C,'token '+token+' after '+C)} +$GlobalCtx.prototype.add=function(name){if(this.scope.annotations && this.scope.annotations.has(name)){$_SyntaxError(this,["annotated name '"+name+ "' can't be global"])} -$B._globals[this.scope.id][name]=true} -this.to_js=function(){this.js_processed=true -return ''}} +this.scope.globals.add(name) +var mod=this.scope.parent_block +if(this.module.module.startsWith("$exec")){while(mod && mod.parent_block !==this.module){ +mod._globals=mod._globals ||{} +mod._globals[name]=this.module.id +delete mod.binding[name] +mod=mod.parent_block}} +this.module.binding[name]=true} +$GlobalCtx.prototype.to_js=function(){this.js_processed=true +return ''} var $IdCtx=$B.parser.$IdCtx=function(C,value){ this.type='id' this.value=$mangle(value,C) @@ -1931,7 +2773,7 @@ C.tree[C.tree.length]=this var scope=this.scope=$get_scope(this) this.blurred_scope=this.scope.blurred this.env=clone(this.scope.binding) -if(scope.ntype=="def" ||scope.ntype=="generator"){scope.referenced=scope.referenced ||{} +if(["def","generator"].indexOf(scope.ntype)>-1){scope.referenced=scope.referenced ||{} if(! $B.builtins[this.value]){scope.referenced[this.value]=true}} if(C.parent.type=='call_arg'){this.call_arg=true} var ctx=C @@ -1948,33 +2790,58 @@ else if(ctx.vars.indexOf(value)==-1){ctx.vars.push(value)} if(this.call_arg&&ctx.type=='lambda'){if(ctx.locals===undefined){ctx.locals=[value]} else{ctx.locals.push(value)}}} ctx=ctx.parent} -if(C.type=='target_list' || -(C.type=='expr' && C.parent.type=='target_list')){ +if($parent_match(C,{type:'target_list'})){ +this.no_bindings=true $bind(value,scope,this) this.bound=true} -if(scope.ntype=='def' ||scope.ntype=='generator'){ +if(["def","generator"].indexOf(scope.ntype)>-1){ var _ctx=this.parent while(_ctx){if(_ctx.type=='list_or_tuple' && _ctx.is_comp()){this.in_comp=true -return} +break} _ctx=_ctx.parent} -if(C.type=='expr' && C.parent.type=='comp_if'){ -return}else if(C.type=='global'){if(scope.globals===undefined){scope.globals=new Set([value])}else{scope.globals.add(value)}}} -this.toString=function(){return '(id) '+this.value+':'+(this.tree ||'')} -this.firstBindingScopeId=function(){ +if(C.type=='expr' && C.parent.type=='comp_if'){}else if(C.type=='global'){if(scope.globals===undefined){scope.globals=new Set([value])}else{scope.globals.add(value)}}}} +$IdCtx.prototype.toString=function(){return '(id) '+this.value+':'+(this.tree ||'')} +$IdCtx.prototype.transition=function(token,value){var C=this +switch(token){case '=': +if(C.parent.type=='expr' && +C.parent.parent !==undefined && +C.parent.parent.type=='call_arg'){return new $AbstractExprCtx( +new $KwArgCtx(C.parent),false)} +return $transition(C.parent,token,value) +case 'op': +return $transition(C.parent,token,value) +case 'id': +case 'str': +case 'int': +case 'float': +case 'imaginary': +if(["print","exec"].indexOf(C.value)>-1 ){$_SyntaxError(C,["missing parenthesis in call to '"+ +C.value+"'"])} +$_SyntaxError(C,'token '+token+' after '+ +C)} +if(this.parent.parent.type=="packed"){if(['.','[','('].indexOf(token)==-1){return this.parent.parent.transition(token,value)}} +return 
$transition(C.parent,token,value)} +$IdCtx.prototype.firstBindingScopeId=function(){ var scope=this.scope,found=[],nb=0 -while(scope && nb++< 20){if(scope.binding && scope.binding[this.value]){return scope.id} +while(scope){if(scope.globals && scope.globals.has(this.value)){return $get_module(this).id} +if(scope.binding && scope.binding[this.value]){return scope.id} scope=scope.parent}} -this.boundBefore=function(scope){ -var nb=0,node=$get_node(this),found=false -while(!found && node.parent && nb++< 100){var pnode=node.parent -if(pnode.bindings && pnode.bindings[this.value]){return pnode.bindings[this.value]} +$IdCtx.prototype.boundBefore=function(scope){ +var node=$get_node(this),found=false +var $test=false +if($test){console.log(this.value,"bound before") +console.log("node",node)} +while(!found && node.parent){var pnode=node.parent +if(pnode.bindings && pnode.bindings[this.value]){if($test){console.log("bound in",pnode)} +return pnode.bindings[this.value]} for(var i=0;i < pnode.children.length;i++){var child=pnode.children[i] if(child===node){break} -if(child.bindings && child.bindings[this.value]){return child.bindings[this.value]}} +if(child.bindings && child.bindings[this.value]){if($test){console.log("bound in child",child)} +return child.bindings[this.value]}} if(pnode===scope){break} node=pnode} return found} -this.bindingType=function(scope){ +$IdCtx.prototype.bindingType=function(scope){ var nb=0,node=$get_node(this),found=false,unknown,ix while(!found && node.parent && nb++< 100){var pnode=node.parent if(pnode.bindings && pnode.bindings[this.value]){return pnode.bindings[this.value]} @@ -1989,10 +2856,18 @@ return unknown ||found} if(pnode===scope){break} node=pnode} return found} -this.to_js=function(arg){ +$IdCtx.prototype.to_js=function(arg){ if(this.result !==undefined && this.scope.ntype=='generator'){return this.result} -this.js_processed=true var val=this.value +var $test=false +if($test){console.log("this",this)} +if(val=='__BRYTHON__' ||val=='$B'){return val} +if(val.startsWith("comp_result_"+$B.lambda_magic)){if(this.bound){return "var "+val} +return val} +this.js_processed=true +if(this.scope._globals && this.scope._globals[val]){this.global_module=this.scope._globals[val]} +if(this.global_module){return '$locals_'+this.global_module.replace(/\./g,"_")+ +'["'+val+'"]'} var is_local=this.scope.binding[val]!==undefined,this_node=$get_node(this),bound_before=this_node.bound_before this.nonlocal=this.scope.nonlocals && this.scope.nonlocals[val]!==undefined @@ -2003,31 +2878,31 @@ if((!this.bound)&& this.scope.C this.scope.C.tree[0].name==val){ return '$B.$search("'+val+'")'} if(this.unbound && !this.nonlocal){if(this.scope.ntype=='def' ||this.scope.ntype=='generator'){return '$B.$local_search("'+val+'")'}else{return '$B.$search("'+val+'")'}} -if(val=='__BRYTHON__' ||val=='$B'){return val} var innermost=$get_scope(this),scope=innermost,found=[] var search_ids=['"'+innermost.id+'"'] var gs=innermost -var $test=false -if($test){console.log("this",this) -console.log("innermost",innermost)} while(true){if($test){console.log(gs.id,gs)} if(gs.parent_block){if(gs.parent_block==$B.builtins_scope){break} else if(gs.parent_block.id===undefined){break} gs=gs.parent_block} search_ids.push('"'+gs.id+'"')} search_ids="["+search_ids.join(", ")+"]" -if(innermost.globals && innermost.globals.has(val)){search_ids=['"'+gs.id+'"']} +if(innermost.globals && innermost.globals.has(val)){search_ids=['"'+gs.id+'"'] +innermost=gs} if($test){console.log("search ids",search_ids)} if(this.nonlocal 
||this.bound){var bscope=this.firstBindingScopeId() if($test){console.log("binding",bscope)} if(bscope !==undefined){return "$locals_"+bscope.replace(/\./g,"_")+'["'+ -val+'"]'}} +val+'"]'}else if(this.bound){return "$locals_"+innermost.id.replace(/\./g,"_")+ +'["'+val+'"]'}} var global_ns='$locals_'+gs.id.replace(/\./g,'_') -while(1){if($B._globals[scope.id]!==undefined && -$B._globals[scope.id][val]!==undefined){ -if(this.boundBefore(gs)){return global_ns+'["'+val+'"]'}else{if($test){console.log("use global search",this)} +while(1){if(scope.globals !==undefined && +scope.globals.has(val)){if($test){console.log("in globals of",scope.id)} +if(this.boundBefore(gs)){if($test){console.log("bound before in gs",gs)} +return global_ns+'["'+val+'"]'}else{if($test){console.log("use global search",this)} if(this.augm_assign){return global_ns+'["'+val+'"]'}else{return '$B.$global_search("'+val+'", '+ search_ids+')'}}} +if($test){console.log("scope",scope,"innermost",innermost,scope===innermost,"bound_before",bound_before,"found",found.slice())} if(scope===innermost){ if(bound_before){if(bound_before.indexOf(val)>-1){found.push(scope)} else if(scope.C && @@ -2037,23 +2912,30 @@ if(this_node.locals[val]===undefined){ if(!scope.is_comp && (!scope.parent_block || !scope.parent_block.is_comp)){ -found.push(scope)}}else{found.push(scope)}}}}else{if(scope.binding===undefined){console.log("scope",scope,val,"no binding",innermost)} +found.push(scope)}}else{found.push(scope) +break} +if($test){console.log(val,"found in",scope.id)}}}}else{if(scope.binding===undefined){console.log("scope",scope,val,"no binding",innermost)} if(scope.binding[val]){found.push(scope)}} if(scope.parent_block){scope=scope.parent_block} else{break}} this.found=found -if($test){console.log("found",found)} +if($test){console.log("found",found) +found.forEach(function(item){console.log(item.id)})} if(this.nonlocal && found[0]===innermost){found.shift()} if(found.length > 0){ if(found[0].C && found[0]===innermost && val.charAt(0)!='$'){var locs=this_node.locals ||{},nonlocs=innermost.nonlocals -if(locs[val]===undefined && +try{if(locs[val]===undefined && +! 
this.augm_assign && ((innermost.type !='def' || innermost.type !='generator')&& innermost.ntype !='class' && +innermost.C.tree[0].args && innermost.C.tree[0].args.indexOf(val)==-1)&& -(nonlocs===undefined ||nonlocs[val]===undefined)){this.result='$B.$local_search("'+val+'")' -return this.result}} +(nonlocs===undefined ||nonlocs[val]===undefined)){if($test){console.log("$local search",val,"found",found,"innermost",innermost,"this",this)} +this.result='$B.$local_search("'+val+'")' +return this.result}}catch(err){console.log("error",val,innermost) +throw err}} if(found.length > 1 && found[0].C){if(found[0].C.tree[0].type=='class'){var ns0='$locals_'+found[0].id.replace(/\./g,'_'),ns1='$locals_'+found[1].id.replace(/\./g,'_'),res if(bound_before){if(bound_before.indexOf(val)>-1){this.found=found[0].binding[val] res=ns0}else{this.found=found[1].binding[val] @@ -2067,57 +2949,77 @@ return this.result}}} var scope=found[0] this.found=scope.binding[val] var scope_ns='$locals_'+scope.id.replace(/\./g,'_') -if(scope.C===undefined){ +if(scope.C===undefined){if($test){console.log("module level",scope.id,scope.module)} if(scope.id=='__builtins__'){if(gs.blurred){ -val='('+global_ns+'["'+val+'"] || '+val+')'}else{ -if(val !=='__builtins__'){val='$B.builtins.'+val} -this.is_builtin=true}}else if(scope.id==scope.module){ +val='('+global_ns+'["'+val+'"] || _b_.'+val+')'}else{ +val='_b_.'+val +this.is_builtin=true}}else{ +if($test){console.log("name found at module level")} if(this.bound ||this.augm_assign){ val=scope_ns+'["'+val+'"]'}else{if(scope===innermost && this.env[val]===undefined){ this.result='$B.$search("'+val+'")' -return this.result}else{if(this.boundBefore(scope)){ +return this.result}else{if($test){console.log("boudn before ?",scope,this.boundBefore(scope))} +if(this.boundBefore(scope)){ val=scope_ns+'["'+val+'"]'}else{ -if($test){console.log("use check def")} +if($test){console.log("use check def",scope)} val='$B.$check_def("'+val+'",'+ -scope_ns+'["'+val+'"])'}}}}else{val=scope_ns+'["'+val+'"]'}}else if(scope===innermost){if($B._globals[scope.id]&& $B._globals[scope.id][val]){val=global_ns+'["'+val+'"]'}else if(!this.bound && !this.augm_assign){ +scope_ns+'["'+val+'"])'}}}}}else if(scope===innermost){if($test){console.log("scope is innermost",scope.id)} +if(scope.globals && scope.globals.has(val)){val=global_ns+'["'+val+'"]'}else if(!this.bound && !this.augm_assign){ if(this.boundBefore(scope)){val='$locals["'+val+'"]'}else{val='$B.$check_def_local("'+val+'",$locals["'+ val+'"])'}}else{val='$locals["'+val+'"]'}}else if(!this.augm_assign){ -if(scope.ntype=='generator'){ -var up=0, -sc=innermost -while(sc !==scope){up++;sc=sc.parent_block} -var scope_name="$B.frames_stack[$B.frames_stack.length-1-"+ -up+"][1]" -val='$B.$check_def_free1("'+val+'", "'+ -scope.id.replace(/\./g,"_")+'")'}else{val='$B.$check_def_free("'+val+'",'+scope_ns+ -'["'+val+'"])'}}else{val=scope_ns+'["'+val+'"]'} +val='$B.$check_def_free("'+val+'",'+scope_ns+ +'["'+val+'"])'}else{val=scope_ns+'["'+val+'"]'} this.result=val+$to_js(this.tree,'') return this.result}else{ this.unknown_binding=true this.result='$B.$global_search("'+val+'", '+search_ids+')' -return this.result}}} -var $ImaginaryCtx=$B.parser.$ImaginaryCtx=function(C,value){ -this.type='imaginary' -this.value=value -this.parent=C -this.tree=[] -C.tree[C.tree.length]=this -this.toString=function(){return 'imaginary '+this.value} -this.to_js=function(){this.js_processed=true -return '$B.make_complex(0,'+this.value+')'}} +return this.result}} var 
$ImportCtx=$B.parser.$ImportCtx=function(C){ this.type='import' this.parent=C this.tree=[] C.tree[C.tree.length]=this -this.expect='id' -this.toString=function(){return 'import '+this.tree} -this.bind_names=function(){ +this.expect='id'} +$ImportCtx.prototype.toString=function(){return 'import '+this.tree} +$ImportCtx.prototype.transition=function(token,value){var C=this +switch(token){case 'id': +if(C.expect=='id'){new $ImportedModuleCtx(C,value) +C.expect=',' +return C} +if(C.expect=='qual'){C.expect=',' +C.tree[C.tree.length-1].name+= +'.'+value +C.tree[C.tree.length-1].alias+= +'.'+value +return C} +if(C.expect=='alias'){C.expect=',' +C.tree[C.tree.length-1].alias= +value +return C} +break +case '.': +if(C.expect==','){C.expect='qual' +return C} +break +case ',': +if(C.expect==','){C.expect='id' +return C} +break +case 'as': +if(C.expect==','){C.expect='alias' +return C} +break +case 'eol': +if(C.expect==','){C.bind_names() +return $transition(C.parent,token)} +break} +$_SyntaxError(C,'token '+token+' after '+C)} +$ImportCtx.prototype.bind_names=function(){ var scope=$get_scope(this) this.tree.forEach(function(item){if(item.name==item.alias){var name=item.name,parts=name.split('.'),bound=name if(parts.length>1){bound=parts[0]}}else{bound=item.alias} $bind(bound,scope,this)},this)} -this.to_js=function(){this.js_processed=true +$ImportCtx.prototype.to_js=function(){this.js_processed=true var scope=$get_scope(this),res=[],module=$get_module(this) this.tree.forEach(function(item){var mod_name=item.name,aliases=(item.name==item.alias)? '{}' :('{"'+mod_name+'" : "'+ @@ -2126,30 +3028,21 @@ for(var i=0;i < mod_elts.length;i++){module.imports[mod_elts.slice(0,i+1).join(" var js='$B.$import("'+mod_name+'", [],'+aliases+ ','+localns+', true);' res.push(js)}) -return res.join('')+'None;'}} +return res.join('')+'_b_.None;'} var $ImportedModuleCtx=$B.parser.$ImportedModuleCtx=function(C,name){this.type='imported module' this.parent=C this.name=name this.alias=name -C.tree[C.tree.length]=this -this.toString=function(){return ' (imported module) '+this.name} -this.to_js=function(){this.js_processed=true -return '"'+this.name+'"'}} -var $IntCtx=$B.parser.$IntCtx=function(C,value){ -this.type='int' -this.value=value -this.parent=C -this.tree=[] -C.tree[C.tree.length]=this -this.toString=function(){return 'int '+this.value} -this.to_js=function(){this.js_processed=true -var v=parseInt(value[1],value[0]) -if(v > $B.min_int && v < $B.max_int){return v} -else{return '$B.long_int.$factory("'+value[1]+'", '+value[0]+')'}}} -var $JSCode=$B.parser.$JSCode=function(js){this.js=js -this.toString=function(){return this.js} -this.to_js=function(){this.js_processed=true -return this.js}} +C.tree[C.tree.length]=this} +$ImportedModuleCtx.prototype.toString=function(){return ' (imported module) '+this.name} +$ImportedModuleCtx.prototype.transition=function(token,value){var C=this} +$ImportedModuleCtx.prototype.to_js=function(){this.js_processed=true +return '"'+this.name+'"'} +var $JSCode=$B.parser.$JSCode=function(js){this.js=js} +$JSCode.prototype.toString=function(){return this.js} +$JSCode.prototype.transition=function(token,value){var C=this} +$JSCode.prototype.to_js=function(){this.js_processed=true +return this.js} var $KwArgCtx=$B.parser.$KwArgCtx=function(C){ this.type='kwarg' this.parent=C.parent @@ -2161,14 +3054,17 @@ var value=this.tree[0].value var ctx=C.parent.parent if(ctx.kwargs===undefined){ctx.kwargs=[value]} else if(ctx.kwargs.indexOf(value)==-1){ctx.kwargs.push(value)} 
-else{$_SyntaxError(C,['keyword argument repeated'])} -this.toString=function(){return 'kwarg '+this.tree[0]+'='+this.tree[1]} -this.to_js=function(){this.js_processed=true +else{$_SyntaxError(C,['keyword argument repeated'])}} +$KwArgCtx.prototype.toString=function(){return 'kwarg '+this.tree[0]+'='+this.tree[1]} +$KwArgCtx.prototype.transition=function(token,value){var C=this +if(token==','){return new $CallArgCtx(C.parent.parent)} +return $transition(C.parent,token)} +$KwArgCtx.prototype.to_js=function(){this.js_processed=true var key=this.tree[0].value if(key.substr(0,2)=='$$'){key=key.substr(2)} var res='{$nat:"kw",name:"'+key+'",' return res+'value:'+ -$to_js(this.tree.slice(1,this.tree.length))+'}'}} +$to_js(this.tree.slice(1,this.tree.length))+'}'} var $LambdaCtx=$B.parser.$LambdaCtx=function(C){ this.type='lambda' this.parent=C @@ -2177,23 +3073,44 @@ this.tree=[] this.args_start=$pos+6 this.vars=[] this.locals=[] -this.toString=function(){return '(lambda) '+this.args_start+' '+this.body_start} -this.to_js=function(){this.js_processed=true -var node=$get_node(this),module=$get_module(this),src=$get_src(C),args=src.substring(this.args_start,this.body_start),body=src.substring(this.body_start+1,this.body_end) +this.node=$get_node(this) +this.node.binding={} +this.positional_list=[] +this.default_list=[] +this.other_args=null +this.other_kw=null +this.after_star=[]} +$LambdaCtx.prototype.toString=function(){return '(lambda) '+this.args_start+' '+this.body_start} +$LambdaCtx.prototype.transition=function(token,value){var C=this +if(token==':' && C.args===undefined){C.args=C.tree +C.tree=[] +C.body_start=$pos +return new $AbstractExprCtx(C,false)}if(C.args !==undefined){ +C.body_end=$pos +return $transition(C.parent,token)} +if(C.args===undefined && token !="("){return $transition(new $FuncArgs(C),token,value)} +$_SyntaxError(C,'token '+token+' after '+C)} +$LambdaCtx.prototype.to_js=function(){this.js_processed=true +var C=this.parent,node=this.node,module=$get_module(this),src=$get_src(C),args=src.substring(this.args_start,this.body_start),body=src.substring(this.body_start+1,this.body_end) body=body.replace(/\\\n/g,' ') body=body.replace(/\n/g,' ') var scope=$get_scope(this) -var rand=$B.UUID(),func_name='lambda_'+$B.lambda_magic+'_'+rand,py='def '+func_name+'('+args+'):\n' +var rand=$B.UUID(),func_name='lambda_'+$B.lambda_magic+'_'+rand,py='def '+func_name+'('+args+'):' py+=' return '+body var lambda_name='lambda'+rand,module_name=module.id.replace(/\./g,'_') var root=$B.py2js(py,module_name,lambda_name,scope,node.line_num) var js=root.to_js() -js='(function($locals_'+lambda_name+'){\n'+js+ -'\nreturn $locals.'+func_name+'\n})({})' +var params=`$locals_${lambda_name}`,args="{}" +if(module.is_comp){ +params+=`,$locals_${module.id.replace(/\./g,'_')}` +args+=`,typeof $locals_${module.id.replace(/\./g,'_')}`+ +`==="undefined" ?{}:$locals_${module.id.replace(/\./g,'_')}`} +js=`(function(${params}){\n`+js+ +`\nreturn $locals.${func_name}})(${args})` $B.clear_ns(lambda_name) $B.$py_src[lambda_name]=null delete $B.$py_src[lambda_name] -return js}} +return js} var $ListOrTupleCtx=$B.parser.$ListOrTupleCtx=function(C,real){ this.type='list_or_tuple' this.start=$pos @@ -2202,8 +3119,8 @@ this.expect='id' this.closed=false this.parent=C this.tree=[] -C.tree[C.tree.length]=this -this.toString=function(){switch(this.real){case 'list': +C.tree[C.tree.length]=this} +$ListOrTupleCtx.prototype.toString=function(){switch(this.real){case 'list': return '(list) ['+this.tree+']' case 'list_comp': case 
'gen_expr': @@ -2211,63 +3128,180 @@ return '('+this.real+') ['+this.intervals+'-'+ this.tree+']' default: return '(tuple) ('+this.tree+')'}} -this.is_comp=function(){switch(this.real){case 'list_comp': +$ListOrTupleCtx.prototype.transition=function(token,value){var C=this +if(C.closed){if(token=='['){return new $AbstractExprCtx( +new $SubCtx(C.parent),false)} +if(token=='('){return new $CallCtx(C.parent)} +return $transition(C.parent,token,value)}else{if(C.expect==','){switch(C.real){case 'tuple': case 'gen_expr': +if(token==')'){var close=true +while(C.type=="list_or_tuple" && +C.real=="tuple" && +C.parent.type=="expr" && +C.parent.parent.type=="node" && +C.tree.length==1){ +close=false +var node=C.parent.parent,ix=node.tree.indexOf(C.parent),expr=C.tree[0] +expr.parent=node +expr.$in_parens=true +node.tree.splice(ix,1,expr) +C=expr.tree[0]} +if(close){C.close()} +if(C.real=='gen_expr'){ +if(C.expression.yields){for(const _yield of C.expression.yields){$pos=_yield[1] +$_SyntaxError(C,["'yield' inside generator expression"])}} +C.intervals.push($pos)} +if(C.parent.type=="packed"){return C.parent.parent} +if(C.parent.type=="abstract_expr" && +C.parent.assign){ +C.parent.parent.tree.pop() +var expr=new $ExprCtx(C.parent.parent,"assign",false) +expr.tree=C.parent.tree +expr.tree[0].parent=expr +expr.assign=C.parent.assign +return expr} +return C.parent} +break +case 'list': +case 'list_comp': +if(token==']'){C.close() +if(C.real=='list_comp'){ +if(C.expression.yields){for(const _yield of C.expression.yields){$pos=_yield[1] +$_SyntaxError(C,["'yield' inside list comprehension"])}} +C.intervals.push($pos)} +if(C.parent.type=="packed"){if(C.parent.tree.length > 0){return C.parent.tree[0]}else{return C.parent.parent}} +return C.parent} +break case 'dict_or_set_comp': -return true} -return false} -this.get_src=function(){ -var src=$get_module(this).src -var scope=$get_scope(this) -if(scope.comments===undefined){return src} -scope.comments.forEach(function(comment){var start=comment[0],len=comment[1] -src=src.substr(0,start)+' '.repeat(len+1)+ -src.substr(start+len+1)}) -return src} -this.bind_ids=function(scope){ -this.tree.forEach(function(item){if(item.type=='id'){$bind(item.value,scope,this) -item.bound=true}else if(item.type=='expr' && item.tree[0].type=="id"){$bind(item.tree[0].value,scope,this) -item.tree[0].bound=true}else if(item.type=='expr' && item.tree[0].type=="packed"){if(item.tree[0].tree[0].type=='id'){$bind(item.tree[0].tree[0].value,scope,this) -item.tree[0].tree[0].bound=true}}else if(item.type=='list_or_tuple' || -(item.type=="expr" && -item.tree[0].type=='list_or_tuple')){if(item.type=="expr"){item=item.tree[0]} -item.bind_ids(scope)}},this)} -this.packed_indices=function(){var ixs=[] +if(token=='}'){ +if(C.expression.yields){for(const _yield of C.expression.yields){$pos=_yield[1] +var comp_type=C.parent.real=="set_comp" ? 
+"set" :"dict" +$_SyntaxError(C,[`'yield' inside ${comp_type}comprehension`])}} +C.intervals.push($pos) +return $transition(C.parent,token)} +break} +switch(token){case ',': +if(C.real=='tuple'){C.has_comma=true} +C.expect='id' +return C +case 'for': +if(C.real=='list'){if(this.tree.length > 1){ +$_SyntaxError(C,"unparenthesized "+ +"expression before 'for'")} +C.real='list_comp'} +else{C.real='gen_expr'} +C.intervals=[C.start+1] +C.expression=C.tree +if(C.yields){C.expression.yields=C.yields +delete C.yields} +C.tree=[] +var comp=new $ComprehensionCtx(C) +return new $TargetListCtx(new $CompForCtx(comp))} +return $transition(C.parent,token,value)}else if(C.expect=='id'){switch(C.real){case 'tuple': +if(token==')'){C.close() +return C.parent} +if(token=='eol' && C.implicit===true){C.close() +return $transition(C.parent,token)} +break +case 'gen_expr': +if(token==')'){C.close() +return $transition(C.parent,token)} +break +case 'list': +if(token==']'){C.close() +return C} +break} +switch(token){case '=': +if(C.real=='tuple' && +C.implicit===true){C.close() +C.parent.tree.pop() +var expr=new $ExprCtx(C.parent,'tuple',false) +expr.tree=[C] +C.parent=expr +return $transition(C.parent,token)} +break +case ')': +break +case ']': +if(C.real=='tuple' && +C.implicit===true){ +return $transition(C.parent,token,value)}else{break} +case ',': +$_SyntaxError(C,'unexpected comma inside list') +default: +C.expect=',' +var expr=new $AbstractExprCtx(C,false) +return $transition(expr,token,value)}}else{return $transition(C.parent,token,value)}}} +$ListOrTupleCtx.prototype.close=function(){this.closed=true +for(var i=0,len=this.tree.length;i < len;i++){ +var elt=this.tree[i] +if(elt.type=="expr" && +elt.tree[0].type=="list_or_tuple" && +elt.tree[0].real=="tuple" && +elt.tree[0].tree.length==1 && +elt.tree[0].expect==","){this.tree[i]=elt.tree[0].tree[0] +this.tree[i].parent=this}}} +$ListOrTupleCtx.prototype.is_comp=function(){switch(this.real){case 'list_comp': +case 'gen_expr': +case 'dict_or_set_comp': +return true} +return false} +$ListOrTupleCtx.prototype.get_src=function(){ +var src=$get_module(this).src +var scope=$get_scope(this) +if(scope.comments===undefined){return src} +scope.comments.forEach(function(comment){var start=comment[0],len=comment[1] +src=src.substr(0,start)+' '.repeat(len+1)+ +src.substr(start+len+1)}) +return src} +$ListOrTupleCtx.prototype.bind_ids=function(scope){ +this.tree.forEach(function(item){if(item.type=='id'){$bind(item.value,scope,this) +item.bound=true}else if(item.type=='expr' && item.tree[0].type=="id"){$bind(item.tree[0].value,scope,this) +item.tree[0].bound=true}else if(item.type=='expr' && item.tree[0].type=="packed"){var ctx=item.tree[0].tree[0] +if(ctx.type=='expr' && ctx.tree[0].type=='id'){$bind(ctx.tree[0].value,scope,this) +ctx.tree[0].bound=true}}else if(item.type=='list_or_tuple' || +(item.type=="expr" && +item.tree[0].type=='list_or_tuple')){if(item.type=="expr"){item=item.tree[0]} +item.bind_ids(scope)}},this)} +$ListOrTupleCtx.prototype.packed_indices=function(){var ixs=[] for(var i=0;i < this.tree.length;i++){var t=this.tree[i] if(t.type=="expr"){t=t.tree[0] if(t.type=="packed" || (t.type=="call" && t.func.type=="packed")){ixs.push(i)}}} return ixs} -this.unpack=function(packed){var js="",res +$ListOrTupleCtx.prototype.unpack=function(packed){var js="",res for(var i=0;i < this.tree.length;i++){if(packed.indexOf(i)>-1){res="_b_.list.$factory("+this.tree[i].to_js()+")"}else{res="["+this.tree[i].to_js()+"]"} if(i > 0){res=".concat("+res+")"} js+=res} return 
js} -this.to_js=function(){this.js_processed=true +$ListOrTupleCtx.prototype.to_js=function(){this.js_processed=true var scope=$get_scope(this),sc=scope,scope_id=scope.id.replace(/\//g, '_'), - pos = 0 - var root = $get_module(this), - module_name = root.module - switch(this.real) { - case 'list': - var packed = this.packed_indices() - if(packed.length > 0){ - return '$B.$list(' + this.unpack(packed) + ')' - } - return '$B.$list([' + $to_js(this.tree) + '])' - case 'list_comp': - case 'gen_expr': - case 'dict_or_set_comp': - var src = this.get_src() - var res1 = [], items = [] - var qesc = new RegExp('"', "g") //to escape double quotes in arguments + pos = 0 + var root = $get_module(this), + module_name = root.module + switch(this.real) { + case 'list': + var packed = this.packed_indices() + if(packed.length > 0){ + return '$B.$list(' + this.unpack(packed) + ')' + } + return '$B.$list([' + $to_js(this.tree) + '])' + case 'list_comp': + case 'gen_expr': + case 'dict_or_set_comp': + var src = this.get_src() + var res1 = [], items = [] + var qesc = new RegExp('"', "g") //to escape double quotes in arguments var comments=root.comments for(var i=1;i < this.intervals.length;i++){var start=this.intervals[i-1],end=this.intervals[i],txt=src.substring(start,end) -comments.forEach(function(comment){if(comment[0]> start && comment[0]< end){ +for(var j=comments.length-1;j >=0;j--){var comment=comments[j] +if(comment[0]> start && comment[0]< end){ var pos=comment[0]-start txt=txt.substr(0,pos)+ ' '.repeat(comment[1])+ -txt.substr(pos+comment[1]+1)}}) +txt.substr(pos+comment[1]+1)}} +txt=txt.replace(/\\\n/g," ") items.push(txt) var lines=txt.split('\n') var res2=[] @@ -2280,26 +3314,31 @@ res1.push('['+res2.join(',')+']')} var line_num=$get_node(this).line_num switch(this.real){case 'list_comp': var lc=$B.$list_comp(items), -py=lc[0],ix=lc[1],listcomp_name='lc'+ix,save_pos=$pos -var root=$B.py2js({src:py,is_comp:true},module_name,listcomp_name,scope,1) +py=lc[0],ix=lc[1],listcomp_name='lc'+$B.lambda_magic+ix,save_pos=$pos,line_info=line_num+','+module_name +var root=$B.py2js( +{src:py,is_comp:true,line_info:line_info},module_name,listcomp_name,scope,1) +var outermost_expr=root.outermost_expr +if($get_node(this).has_yield){outermost_expr=this.tree[0].tree[0].tree[1]} +if(outermost_expr===undefined){outermost_expr=root.first_for.tree[1]} +var outer_most=outermost_expr.to_js() $pos=save_pos var js=root.to_js() root=null $B.clear_ns(listcomp_name) delete $B.$py_src[listcomp_name] -js+='return $locals_lc'+ix+'["x'+ix+'"]' -js='(function($locals_'+listcomp_name+'){'+ -js+'})({})' -return js +js+='return $locals_'+listcomp_name+'["x'+ix+'"]' +js=`function(expr){${js}})(${outer_most})` +if(this.is_await){js='async '+js} +return '('+js case 'dict_or_set_comp': -if(this.expression.length==1){return $B.$gen_expr(module_name,scope,items,line_num)} +if(this.expression.length==1){return $B.$gen_expr(module_name,scope,items,line_num,true)} return $B.$dict_comp(module_name,scope,items,line_num)} return $B.$gen_expr(module_name,scope,items,line_num) case 'tuple': var packed=this.packed_indices() if(packed.length > 0){return '$B.fast_tuple('+this.unpack(packed)+')'} if(this.tree.length==1 && this.has_comma===undefined){return this.tree[0].to_js()} -return '$B.fast_tuple(['+$to_js(this.tree)+'])'}}} +return '$B.fast_tuple(['+$to_js(this.tree)+'])'}} var $NodeCtx=$B.parser.$NodeCtx=function(node){ this.node=node node.C=this @@ -2317,9 +3356,118 @@ if(_break_flag){break} tree_node=tree_node.parent} 
if(scope===null){scope=tree_node.parent ||tree_node } this.node.locals=clone(scope.binding) -this.scope=scope -this.toString=function(){return 'node '+this.tree} -this.to_js=function(){if(this.js !==undefined){return this.js} +this.scope=scope} +$NodeCtx.prototype.toString=function(){return 'node '+this.tree} +$NodeCtx.prototype.transition=function(token,value){var C=this +switch(token){case 'id': +case 'imaginary': +case 'int': +case 'float': +case 'str': +case 'bytes': +case '[': +case '(': +case '{': +case 'not': +case 'lambda': +case '.': +var expr=new $AbstractExprCtx(C,true) +return $transition(expr,token,value) +case 'op': +switch(value){case '*': +case '+': +case '-': +case '~': +var expr=new $AbstractExprCtx(C,true) +return $transition(expr,token,value)} +break +case 'async': +return new $AsyncCtx(C) +case 'await': +return new $AbstractExprCtx(new $AwaitCtx(C),true) +case 'class': +return new $ClassCtx(C) +case 'continue': +return new $ContinueCtx(C) +case '__debugger__': +return new $DebuggerCtx(C) +case 'break': +return new $BreakCtx(C) +case 'def': +return new $DefCtx(C) +case 'for': +return new $TargetListCtx(new $ForExpr(C)) +case 'if': +case 'while': +return new $AbstractExprCtx( +new $ConditionCtx(C,token),false) +case 'elif': +var previous=$previous(C) +if(['condition'].indexOf(previous.type)==-1 || +previous.token=='while'){$_SyntaxError(C,'elif after '+previous.type)} +return new $AbstractExprCtx( +new $ConditionCtx(C,token),false) +case 'else': +var previous=$previous(C) +if(['condition','except','for']. +indexOf(previous.type)==-1){$_SyntaxError(C,'else after '+previous.type)} +return new $SingleKwCtx(C,token) +case 'finally': +var previous=$previous(C) +if(['try','except'].indexOf(previous.type)==-1 && +(previous.type !='single_kw' || +previous.token !='else')){$_SyntaxError(C,'finally after '+previous.type)} +return new $SingleKwCtx(C,token) +case 'try': +return new $TryCtx(C) +case 'except': +var previous=$previous(C) +if(['try','except'].indexOf(previous.type)==-1){$_SyntaxError(C,'except after '+previous.type)} +return new $ExceptCtx(C) +case 'assert': +return new $AbstractExprCtx( +new $AssertCtx(C),false,true) +case 'from': +return new $FromCtx(C) +case 'import': +return new $ImportCtx(C) +case 'global': +return new $GlobalCtx(C) +case 'nonlocal': +return new $NonlocalCtx(C) +case 'lambda': +return new $LambdaCtx(C) +case 'pass': +return new $PassCtx(C) +case 'raise': +return new $AbstractExprCtx(new $RaiseCtx(C),true) +case 'return': +return new $AbstractExprCtx(new $ReturnCtx(C),true) +case 'with': +return new $AbstractExprCtx(new $WithCtx(C),false) +case 'yield': +return new $AbstractExprCtx(new $YieldCtx(C),true) +case 'del': +return new $AbstractExprCtx(new $DelCtx(C),true) +case '@': +return new $DecoratorCtx(C) +case ',': +if(C.tree && C.tree.length==0){$_SyntaxError(C,'token '+token+' after '+C)} +var first=C.tree[0] +C.tree=[] +var implicit_tuple=new $ListOrTupleCtx(C) +implicit_tuple.real="tuple" +implicit_tuple.implicit=0 +implicit_tuple.tree.push(first) +first.parent=implicit_tuple +return implicit_tuple +case 'eol': +if(C.tree.length==0){ +C.node.parent.children.pop() +return C.node.parent.C} +return C} +$_SyntaxError(C,'token '+token+' after '+C)} +$NodeCtx.prototype.to_js=function(){if(this.js !==undefined){return this.js} this.js_processed=true if(this.tree.length > 1){var new_node=new $Node() var ctx=new $NodeCtx(new_node) @@ -2331,26 +3479,31 @@ this.js="" if(this.tree[0]){var is_not_def=this.scope.ntype !="def" if(this.tree[0].annotation){ 
if(is_not_def){if(this.tree[0].type=="expr" && -this.tree[0].tree[0].type=="id"){return "$locals.__annotations__.$string_dict['"+ -this.tree[0].tree[0].value+"'] = "+ -this.tree[0].annotation.to_js()+";"}else if(this.tree[0].type=="def"){ +! this.tree[0].$in_parens && +this.tree[0].tree[0].type=="id"){var js="" +if(this.create_annotations){js+="$locals.__annotations__ = $B.empty_dict();"} +return js+"_b_.dict.$setitem($locals.__annotations__, '"+ +this.tree[0].tree[0].value+"', "+ +this.tree[0].annotation.to_js()+");"}else if(this.tree[0].type=="def"){ this.js=this.tree[0].annotation.to_js()+";"}else{ this.js="" this.tree=[]}}else if(this.tree[0].type !="def"){ this.tree=[]}}else if(this.tree[0].type=="assign" && +! this.tree[0].tree[0].$in_parens && this.tree[0].tree[0].annotation){ var left=this.tree[0].tree[0],right=this.tree[0].tree[1] -this.js="var $value = "+right.to_js()+";" +if(this.create_annotations){this.js+="$locals.__annotations__ = $B.empty_dict();"} +this.js+="var $value = "+right.to_js()+";" this.tree[0].tree.splice(1,1) new $RawJSCtx(this.tree[0],"$value") -if(left.tree[0]&& left.tree[0].type=="id" && is_not_def){this.js+="$locals.__annotations__.$string_dict['"+ -left.tree[0].value+"'] = "+ -left.annotation.to_js()+";"}else{ +if(left.tree[0]&& left.tree[0].type=="id" && is_not_def){this.js+="_b_.dict.$setitem($locals.__annotations__, '"+ +left.tree[0].value+"', "+ +left.annotation.to_js()+");"}else{ this.js+=$to_js(this.tree)+";" if(is_not_def){this.js+=left.annotation.to_js()} return this.js}}} -if(node.children.length==0){this.js+=$to_js(this.tree)+';'}else{this.js+=$to_js(this.tree)} -return this.js}} +if(this.node.children.length==0){this.js+=$to_js(this.tree)+';'}else{this.js+=$to_js(this.tree)} +return this.js} var $NodeJS=$B.parser.$NodeJS=function(js){var node=new $Node() new $NodeJSCtx(node,js) return node} @@ -2358,12 +3511,12 @@ var $NodeJSCtx=$B.parser.$NodeJSCtx=function(node,js){ this.node=node node.C=this this.type='node_js' -this.tree=[js] -this.toString=function(){return 'js '+js} -this.to_js=function(){this.js_processed=true -return js}} +this.tree=[js]} +$NodeJSCtx.prototype.toString=function(){return 'js '+js} +$NodeJSCtx.prototype.to_js=function(){this.js_processed=true +return this.tree[0]} var $NonlocalCtx=$B.parser.$NonlocalCtx=function(C){ -this.type='global' +this.type='nonlocal' this.parent=C this.tree=[] this.names={} @@ -2371,12 +3524,27 @@ C.tree[C.tree.length]=this this.expect='id' this.scope=$get_scope(this) this.scope.nonlocals=this.scope.nonlocals ||{} -if(this.scope.C===undefined){$_SyntaxError(C,["nonlocal declaration not allowed at module level"])} -this.toString=function(){return 'global '+this.tree} -this.add=function(name){if(this.scope.binding[name]=="arg"){$_SyntaxError(C,["name '"+name+"' is parameter and nonlocal"])} +if(this.scope.C===undefined){$_SyntaxError(C,["nonlocal declaration not allowed at module level"])}} +$NonlocalCtx.prototype.toString=function(){return 'nonlocal '+this.tree} +$NonlocalCtx.prototype.add=function(name){if(this.scope.binding[name]=="arg"){$_SyntaxError(C,["name '"+name+"' is parameter and nonlocal"])} this.names[name]=[false,$pos] this.scope.nonlocals[name]=true} -this.transform=function(node,rank){var pscope=this.scope.parent_block +$NonlocalCtx.prototype.transition=function(token,value){var C=this +switch(token){case 'id': +if(C.expect=='id'){new $IdCtx(C,value) +C.add(value) +C.expect=',' +return C} +break +case ',': +if(C.expect==','){C.expect='id' +return C} +break +case 'eol': 
+if(C.expect==','){return $transition(C.parent,token)} +break} +$_SyntaxError(C,'token '+token+' after '+C)} +$NonlocalCtx.prototype.transform=function(node,rank){var C=this.parent,pscope=this.scope.parent_block if(pscope.C===undefined){$_SyntaxError(C,["no binding for nonlocal '"+ $B.last(Object.keys(this.names))+"' found"])}else{while(pscope !==undefined && pscope.C !==undefined){for(var name in this.names){if(pscope.binding[name]!==undefined){this.names[name]=[true]}} pscope=pscope.parent_block} @@ -2384,16 +3552,75 @@ for(var name in this.names){if(!this.names[name][0]){console.log('nonlocal error $pos=this.names[name][1] $_SyntaxError(C,["no binding for nonlocal '"+ name+"' found"])}}}} -this.to_js=function(){this.js_processed=true -return ''}} +$NonlocalCtx.prototype.to_js=function(){this.js_processed=true +return ''} var $NotCtx=$B.parser.$NotCtx=function(C){ this.type='not' this.parent=C this.tree=[] -C.tree[C.tree.length]=this -this.toString=function(){return 'not ('+this.tree+')'} -this.to_js=function(){this.js_processed=true -return '!$B.$bool('+$to_js(this.tree)+')'}} +C.tree[C.tree.length]=this} +$NotCtx.prototype.toString=function(){return 'not ('+this.tree+')'} +$NotCtx.prototype.transition=function(token,value){var C=this +switch(token){case 'in': +C.parent.parent.tree.pop() +return new $ExprCtx(new $OpCtx(C.parent,'not_in'),'op',false) +case 'id': +case 'imaginary': +case 'int': +case 'float': +case 'str': +case 'bytes': +case '[': +case '(': +case '{': +case '.': +case 'not': +case 'lambda': +var expr=new $AbstractExprCtx(C,false) +return $transition(expr,token,value) +case 'op': +var a=value +if('+'==a ||'-'==a ||'~'==a){var expr=new $AbstractExprCtx(C,false) +return $transition(expr,token,value)}} +return $transition(C.parent,token)} +$NotCtx.prototype.to_js=function(){this.js_processed=true +return '!$B.$bool('+$to_js(this.tree)+')'} +var $NumberCtx=$B.parser.$NumberCtx=function(type,C,value){ +this.type=type +this.value=value +this.parent=C +this.tree=[] +C.tree[C.tree.length]=this} +$NumberCtx.prototype.toString=function(){return this.type+' '+this.value} +$NumberCtx.prototype.transition=function(token,value){var C=this +switch(token){case 'id': +case 'imaginary': +case 'int': +case 'float': +case 'str': +case 'bytes': +case '[': +case '(': +case '{': +case 'not': +case 'lambda': +$_SyntaxError(C,'token '+token+' after '+ +C)} +return $transition(C.parent,token,value)} +$NumberCtx.prototype.to_js=function(){this.js_processed=true +var type=this.type,value=this.value +if(type=='int'){var v=parseInt(value[1],value[0]) +if(v > $B.min_int && v < $B.max_int){if(this.unary_op){v=eval(this.unary_op+v)} +return v}else{var v=$B.long_int.$factory(value[1],value[0]) +switch(this.unary_op){case "-": +v=$B.long_int.__neg__(v) +break +case "~": +v=$B.long_int.__invert__(v) +break} +return '$B.fast_long_int("'+v.value+'", '+v.pos+')'}}else if(type=="float"){ +if(/^\d+$/.exec(value)||/^\d+\.\d*$/.exec(value)){return '(new Number('+this.value+'))'} +return '_b_.float.$factory('+value+')'}else if(type=="imaginary"){return '$B.make_complex(0,'+value+')'}} var $OpCtx=$B.parser.$OpCtx=function(C,op){ this.type='op' this.op=op @@ -2403,9 +3630,47 @@ this.scope=$get_scope(this) if(C.type=="expr"){if(['int','float','str'].indexOf(C.tree[0].type)>-1){this.left_type=C.tree[0].type}else if(C.tree[0].type=="id"){var binding=this.scope.binding[C.tree[0].value] if(binding){this.left_type=binding.type}}} C.parent.tree.pop() -C.parent.tree.push(this) -this.toString=function(){return '(op 
'+this.op+') ['+this.tree+']'} -this.to_js=function(){this.js_processed=true +C.parent.tree.push(this)} +$OpCtx.prototype.toString=function(){return '(op '+this.op+') ['+this.tree+']'} +$OpCtx.prototype.transition=function(token,value){var C=this +if(C.op===undefined){$_SyntaxError(C,['C op undefined '+C])} +if(C.op.substr(0,5)=='unary'){if(token !='eol'){if(C.parent.type=='assign' || +C.parent.type=='return'){ +C.parent.tree.pop() +var t=new $ListOrTupleCtx(C.parent,'tuple') +t.tree.push(C) +C.parent=t +return t}} +if(C.tree.length==2 && C.tree[1].type=="expr" && +C.tree[1].tree[0].type=="int"){ +C.parent.tree.pop() +C.parent.tree.push(C.tree[1]) +C.tree[1].parent=C.parent +C.tree[1].tree[0].unary_op=C.tree[0].op}} +switch(token){case 'id': +case 'imaginary': +case 'int': +case 'float': +case 'str': +case 'bytes': +case '[': +case '(': +case '{': +case '.': +case 'not': +case 'lambda': +return $transition(new $AbstractExprCtx(C,false),token,value) +case 'op': +switch(value){case '+': +case '-': +case '~': +return new $UnaryCtx(C,value)} +default: +if(C.tree[C.tree.length-1].type== +'abstract_expr'){$_SyntaxError(C,'token '+token+' after '+ +C)}} +return $transition(C.parent,token)} +$OpCtx.prototype.to_js=function(){this.js_processed=true var comps={'==':'eq','!=':'ne','>=':'ge','<=':'le','<':'lt','>':'gt'} if(comps[this.op]!==undefined){var method=comps[this.op] if(this.tree[0].type=='expr' && this.tree[1].type=='expr'){var t0=this.tree[0].tree[0],t1=this.tree[1].tree[0],js0=t0.to_js(),js1=t1.to_js() @@ -2425,10 +3690,10 @@ default: return '$B.$TypeError("unorderable types: '+ " int() "+this.op+' str()")'} case 'id': -return 'typeof '+js0+' == "number" ? '+ +return '(typeof '+js0+' == "number" ? '+ js0+this.op+js1+' : $B.rich_comp("__'+ method+'__",'+this.tree[0].to_js()+ -','+this.tree[1].to_js()+')'} +','+this.tree[1].to_js()+'))'} break; case 'str': switch(t0.type){case 'str': @@ -2442,10 +3707,10 @@ default: return '$B.$TypeError("unorderable types: '+ ' str() '+this.op+' int()")'} case 'id': -return 'typeof '+js0+' == "string" ? '+ +return '(typeof '+js0+' == "string" ? 
'+ js0+this.op+js1+' : $B.rich_comp("__'+ method+'__",'+this.tree[0].to_js()+ -','+this.tree[1].to_js()+')'} +','+this.tree[1].to_js()+'))'} break; case 'id': if(t0.type=='id'){return 'typeof '+js0+'!="object" && typeof '+ @@ -2484,7 +3749,7 @@ if(Number.isSafeInteger(v)){return op+v} return '$B.$getattr('+x.to_js()+', "'+ method+'")()' case 'float': -return 'float.$factory('+op+x.value+')' +return '_b_.float.$factory('+op+x.value+')' case 'imaginary': return '$B.make_complex(0,'+op+x.value+')'}} return '$B.$getattr('+this.tree[1].to_js()+',"'+ @@ -2556,8 +3821,8 @@ if(comps[this.op]!==undefined){return '$B.rich_comp("__'+$operators[this.op]+'__ this.tree[0].to_js()+','+this.tree[1].to_js()+')'}else{return '$B.rich_op("'+$operators[this.op]+'", '+ this.tree[0].to_js()+', '+this.tree[1].to_js()+ ')'}}} -this.simple_js=function(){function sjs(elt){if(elt.type=='op'){return elt.simple_js()} -else if(elt.type=='expr' && elt.tree[0].type=='list_or_tuple' +$OpCtx.prototype.simple_js=function(){var op=this.op +function sjs(elt){if(elt.type=='op'){return elt.simple_js()}else if(elt.type=='expr' && elt.tree[0].type=='list_or_tuple' && elt.tree[0].real=='tuple' && elt.tree[0].tree.length==1 && elt.tree[0].tree[0].type=='expr'){return '('+elt.tree[0].tree[0].tree[0].simple_js()+')'}else{return elt.tree[0].to_js()}} @@ -2565,7 +3830,7 @@ if(op=='+'){return '$B.add('+sjs(this.tree[0])+','+ sjs(this.tree[1])+')'}else if(op=='-'){return '$B.sub('+sjs(this.tree[0])+','+ sjs(this.tree[1])+')'}else if(op=='*'){return '$B.mul('+sjs(this.tree[0])+','+ sjs(this.tree[1])+')'}else if(op=='/'){return '$B.div('+sjs(this.tree[0])+','+ -sjs(this.tree[1])+')'}else{return sjs(this.tree[0])+op+sjs(this.tree[1])}}} +sjs(this.tree[1])+')'}else{return sjs(this.tree[0])+op+sjs(this.tree[1])}} var $PackedCtx=$B.parser.$PackedCtx=function(C){ this.type='packed' if(C.parent.type=='list_or_tuple' && @@ -2575,35 +3840,61 @@ if(child.type=='expr' && child.tree.length > 0 && child.tree[0].type=='packed'){$_SyntaxError(C,["two starred expressions in assignment"])}}} this.parent=C this.tree=[] -C.tree[C.tree.length]=this -this.toString=function(){return '(packed) '+this.tree} -this.to_js=function(){this.js_processed=true -return $to_js(this.tree)}} +C.tree[C.tree.length]=this} +$PackedCtx.prototype.toString=function(){return '(packed) '+this.tree} +$PackedCtx.prototype.transition=function(token,value){var C=this +if(C.tree.length > 0 && token=="["){ +return $transition(C.tree[0],token,value)} +if(token=='id'){var expr=new $AbstractExprCtx(C,false) +expr.packed=true +C.parent.expect=',' +var id=$transition(expr,token,value) +return id}else if(token=="["){C.parent.expect=',' +return new $ListOrTupleCtx(C,"list")}else if(token=="("){C.parent.expect=',' +return new $ListOrTupleCtx(C,"tuple")}else if(token=="]"){return $transition(C.parent,token,value)}else if(token=="{"){C.parent.expect=',' +return new $DictOrSetCtx(C)} +return C.parent.transition(token,C)} +$PackedCtx.prototype.to_js=function(){this.js_processed=true +return $to_js(this.tree)} var $PassCtx=$B.parser.$PassCtx=function(C){ this.type='pass' this.parent=C this.tree=[] -C.tree[C.tree.length]=this -this.toString=function(){return '(pass)'} -this.to_js=function(){this.js_processed=true -return 'void(0)'}} +C.tree[C.tree.length]=this} +$PassCtx.prototype.toString=function(){return '(pass)'} +$PassCtx.prototype.transition=function(token,value){var C=this +if(token=='eol'){return C.parent} +$_SyntaxError(C,'token '+token+' after '+C)} 
+$PassCtx.prototype.to_js=function(){this.js_processed=true +return 'void(0)'} var $RaiseCtx=$B.parser.$RaiseCtx=function(C){ this.type='raise' this.parent=C this.tree=[] C.tree[C.tree.length]=this -this.toString=function(){return ' (raise) '+this.tree} -this.to_js=function(){this.js_processed=true -var res='' -if(this.tree.length==0){return '$B.$raise()'} -var exc_js=this.tree[0].to_js() -return '$B.$raise('+exc_js+')'}} +this.scope_type=$get_scope(this).ntype} +$RaiseCtx.prototype.toString=function(){return ' (raise) '+this.tree} +$RaiseCtx.prototype.transition=function(token,value){var C=this +switch(token){case 'id': +if(C.tree.length==0){return new $IdCtx(new $ExprCtx(C,'exc',false),value)} +break +case 'from': +if(C.tree.length > 0){return new $AbstractExprCtx(C,false)} +break +case 'eol': +return $transition(C.parent,token)} +$_SyntaxError(C,'token '+token+' after '+C)} +$RaiseCtx.prototype.to_js=function(){this.js_processed=true +var exc=this.tree.length==0 ? '' :this.tree[0].to_js() +return '$B.$raise('+exc+')'} var $RawJSCtx=$B.parser.$RawJSCtx=function(C,js){this.type="raw_js" C.tree[C.tree.length]=this this.parent=C -this.toString=function(){return '(js) '+js} -this.to_js=function(){this.js_processed=true -return js}} +this.js=js} +$RawJSCtx.prototype.toString=function(){return '(js) '+this.js} +$RawJSCtx.prototype.transition=function(token,value){var C=this} +$RawJSCtx.prototype.to_js=function(){this.js_processed=true +return this.js} var $ReturnCtx=$B.parser.$ReturnCtx=function(C){ this.type='return' this.parent=C @@ -2611,21 +3902,28 @@ this.tree=[] C.tree[C.tree.length]=this this.scope=$get_scope(this) if(["def","generator"].indexOf(this.scope.ntype)==-1){$_SyntaxError(C,["'return' outside function"])} -var node=$get_node(this) +var node=this.node=$get_node(this) while(node.parent){if(node.parent.C){var elt=node.parent.C.tree[0] if(elt.type=='for'){elt.has_return=true break}else if(elt.type=='try'){elt.has_return=true}else if(elt.type=='single_kw' && elt.token=='finally'){elt.has_return=true}} -node=node.parent} -this.toString=function(){return 'return '+this.tree} -this.to_js=function(){this.js_processed=true +node=node.parent}} +$ReturnCtx.prototype.toString=function(){return 'return '+this.tree} +$ReturnCtx.prototype.transition=function(token,value){var C=this +return $transition(C.parent,token)} +$ReturnCtx.prototype.to_js=function(){this.js_processed=true if(this.tree.length==1 && this.tree[0].type=='abstract_expr'){ this.tree.pop() new $IdCtx(new $ExprCtx(this,'rvalue',false),'None')} var scope=this.scope -if(scope.ntype=='generator'){return 'return [$B.generator_return('+$to_js(this.tree)+')]'} -var js='var $res = '+$to_js(this.tree)+';'+'$B.leave_frame' +if(scope.ntype=='generator'){return 'var $res = '+$to_js(this.tree)+'; $B.leave_frame({$locals});'+ +'return $B.generator_return($res)'} +var indent=' '.repeat(this.node.indent-1) +var js='var $res = '+$to_js(this.tree)+';\n'+indent+ +'if($locals.$f_trace !== _b_.None){$B.trace_return($res)}\n'+indent+ +'$B.leave_frame' if(scope.id.substr(0,6)=='$exec_'){js+='_exec'} -return js+'("'+scope.id+'");return $res'}} +js+='({$locals});\n'+indent+'return $res' +return js} var $SingleKwCtx=$B.parser.$SingleKwCtx=function(C,token){ this.type='single_kw' this.token=token @@ -2641,49 +3939,93 @@ if(elt.type=='for' || elt.type=='asyncfor' || (elt.type=='condition' && elt.token=='while')){elt.has_break=true elt.else_node=$get_node(this) -this.loop_num=elt.loop_num}}} -this.toString=function(){return this.token} 
-this.transform=function(node,rank){ +this.loop_num=elt.loop_num}}}} +$SingleKwCtx.prototype.toString=function(){return this.token} +$SingleKwCtx.prototype.transition=function(token,value){var C=this +if(token==':'){return $BodyCtx(C)} +$_SyntaxError(C,'token '+token+' after '+C)} +$SingleKwCtx.prototype.transform=function(node,rank){ if(this.token=='finally'){var scope=$get_scope(this) -if(scope.ntype !='generator'){node.insert(0,$NodeJS('var $exit;'+ -'if($B.frames_stack.length<$stack_length){'+ +node.insert(0,$NodeJS('var $exit;'+ +'if($B.frames_stack.length < $stack_length){'+ '$exit = true;'+ '$B.frames_stack.push($top_frame)'+ '}') ) var scope_id=scope.id.replace(/\./g,'_') var last_child=node.children[node.children.length-1] -if(last_child.C.tree[0].type !="return"){node.add($NodeJS('if($exit){$B.leave_frame()}'))}}}} -this.to_js=function(){this.js_processed=true +if(last_child.C.tree[0].type !="return"){node.add($NodeJS('if($exit){$B.leave_frame({$locals})}'))}}} +$SingleKwCtx.prototype.to_js=function(){this.js_processed=true if(this.token=='finally'){return this.token} if(this.loop_num !==undefined){var scope=$get_scope(this) var res='if($locals_'+scope.id.replace(/\./g,'_') return res+'["$no_break'+this.loop_num+'"])'} -return this.token}} +return this.token} var $SliceCtx=$B.parser.$SliceCtx=function(C){ this.type='slice' this.parent=C this.tree=C.tree.length > 0 ?[C.tree.pop()]:[] -C.tree.push(this) -this.to_js=function(){for(var i=0;i < this.tree.length;i++){if(this.tree[i].type=="abstract_expr"){this.tree[i].to_js=function(){return "None"}}} -return "slice.$factory("+$to_js(this.tree)+")"}} +C.tree.push(this)} +$SliceCtx.prototype.transition=function(token,value){var C=this +if(token==":"){return new $AbstractExprCtx(C,false)} +return $transition(C.parent,token,value)} +$SliceCtx.prototype.to_js=function(){for(var i=0;i < this.tree.length;i++){if(this.tree[i].type=="abstract_expr"){this.tree[i].to_js=function(){return "_b_.None"}}} +return "_b_.slice.$factory("+$to_js(this.tree)+")"} var $StarArgCtx=$B.parser.$StarArgCtx=function(C){ this.type='star_arg' this.parent=C this.tree=[] -C.tree[C.tree.length]=this -this.toString=function(){return '(star arg) '+this.tree} -this.to_js=function(){this.js_processed=true -return '{$nat:"ptuple",arg:'+$to_js(this.tree)+'}'}} +C.tree[C.tree.length]=this} +$StarArgCtx.prototype.toString=function(){return '(star arg) '+this.tree} +$StarArgCtx.prototype.transition=function(token,value){var C=this +switch(token){case 'id': +if(C.parent.type=="target_list"){C.tree.push(value) +C.parent.expect=',' +return C.parent} +return $transition(new $AbstractExprCtx(C,false),token,value) +case 'imaginary': +case 'int': +case 'float': +case 'str': +case 'bytes': +case '[': +case '(': +case '{': +case 'not': +case 'lambda': +return $transition(new $AbstractExprCtx(C,false),token,value) +case ',': +case ')': +if(C.tree.length==0){$_SyntaxError(C,"unnamed star argument")} +return $transition(C.parent,token) +case ':': +if(C.parent.parent.type=='lambda'){return $transition(C.parent.parent,token)}} +$_SyntaxError(C,'token '+token+' after '+C)} +$StarArgCtx.prototype.to_js=function(){this.js_processed=true +return '{$nat:"ptuple",arg:'+$to_js(this.tree)+'}'} var $StringCtx=$B.parser.$StringCtx=function(C,value){ this.type='str' this.parent=C this.tree=[value] C.tree[C.tree.length]=this -this.raw=false -this.toString=function(){return 'string '+(this.tree ||'')} -this.to_js=function(){this.js_processed=true -var res='',type=null,scope=$get_scope(this) 
+this.raw=false} +$StringCtx.prototype.toString=function(){return 'string '+(this.tree ||'')} +$StringCtx.prototype.transition=function(token,value){var C=this +switch(token){case '[': +return new $AbstractExprCtx(new $SubCtx(C.parent),false) +case '(': +C.parent.tree[0]=C +return new $CallCtx(C.parent) +case 'str': +C.tree.push(value) +return C} +return $transition(C.parent,token,value)} +$StringCtx.prototype.to_js=function(){this.js_processed=true +var res='',type=null,scope=$get_scope(this),has_surrogate=false +function _has_surrogate(s){for(var i=0;i < s.length;i++){try{code=s.charCodeAt(i)}catch(err){console.log("err for s",s) +throw err} +if(code >=0xD800 && code <=0xDBFF){return true}} +return false} function fstring(parsed_fstring){ var elts=[] for(var i=0;i < parsed_fstring.length;i++){if(parsed_fstring[i].type=='expression'){var expr=parsed_fstring[i].expression @@ -2693,10 +4035,9 @@ if(car==":" && br_stack.length==0){parts=[expr.substr(0,pos),expr.substr(pos+1)] break}else if("{[(".indexOf(car)>-1){br_stack.push(car)}else if(")]}".indexOf(car)>-1){br_stack.pop()} pos++} expr=parts[0] -var save_pos=$pos,temp_id="temp"+$B.UUID() -var expr_node=$B.py2js(expr,scope.module,temp_id,scope) +var save_pos=$pos +var expr_node=$B.py2js(expr,scope.module,scope.id,scope) expr_node.to_js() -delete $B.$py_src[temp_id] $pos=save_pos for(var j=0;j < expr_node.children.length;j++){var node=expr_node.children[j] if(node.C.tree && node.C.tree.length==1 && @@ -2708,37 +4049,44 @@ while("\n;".indexOf(expr1.charAt(expr1.length-1))>-1){expr1=expr1.substr(0,expr1 break} break}} switch(parsed_fstring[i].conversion){case "a": -expr1='$B.builtins.ascii('+expr1+')' +expr1='_b_.ascii('+expr1+')' break case "r": -expr1='$B.builtins.repr('+expr1+')' +expr1='_b_.repr('+expr1+')' break case "s": -expr1='$B.builtins.str.$factory('+expr1+')' +expr1='_b_.str.$factory('+expr1+')' break} var fmt=parts[1] if(fmt !==undefined){ var parsed_fmt=$B.parse_fstring(fmt) if(parsed_fmt.length > 1){fmt=fstring(parsed_fmt)}else{fmt="'"+fmt+"'"} -var res1="$B.builtins.str.format('{0:' + "+ +var res1="_b_.str.format('{0:' + "+ fmt+" + '}', "+expr1+")" -elts.push(res1)}else{if(parsed_fstring[i].conversion===null){expr1='$B.builtins.str.$factory('+expr1+')'} +elts.push(res1)}else{if(parsed_fstring[i].conversion===null){expr1='_b_.str.$factory('+expr1+')'} elts.push(expr1)}}else{var re=new RegExp("'","g") var elt=parsed_fstring[i].replace(re,"\\'") -.replace("\n","\\n") +.replace(/\n/g,"\\n") +has_surrogate=has_surrogate ||_has_surrogate(elt) elts.push("'"+elt+"'")}} return elts.join(' + ')} +function prepare(value){value=value.replace(/\n/g,'\\n\\\n') +value=value.replace(/\\U([A-Fa-f0-9]{8})/gm,function(mo){return String.fromCharCode("0x"+mo.slice(2))}) +return value} for(var i=0;i < this.tree.length;i++){if(this.tree[i].type=="call"){ -var js='(function(){throw TypeError.$factory("'+"'str'"+ +var js='(function(){throw _b_.TypeError.$factory("'+"'str'"+ ' object is not callable")}())' return js}else{var value=this.tree[i],is_fstring=Array.isArray(value),is_bytes=false if(!is_fstring){is_bytes=value.charAt(0)=='b'} if(type==null){type=is_bytes -if(is_bytes){res+='bytes.$factory('}}else if(type !=is_bytes){return '$B.$TypeError("can\'t concat bytes to str")'} -if(!is_bytes){if(is_fstring){res+=fstring(value)}else{res+=value.replace(/\n/g,'\\n\\\n')}}else{res+=value.substr(1).replace(/\n/g,'\\n\\\n')} +if(is_bytes){res+='_b_.bytes.$new(_b_.bytes, '}}else if(type !=is_bytes){return '$B.$TypeError("can\'t concat bytes to str")'} 
+if(!is_bytes){if(is_fstring){res+=fstring(value)}else{has_surrogate=has_surrogate ||_has_surrogate(value) +res+=prepare(value)}}else{res+=prepare(value.substr(1))} if(i < this.tree.length-1){res+='+'}}} if(is_bytes){res+=',"ISO-8859-1")'} -return res}} +if(res.length==0){res='""'} +if(has_surrogate){res="_b_.str.$surrogate.$factory("+res+")"} +return res} var $SubCtx=$B.parser.$SubCtx=function(C){ this.type='sub' this.func='getitem' @@ -2746,9 +4094,33 @@ this.value=C.tree[0] C.tree.pop() C.tree[C.tree.length]=this this.parent=C -this.tree=[] -this.toString=function(){return '(sub) (value) '+this.value+' (tree) '+this.tree} -this.to_js=function(){this.js_processed=true +this.tree=[]} +$SubCtx.prototype.toString=function(){return '(sub) (value) '+this.value+' (tree) '+this.tree} +$SubCtx.prototype.transition=function(token,value){var C=this +switch(token){case 'id': +case 'imaginary': +case 'int': +case 'float': +case 'str': +case 'bytes': +case '[': +case '(': +case '{': +case '.': +case 'not': +case 'lambda': +var expr=new $AbstractExprCtx(C,false) +return $transition(expr,token,value) +case ']': +if(C.parent.packed){return C.parent.tree[0]} +if(C.tree[0].tree.length > 0){return C.parent} +break +case ':': +return new $AbstractExprCtx(new $SliceCtx(C),false) +case ',': +return new $AbstractExprCtx(C,false)} +$_SyntaxError(C,'token '+token+' after '+C)} +$SubCtx.prototype.to_js=function(){this.js_processed=true if(this.func=='getitem' && this.value.type=='id'){var type=$get_node(this).locals[this.value.value],val=this.value.to_js() if(type=='list' ||type=='tuple'){if(this.tree.length==1){return '$B.list_key('+val+ ', '+this.tree[0].to_js()+')'}else if(this.tree.length==2){return '$B.list_slice('+val+ @@ -2757,7 +4129,9 @@ if(type=='list' ||type=='tuple'){if(this.tree.length==1){return '$B.list_key('+v ', '+(this.tree[0].to_js()||"null")+','+ (this.tree[1].to_js()||"null")+','+ (this.tree[2].to_js()||"null")+')'}}} -if(this.func=='getitem' && this.tree.length==1){return '$B.$getitem('+this.value.to_js()+','+ +if(this.func=='getitem' && this.tree.length==1){if(this.tree[0].type=="slice"){return `$B.getitem_slice(${this.value.to_js()},`+ +`${this.tree[0].to_js()})`} +return '$B.$getitem('+this.value.to_js()+','+ this.tree[0].to_js()+')'} var res='',shortcut=false if(this.func !=='delitem' && @@ -2773,59 +4147,96 @@ x.value.to_js()+' == "string") && '+subs+ var val=this.value.to_js() res+='$B.$getattr('+val+',"__'+this.func+'__")(' if(this.tree.length==1){res+=this.tree[0].to_js()+')'}else{var res1=[] -this.tree.forEach(function(elt){if(elt.type=='abstract_expr'){res1.push('None')} +this.tree.forEach(function(elt){if(elt.type=='abstract_expr'){res1.push('_b_.None')} else{res1.push(elt.to_js())}}) -res+='tuple.$factory(['+res1.join(',')+']))'} -return shortcut ? res+')' :res}} +res+='_b_.tuple.$factory(['+res1.join(',')+']))'} +return shortcut ? 
res+')' :res} var $TargetListCtx=$B.parser.$TargetListCtx=function(C){ this.type='target_list' this.parent=C this.tree=[] this.expect='id' -C.tree[C.tree.length]=this -this.toString=function(){return '(target list) '+this.tree} -this.to_js=function(){this.js_processed=true -return $to_js(this.tree)}} +C.tree[C.tree.length]=this} +$TargetListCtx.prototype.toString=function(){return '(target list) '+this.tree} +$TargetListCtx.prototype.transition=function(token,value){var C=this +switch(token){case 'id': +if(C.expect=='id'){C.expect=',' +return new $IdCtx( +new $ExprCtx(C,'target',false),value)} +case 'op': +if(C.expect=='id' && value=='*'){ +return new $PackedCtx(C)} +case '(': +case '[': +if(C.expect=='id'){C.expect=',' +return new $ListOrTupleCtx(C,token=='(' ? 'tuple' :'list')} +case ')': +case ']': +if(C.expect==','){return C.parent} +case ',': +if(C.expect==','){C.expect='id' +return C}} +if(C.expect==','){return $transition(C.parent,token,value)}else if(token=='in'){ +return $transition(C.parent,token,value)} +$_SyntaxError(C,'token '+token+' after '+C)} +$TargetListCtx.prototype.to_js=function(){this.js_processed=true +return $to_js(this.tree)} var $TernaryCtx=$B.parser.$TernaryCtx=function(C){ this.type='ternary' this.parent=C.parent C.parent.tree.pop() C.parent.tree.push(this) C.parent=this -this.tree=[C] -this.toString=function(){return '(ternary) '+this.tree} -this.to_js=function(){this.js_processed=true +this.tree=[C]} +$TernaryCtx.prototype.toString=function(){return '(ternary) '+this.tree} +$TernaryCtx.prototype.transition=function(token,value){var C=this +if(token=='else'){C.in_else=true +return new $AbstractExprCtx(C,false)}else if(! C.in_else){$_SyntaxError(C,'token '+token+' after '+C)}else if(token==","){ +if(["assign","augm_assign","node","return"]. +indexOf(C.parent.type)>-1){C.parent.tree.pop() +var t=new $ListOrTupleCtx(C.parent,'tuple') +t.implicit=true +t.tree[0]=C +C.parent=t +t.expect="id" +return t}} +return $transition(C.parent,token,value)} +$TernaryCtx.prototype.to_js=function(){this.js_processed=true var res='$B.$bool('+this.tree[1].to_js()+') ? 
' res+=this.tree[0].to_js()+' : ' -return res+this.tree[2].to_js()}} +return res+this.tree[2].to_js()} var $TryCtx=$B.parser.$TryCtx=function(C){ this.type='try' this.parent=C -C.tree[C.tree.length]=this -this.toString=function(){return '(try) '} -this.transform=function(node,rank){if(node.parent.children.length==rank+1){$_SyntaxError(C,"missing clause after 'try'")}else{var next_ctx=node.parent.children[rank+1].C.tree[0] +C.tree[C.tree.length]=this} +$TryCtx.prototype.toString=function(){return '(try) '} +$TryCtx.prototype.transition=function(token,value){var C=this +if(token==':'){return $BodyCtx(C)} +$_SyntaxError(C,'token '+token+' after '+C)} +$TryCtx.prototype.transform=function(node,rank){if(node.parent.children.length==rank+1){$_SyntaxError(C,["unexpected EOF while parsing"])}else{var next_ctx=node.parent.children[rank+1].C.tree[0] switch(next_ctx.type){case 'except': case 'finally': case 'single_kw': break default: -$_SyntaxError(C,"missing clause after 'try'")}} +$pos=node.parent.children[rank+1].pos +$_SyntaxError(node.parent.children[rank+1].C.tree[0],"no clause after try")}} var scope=$get_scope(this) var error_name=create_temp_name('$err') var failed_name="$locals."+create_temp_name('$failed') var js=failed_name+' = false;\n'+ ' '.repeat(node.indent+4)+'try' new $NodeJSCtx(node,js) -node.is_try=true node.has_return=this.has_return var catch_node=$NodeJS('catch('+error_name+')') -catch_node.is_catch=true node.parent.insert(rank+1,catch_node) catch_node.add($NodeJS("$B.set_exc("+error_name+")")) +catch_node.add($NodeJS("if($locals.$f_trace !== _b_.None)"+ +"{$locals.$f_trace = $B.trace_exception()}")) catch_node.add( $NodeJS(failed_name+' = true;'+ '$B.pmframe = $B.last($B.frames_stack);'+ -'if(0){}') +'if(false){}') ) var pos=rank+2,has_default=false, has_else=false, @@ -2866,25 +4277,82 @@ else_body.children.forEach(function(elt){else_node.add(elt)}) if(has_finally){finally_node.insert(0,else_node)}else{node.parent.insert(pos,else_node)} pos++} $loop_num++} -this.to_js=function(){this.js_processed=true -return 'try'}} +$TryCtx.prototype.to_js=function(){this.js_processed=true +return 'try'} var $UnaryCtx=$B.parser.$UnaryCtx=function(C,op){ this.type='unary' this.op=op this.parent=C -C.tree[C.tree.length]=this -this.toString=function(){return '(unary) '+this.op} +C.tree[C.tree.length]=this} +$UnaryCtx.prototype.toString=function(){return '(unary) '+this.op} +$UnaryCtx.prototype.transition=function(token,value){var C=this +switch(token){case 'int': +case 'float': +case 'imaginary': +var expr=C.parent +C.parent.parent.tree.pop() +if(C.op=='-'){value="-"+value} +else if(C.op=='~'){value=~value} +return $transition(C.parent.parent,token,value) +case 'id': +C.parent.parent.tree.pop() +var expr=new $ExprCtx(C.parent.parent,'call',false) +var expr1=new $ExprCtx(expr,'id',false) +new $IdCtx(expr1,value) +var repl=new $AttrCtx(expr) +if(C.op=='+'){repl.name='__pos__'} +else if(C.op=='-'){repl.name='__neg__'} +else{repl.name='__invert__'} +return expr1 +case 'op': +if('+'==value ||'-'==value){if(C.op===value){C.op='+'} +else{C.op='-'} +return C}} +return $transition(C.parent,token,value)} this.to_js=function(){this.js_processed=true -return this.op}} +return this.op} var $WithCtx=$B.parser.$WithCtx=function(C){ this.type='with' this.parent=C C.tree[C.tree.length]=this this.tree=[] this.expect='as' -this.scope=$get_scope(this) -this.toString=function(){return '(with) '+this.tree} -this.set_alias=function(ctx){var ids=[] +this.scope=$get_scope(this)} 
+$WithCtx.prototype.toString=function(){return '(with) '+this.tree} +$WithCtx.prototype.transition=function(token,value){var C=this +switch(token){case 'id': +if(C.expect=='id'){C.expect='as' +return $transition( +new $AbstractExprCtx(C,false),token,value)} +$_SyntaxError(C,'token '+token+' after '+C) +case 'as': +return new $AbstractExprCtx(new $AliasCtx(C)) +case ':': +switch(C.expect){case 'id': +case 'as': +case ':': +return $BodyCtx(C)} +break +case '(': +if(C.expect=='id' && C.tree.length==0){C.parenth=true +return C}else if(C.expect=='alias'){C.expect=':' +return new $TargetListCtx(C,false)} +break +case ')': +if(C.expect==',' ||C.expect=='as'){C.expect=':' +return C} +break +case ',': +if(C.parenth !==undefined && +C.has_alias===undefined && +(C.expect==',' ||C.expect=='as')){C.expect='id' +return C}else if(C.expect=='as'){C.expect='id' +return C}else if(C.expect==':'){C.expect='id' +return C} +break} +$_SyntaxError(C,'token '+token+' after '+ +C.expect)} +$WithCtx.prototype.set_alias=function(ctx){var ids=[] if(ctx.type=="id"){ids=[ctx]}else if(ctx.type=="list_or_tuple"){ ctx.tree.forEach(function(expr){if(expr.type=="expr" && expr.tree[0].type=="id"){ids.push(expr.tree[0])}})} for(var i=0,len=ids.length;i < len;i++){var id_ctx=ids[i] @@ -2892,7 +4360,7 @@ $bind(id_ctx.value,this.scope,this) id_ctx.bound=true if(this.scope.ntype !=='module'){ this.scope.C.tree[0].locals.push(id_ctx.value)}}} -this.transform=function(node,rank){while(this.tree.length > 1){ +$WithCtx.prototype.transform=function(node,rank){while(this.tree.length > 1){ var suite=node.children,item=this.tree.pop(),new_node=new $Node(),ctx=new $NodeCtx(new_node),with_ctx=new $WithCtx(ctx) item.parent=with_ctx with_ctx.tree=[item] @@ -2901,7 +4369,6 @@ suite.forEach(function(elt){new_node.add(elt)}) node.children=[new_node]} if(this.transformed){return} this.prefix="" -if(this.scope.ntype=="generator"){this.prefix="$locals."} if(this.tree.length > 1){var nw=new $Node(),ctx=new $NodeCtx(nw) nw.parent=node nw.module=node.module @@ -2915,7 +4382,6 @@ this.transformed=true return} if(this.async){return this.transform_async(node,rank)} var top_try_node=$NodeJS("try") -top_try_node.is_try=true node.parent.insert(rank+1,top_try_node) var num=this.num=$loop_num++ top_try_node.ctx_manager_num=num @@ -2924,6 +4390,7 @@ this.cmexit_name=this.prefix+'$ctx_manager_exit'+num this.exc_name=this.prefix+'$exc'+num this.err_name='$err'+num this.val_name='$value'+num +this.yield_name=this.prefix+'$yield'+num if(this.tree[0].alias===null){this.tree[0].alias='$temp'} if(this.tree[0].type=='expr' && this.tree[0].tree[0].type=='list_or_tuple'){if(this.tree[1].type !='expr' || @@ -2938,7 +4405,6 @@ this.tree.splice(0,0,ids[i])}} var block=node.children node.children=[] var try_node=new $Node() -try_node.is_try=true new $NodeJSCtx(try_node,'try') top_try_node.add(try_node) if(this.tree[0].alias){var alias=this.tree[0].alias.tree[0].tree[0].value @@ -2946,16 +4412,15 @@ try_node.add($NodeJS('$locals'+'["'+alias+'"] = '+ this.val_name))} block.forEach(function(elt){try_node.add(elt)}) var catch_node=new $Node() -catch_node.is_catch=true new $NodeJSCtx(catch_node,'catch('+this.err_name+')') var js=this.exc_name+' = false;'+this.err_name+ -' = $B.exception('+this.err_name+')\n'+ +' = $B.exception('+this.err_name+', true)\n'+ ' '.repeat(node.indent+4)+ 'var $b = '+this.cmexit_name+'('+ this.err_name+'.__class__,'+ this.err_name+','+ -'$B.$getattr('+this.err_name+', "traceback"));' -if(this.scope.ntype=="generator"){js+='delete 
'+this.cmexit_name+';'} +'$B.$getattr('+this.err_name+', "__traceback__"));' +if(this.scope.ntype=="generator"){js+='$B.set_cm_in_generator('+this.cmexit_name+');'} js+='if(!$B.$bool($b)){throw '+this.err_name+'}' catch_node.add($NodeJS(js)) top_try_node.add(catch_node) @@ -2967,16 +4432,13 @@ finally_node.C.in_ctx_manager=true finally_node.is_except=true finally_node.in_ctx_manager=true var js='if('+this.exc_name -if(this.scope.ntype=="generator"){js+=' && (!$yield)'+ -' && '+this.cmexit_name} -js+='){;'+this.cmexit_name+'(None,None,None);' +js+='){'+this.cmexit_name+'(_b_.None, _b_.None, _b_.None);' if(this.scope.ntype=="generator"){js+='delete '+this.cmexit_name} -js+='}' -if(this.scope.ntype=="generator"){js+='$yield = undefined'} +js+='};' finally_node.add($NodeJS(js)) node.parent.insert(rank+2,finally_node) this.transformed=true} -this.transform_async=function(node,rank){ +$WithCtx.prototype.transform_async=function(node,rank){ var scope=$get_scope(this),expr=this.tree[0],alias=this.tree[0].alias var new_nodes=[] var num=this.num=$loop_num++ @@ -2989,1483 +4451,219 @@ new_nodes.push($NodeJS(' '+cmtype_name+ ' = _b_.type.$factory('+this.cm_name+'),')) new_nodes.push($NodeJS(' '+this.cmexit_name+ ' = $B.$call($B.$getattr('+cmtype_name+', "__aexit__")),')) -new_nodes.push($NodeJS(' '+cmenter_name+ -' = $B.$call($B.$getattr('+cmtype_name+', "__aenter__"))'+ -'('+this.cm_name+'),')) -new_nodes.push($NodeJS(" "+this.exc_name+" = false")) -js="" -if(alias){if(alias.tree[0].tree[0].type !="list_or_tuple"){var js=alias.tree[0].to_js()+' = '+ -'await $B.promise('+cmenter_name+')' -new_nodes.push($NodeJS(js))}else{ -var new_node=new $Node(),ctx=new $NodeCtx(new_node),expr=new $ExprCtx(ctx,"left",false) -expr.tree.push(alias.tree[0].tree[0]) -alias.tree[0].tree[0].parent=expr -var assign=new $AssignCtx(expr) -new $RawJSCtx(assign,'await $B.promise('+ -cmenter_name+')') -new_nodes.push(new_node)}} -var try_node=new $NodeJS('try') -node.children.forEach(function(child){try_node.add(child)}) -new_nodes.push(try_node) -var catch_node=new $NodeJS('catch(err)') -new_nodes.push(catch_node) -catch_node.add($NodeJS(this.exc_name+' = true')) -catch_node.add($NodeJS('var '+err_name+ -' = $B.imported["_sys"].exc_info()')) -var if_node=$NodeJS('if(! await $B.promise('+ -this.cmexit_name+'('+this.cm_name+', '+err_name+'[0], '+ -err_name+'[1], '+err_name+'[2])))') -catch_node.add(if_node) -if_node.add($NodeJS('$B.$raise()')) -var else_node=$NodeJS('if(! '+this.exc_name+')') -new_nodes.push(else_node) -else_node.add($NodeJS('await $B.promise('+ -this.cm_name+', _b_.None, _b_.None, _b_.None)')) -node.parent.children.splice(rank,1) -for(var i=new_nodes.length-1;i >=0;i--){node.parent.insert(rank,new_nodes[i])} -node.children=[] -return 0} -this.to_js=function(){this.js_processed=true -var indent=$get_node(this).indent,h=' '.repeat(indent),num=this.num -var head=this.prefix=="" ? "var " :this.prefix,cm_name='$ctx_manager'+num,cme_name=head+'$ctx_manager_exit'+num,exc_name=head+'$exc'+num,val_name='$value'+num -return 'var '+cm_name+' = '+this.tree[0].to_js()+'\n'+ -h+cme_name+' = $B.$getattr('+cm_name+',"__exit__")\n'+ -h+'var '+val_name+' = $B.$getattr('+cm_name+',"__enter__")()\n'+ -h+exc_name+' = true\n'}} -var $YieldCtx=$B.parser.$YieldCtx=function(C,is_await){ -this.type='yield' -this.parent=C -this.tree=[] -C.tree[C.tree.length]=this -var in_lambda=false,in_ctx_manager=false,parent=C -while(parent){if(parent.type=="lambda"){in_lambda=true -break} -parent=parent.parent} -if(! 
in_lambda){switch(C.type){case 'node': -break; -case 'assign': -case 'tuple': -case 'list_or_tuple': -$get_node(C).yield_atoms.push(this) -break -default: -$_SyntaxError(C,'yield atom must be inside ()')}} -var scope=this.scope=$get_scope(this,true) -if(! in_lambda){var in_func=scope.is_function,func_scope=scope -if(! in_func && scope.is_comp){var parent=scope.parent_block -while(parent.is_comp){parent=parent_block} -in_func=parent.is_function -func_scope=parent} -if(! in_func){$_SyntaxError(C,["'yield' outside function"])}} -if(! in_lambda){var def=func_scope.C.tree[0] -if(! is_await){def.type='generator' -func_scope.ntype='generator'} -def.yields.push(this)} -this.toString=function(){return '(yield) '+(this.from ? '(from) ' :'')+this.tree} -this.transform=function(node,rank){ -var new_node=$NodeJS('// placeholder for generator sent value') -new_node.is_set_yield_value=true -new_node.after_yield=true -new_node.indent=node.indent -node.parent.insert(rank+1,new_node)} -this.to_js=function(){this.js_processed=true -if(this.from===undefined){return $to_js(this.tree)||'None'} -return $to_js(this.tree)}} -var $add_profile=$B.parser.$add_profile=function(node,rank){if(node.type=='module'){var i=0 -while(i < node.children.length){i+=$add_profile(node.children[i],i)}}else{var elt=node.C.tree[0],offset=1,flag=true,pnode=node -while(pnode.parent !==undefined){pnode=pnode.parent} -var mod_id=pnode.id -if(node.line_num===undefined){flag=false} -if(elt.type=='condition' && elt.token=='elif'){flag=false} -else if(elt.type=='except'){flag=false} -else if(elt.type=='single_kw'){flag=false} -if(flag){ -var new_node=new $Node() -new $NodeJSCtx(new_node,';$B.$profile.count("'+mod_id+'",'+node.line_num+');') -node.parent.insert(rank,new_node) -offset=2} -var i=0 -while(i < node.children.length){i+=$add_profile(node.children[i],i)} -return offset}} -var $add_line_num=$B.parser.$add_line_num=function(node,rank){if(node.type=='module'){var i=0 -while(i < node.children.length){i+=$add_line_num(node.children[i],i)}}else if(node.type !=='marker'){var elt=node.C.tree[0],offset=1,flag=true,pnode=node -while(pnode.parent !==undefined){pnode=pnode.parent} -var mod_id=pnode.id -var line_num=node.line_num ||node.forced_line_num -if(line_num===undefined){flag=false} -if(elt.type=='condition' && elt.token=='elif'){flag=false} -else if(elt.type=='except'){flag=false} -else if(elt.type=='single_kw'){flag=false} -if(flag){ -var js=';$locals.$line_info = "'+line_num+','+ -mod_id+'";' -var new_node=new $Node() -new_node.is_line_num=true -new $NodeJSCtx(new_node,js) -node.parent.insert(rank,new_node) -offset=2} -var i=0 -while(i < node.children.length){i+=$add_line_num(node.children[i],i)} -if((elt.type=='condition' && elt.token=="while") -||node.C.type=='for'){if($B.last(node.children).C.tree[0].type !="return"){node.add($NodeJS('$locals.$line_info = "'+line_num+ -','+mod_id+'";'))}} -return offset}else{return 1}} -$B.$add_line_num=$add_line_num -var $bind=$B.parser.$bind=function(name,scope,C){ -if(scope.nonlocals && scope.nonlocals[name]){ -return} -if(scope.globals && scope.globals.has(name)){var module=$get_module(C) -module.binding[name]=true -return} -var node=$get_node(C) -node.bindings=node.bindings ||{} -node.bindings[name]=true -scope.binding=scope.binding ||{} -if(scope.binding[name]===undefined){scope.binding[name]=true}} -function $parent_match(ctx,obj){ -var flag -while(ctx.parent){flag=true -for(var attr in obj){if(ctx.parent[attr]!=obj[attr]){flag=false -break}} -if(flag){return ctx.parent} -ctx=ctx.parent} 
-return false} -var $previous=$B.parser.$previous=function(C){var previous=C.node.parent.children[C.node.parent.children.length-2] -if(!previous ||!previous.C){$_SyntaxError(C,'keyword not following correct keyword')} -return previous.C.tree[0]} -var $get_docstring=$B.parser.$get_docstring=function(node){var doc_string='' -if(node.children.length > 0){var firstchild=node.children[0] -if(firstchild.C.tree && firstchild.C.tree.length > 0 && -firstchild.C.tree[0].type=='expr'){var expr=firstchild.C.tree[0].tree[0] -if(expr.type=='str' && !Array.isArray(expr.tree[0])){doc_string=firstchild.C.tree[0].tree[0].to_js()}}} -return doc_string} -var $get_scope=$B.parser.$get_scope=function(C,flag){ -var ctx_node=C.parent -while(ctx_node.type !=='node'){ctx_node=ctx_node.parent} -var tree_node=ctx_node.node,scope=null -while(tree_node.parent && tree_node.parent.type !=='module'){var ntype=tree_node.parent.C.tree[0].type -switch(ntype){case 'def': -case 'class': -case 'generator': -var scope=tree_node.parent -scope.ntype=ntype -scope.is_function=ntype !='class' -return scope} -tree_node=tree_node.parent} -var scope=tree_node.parent ||tree_node -scope.ntype="module" -return scope} -var $get_line_num=$B.parser.$get_line_num=function(C){var ctx_node=$get_node(C),line_num=ctx_node.line_num -if(ctx_node.line_num===undefined){ctx_node=ctx_node.parent -while(ctx_node && ctx_node.line_num===undefined){ctx_node=ctx_node.parent} -if(ctx_node && ctx_node.line_num){line_num=ctx_node.line_num}} -return line_num} -var $get_module=$B.parser.$get_module=function(C){ -var ctx_node=C.parent -while(ctx_node.type !=='node'){ctx_node=ctx_node.parent} -var tree_node=ctx_node.node -if(tree_node.ntype=="module"){return tree_node} -var scope=null -while(tree_node.parent.type !='module'){tree_node=tree_node.parent} -var scope=tree_node.parent -scope.ntype="module" -return scope} -var $get_src=$B.parser.$get_src=function(C){ -var node=$get_node(C) -while(node.parent !==undefined){node=node.parent} -return node.src} -var $get_node=$B.parser.$get_node=function(C){var ctx=C -while(ctx.parent){ctx=ctx.parent} -return ctx.node} -var $to_js_map=$B.parser.$to_js_map=function(tree_element){if(tree_element.to_js !==undefined){return tree_element.to_js()} -throw Error('no to_js() for '+tree_element)} -var $to_js=$B.parser.$to_js=function(tree,sep){if(sep===undefined){sep=','} -return tree.map($to_js_map).join(sep)} -var $mangle=$B.parser.$mangle=function(name,C){ -if(name.substr(0,2)=="__" && name.substr(name.length-2)!=="__"){var klass=null,scope=$get_scope(C) -while(true){if(scope.ntype=="module"){return name} -else if(scope.ntype=="class"){var class_name=scope.C.tree[0].name -while(class_name.charAt(0)=='_'){class_name=class_name.substr(1)} -return '_'+class_name+name}else{if(scope.parent && scope.parent.C){scope=$get_scope(scope.C.tree[0])}else{return name}}}}else{return name}} -var $transition=$B.parser.$transition=function(C,token,value){ -switch(C.type){case 'abstract_expr': -var packed=C.packed,is_await=C.is_await,assign=C.assign -if(! 
assign){switch(token){case 'id': -case 'imaginary': -case 'int': -case 'float': -case 'str': -case 'bytes': -case '[': -case '(': -case '{': -case '.': -case 'not': -case 'lambda': -case 'yield': -C.parent.tree.pop() -var commas=C.with_commas -C=C.parent -C.packed=packed -C.is_await=is_await -if(assign){console.log("set assign to parent",C) -C.assign=assign}}} -switch(token){case 'await': -return new $AwaitCtx(C) -case 'id': -return new $IdCtx(new $ExprCtx(C,'id',commas),value) -case 'str': -return new $StringCtx(new $ExprCtx(C,'str',commas),value) -case 'bytes': -return new $StringCtx(new $ExprCtx(C,'bytes',commas),value) -case 'int': -return new $IntCtx(new $ExprCtx(C,'int',commas),value) -case 'float': -return new $FloatCtx(new $ExprCtx(C,'float',commas),value) -case 'imaginary': -return new $ImaginaryCtx( -new $ExprCtx(C,'imaginary',commas),value) -case '(': -return new $ListOrTupleCtx( -new $ExprCtx(C,'tuple',commas),'tuple') -case '[': -return new $ListOrTupleCtx( -new $ExprCtx(C,'list',commas),'list') -case '{': -return new $DictOrSetCtx( -new $ExprCtx(C,'dict_or_set',commas)) -case '.': -return new $EllipsisCtx( -new $ExprCtx(C,'ellipsis',commas)) -case 'not': -if(C.type=='op' && C.op=='is'){ -C.op='is_not' -return C} -return new $NotCtx(new $ExprCtx(C,'not',commas)) -case 'lambda': -return new $LambdaCtx(new $ExprCtx(C,'lambda',commas)) -case 'op': -var tg=value -switch(tg){case '*': -C.parent.tree.pop() -var commas=C.with_commas -C=C.parent -return new $PackedCtx( -new $ExprCtx(C,'expr',commas)) -case '-': -case '~': -case '+': -C.parent.tree.pop() -var left=new $UnaryCtx(C.parent,tg) -if(tg=='-'){var op_expr=new $OpCtx(left,'unary_neg')}else if(tg=='+'){var op_expr=new $OpCtx(left,'unary_pos')}else{var op_expr=new $OpCtx(left,'unary_inv')} -return new $AbstractExprCtx(op_expr,false) -case 'not': -C.parent.tree.pop() -var commas=C.with_commas -C=C.parent -return new $NotCtx( -new $ExprCtx(C,'not',commas))} -$_SyntaxError(C,'token '+token+' after '+ -C) -case '=': -$_SyntaxError(C,token) -case 'yield': -return new $AbstractExprCtx(new $YieldCtx(C),true) -case ':': -if(C.parent.type=="sub" || -(C.parent.type=="list_or_tuple" && -C.parent.parent.type=="sub")){return new $AbstractExprCtx(new $SliceCtx(C.parent),false)} -return $transition(C.parent,token,value) -case ')': -case ',': -switch(C.parent.type){case 'slice': -case 'list_or_tuple': -case 'call_arg': -case 'op': -case 'yield': -break -default: -$_SyntaxError(C,token)}} -return $transition(C.parent,token,value) -case 'annotation': -if(token=="eol" && C.tree.length==1 && -C.tree[0].tree.length==0){$_SyntaxError(C,"empty annotation")}else if(token==':' && C.parent.type !="def"){$_SyntaxError(C,"more than one annotation")} -return $transition(C.parent,token) -case 'assert': -if(token=='eol'){return $transition(C.parent,token)} -$_SyntaxError(C,token) -case 'assign': -if(token=='eol'){if(C.tree[1].type=='abstract_expr'){$_SyntaxError(C,'token '+token+' after '+ -C)} -C.guess_type() -return $transition(C.parent,'eol')} -$_SyntaxError(C,'token '+token+' after '+C) -case 'async': -if(token=="def"){return $transition(C.parent,token,value)}else if(token=="for" ||token=="with"){var ctx=$transition(C.parent,token,value) -ctx.parent.async=true -return ctx} -$_SyntaxError(C,'token '+token+' after '+C) -case 'attribute': -if(token==='id'){var name=value -if(noassign[name]===true){$_SyntaxError(C,["cannot assign to "+name])} -name=$mangle(name,C) -C.name=name -return C.parent} -$_SyntaxError(C,token) -case 'augm_assign': 
-if(token=='eol'){if(C.tree[1].type=='abstract_expr'){$_SyntaxError(C,'token '+token+' after '+ -C)} -return $transition(C.parent,'eol')} -$_SyntaxError(C,'token '+token+' after '+C) -case 'await': -C.parent.is_await=true -return $transition(C.parent,token,value) -case 'break': -if(token=='eol'){return $transition(C.parent,'eol')} -$_SyntaxError(C,token) -case 'call': -switch(token){case ',': -if(C.expect=='id'){$_SyntaxError(C,token)} -C.expect='id' -return C -case 'await': -case 'id': -case 'imaginary': -case 'int': -case 'float': -case 'str': -case 'bytes': -case '[': -case '(': -case '{': -case '.': -case 'not': -case 'lambda': -C.expect=',' -return $transition(new $CallArgCtx(C),token,value) -case ')': -C.end=$pos -return C.parent -case 'op': -C.expect=',' -switch(value){case '-': -case '~': -case '+': -C.expect=',' -return $transition(new $CallArgCtx(C),token,value) -case '*': -C.has_star=true -return new $StarArgCtx(C) -case '**': -C.has_dstar=true -return new $DoubleStarArgCtx(C)} -$_SyntaxError(C,token)} -return $transition(C.parent,token,value) -case 'call_arg': -switch(token){case 'await': -case 'id': -case 'imaginary': -case 'int': -case 'float': -case 'str': -case 'bytes': -case '[': -case '(': -case '{': -case '.': -case 'not': -case 'lambda': -if(C.expect=='id'){C.expect=',' -var expr=new $AbstractExprCtx(C,false) -return $transition(expr,token,value)} -break -case '=': -if(C.expect==','){return new $ExprCtx(new $KwArgCtx(C),'kw_value',false)} -break -case 'for': -var lst=new $ListOrTupleCtx(C,'gen_expr') -lst.vars=C.vars -lst.locals=C.locals -lst.intervals=[C.start] -C.tree.pop() -lst.expression=C.tree -C.tree=[lst] -lst.tree=[] -var comp=new $ComprehensionCtx(lst) -return new $TargetListCtx(new $CompForCtx(comp)) -case 'op': -if(C.expect=='id'){var op=value -C.expect=',' -switch(op){case '+': -case '-': -case '~': -return $transition(new $AbstractExprCtx(C,false),token,op) -case '*': -return new $StarArgCtx(C) -case '**': -return new $DoubleStarArgCtx(C)}} -$_SyntaxError(C,'token '+token+' after '+C) -case ')': -if(C.parent.kwargs && -$B.last(C.parent.tree).tree[0]&& -['kwarg','star_arg','double_star_arg']. -indexOf($B.last(C.parent.tree).tree[0].type)==-1){$_SyntaxError(C,['non-keyword arg after keyword arg'])} -if(C.tree.length > 0){var son=C.tree[C.tree.length-1] -if(son.type=='list_or_tuple' && -son.real=='gen_expr'){son.intervals.push($pos)}} -return $transition(C.parent,token) -case ':': -if(C.expect==',' && -C.parent.parent.type=='lambda'){return $transition(C.parent.parent,token)} -break -case ',': -if(C.expect==','){if(C.parent.kwargs && -['kwarg','star_arg','double_star_arg']. 
-indexOf($B.last(C.parent.tree).tree[0].type)==-1){$_SyntaxError(C,['non-keyword arg after keyword arg'])} -return $transition(C.parent,token,value)}} -$_SyntaxError(C,'token '+token+' after '+C) -case 'class': -switch(token){case 'id': -if(C.expect=='id'){C.set_name(value) -C.expect='(:' -return C} -break -case '(': -return new $CallCtx(C) -case ':': -return $BodyCtx(C)} -$_SyntaxError(C,'token '+token+' after '+C) -case 'comp_if': -return $transition(C.parent,token,value) -case 'comp_for': -if(token=='in' && C.expect=='in'){C.expect=null -return new $AbstractExprCtx(new $CompIterableCtx(C),true)} -if(C.expect===null){ -return $transition(C.parent,token,value)} -$_SyntaxError(C,'token '+token+' after '+C) -case 'comp_iterable': -return $transition(C.parent,token,value) -case 'comprehension': -switch(token){case 'if': -return new $AbstractExprCtx(new $CompIfCtx(C),false) -case 'for': -return new $TargetListCtx(new $CompForCtx(C))} -return $transition(C.parent,token,value) -case 'condition': -if(token==':'){if(C.tree[0].type=="abstract_expr" && -C.tree[0].tree.length==0){ -$_SyntaxError(C,'token '+token+' after '+C)} -return $BodyCtx(C)} -$_SyntaxError(C,'token '+token+' after '+C) -case 'continue': -if(token=='eol'){return C.parent} -$_SyntaxError(C,'token '+token+' after '+C) -case 'ctx_manager_alias': -switch(token){case ',': -case ':': -C.parent.set_alias(C.tree[0].tree[0]) -return $transition(C.parent,token,value)} -$_SyntaxError(C,'token '+token+' after '+C) -case 'decorator': -if(token=='id' && C.tree.length==0){return $transition(new $DecoratorExprCtx(C),token,value)} -if(token=='eol'){return $transition(C.parent,token)} -$_SyntaxError(C,'token '+token+' after '+C) -case 'decorator_expression': -if(C.expects===undefined){if(token=="id"){C.names.push(value) -C.expects="." -return C} -$_SyntaxError(C,'token '+token+' after '+C)}else if(C.is_call && token !=="eol"){$_SyntaxError(C,'token '+token+' after '+C)}else if(token=="id" && C.expects=="id"){C.names.push(value) -C.expects="." -return C}else if(token=="." && C.expects=="."){C.expects="id" -return C}else if(token=="(" && C.expects=="."){if(! 
C.is_call){C.is_call=true -return new $CallCtx(C)}}else if(token=='eol'){return $transition(C.parent,token)} -$_SyntaxError(C,'token '+token+' after '+C) -case 'def': -switch(token){case 'id': -if(C.name){$_SyntaxError(C,'token '+token+' after '+C)} -C.set_name(value) -return C -case '(': -if(C.name==null){$_SyntaxError(C,'token '+token+ -' after '+C)} -C.has_args=true; -return new $FuncArgs(C) -case 'annotation': -return new $AbstractExprCtx(new $AnnotationCtx(C),true) -case ':': -if(C.has_args){return $BodyCtx(C)}} -$_SyntaxError(C,'token '+token+' after '+C) -case 'del': -if(token=='eol'){return $transition(C.parent,token)} -$_SyntaxError(C,'token '+token+' after '+C) -case 'dict_or_set': -if(C.closed){switch(token){case '[': -return new $AbstractExprCtx(new $SubCtx(C.parent),false) -case '(': -return new $CallArgCtx(new $CallCtx(C.parent))} -return $transition(C.parent,token,value)}else{if(C.expect==','){switch(token){case '}': -switch(C.real){case 'dict_or_set': -if(C.tree.length !=1){break} -C.real='set' -case 'set': -case 'set_comp': -case 'dict_comp': -C.items=C.tree -C.tree=[] -C.closed=true -return C -case 'dict': -if(C.nb_dict_items()% 2==0){C.items=C.tree -C.tree=[] -C.closed=true -return C}} -$_SyntaxError(C,'token '+token+ -' after '+C) -case ',': -if(C.real=='dict_or_set'){C.real='set'} -if(C.real=='dict' && -C.nb_dict_items()% 2){$_SyntaxError(C,'token '+token+ -' after '+C)} -C.expect='id' -return C -case ':': -if(C.real=='dict_or_set'){C.real='dict'} -if(C.real=='dict'){C.expect=',' -return new $AbstractExprCtx(C,false)}else{$_SyntaxError(C,'token '+token+ -' after '+C)} -case 'for': -if(C.real=='dict_or_set'){C.real='set_comp'} -else{C.real='dict_comp'} -var lst=new $ListOrTupleCtx(C,'dict_or_set_comp') -lst.intervals=[C.start+1] -lst.vars=C.vars -C.tree.pop() -lst.expression=C.tree -C.tree=[lst] -lst.tree=[] -var comp=new $ComprehensionCtx(lst) -return new $TargetListCtx(new $CompForCtx(comp))} -$_SyntaxError(C,'token '+token+' after '+C)}else if(C.expect=='id'){switch(token){case '}': -if(C.tree.length==0){ -C.items=[] -C.real='dict'}else{ -C.items=C.tree} -C.tree=[] -C.closed=true -return C -case 'id': -case 'imaginary': -case 'int': -case 'float': -case 'str': -case 'bytes': -case '[': -case '(': -case '{': -case '.': -case 'not': -case 'lambda': -C.expect=',' -var expr=new $AbstractExprCtx(C,false) -return $transition(expr,token,value) -case 'op': -switch(value){case '*': -case '**': -C.expect="," -var expr=new $AbstractExprCtx(C,false) -expr.packed=value.length -if(C.real=="dict_or_set"){C.real=value=="*" ? 
"set" : -"dict"}else if( -(C.real=="set" && value=="**")|| -(C.real=="dict" && value=="*")){$_SyntaxError(C,'token '+token+ -' after '+C)} -return expr -case '+': -return C -case '-': -case '~': -C.expect=',' -var left=new $UnaryCtx(C,value) -if(value=='-'){var op_expr=new $OpCtx(left,'unary_neg')}else if(value=='+'){var op_expr=new $OpCtx(left,'unary_pos')}else{var op_expr=new $OpCtx(left,'unary_inv')} -return new $AbstractExprCtx(op_expr,false)} -$_SyntaxError(C,'token '+token+ -' after '+C)} -$_SyntaxError(C,'token '+token+' after '+C)} -return $transition(C.parent,token,value)} -case 'double_star_arg': -switch(token){case 'id': -case 'imaginary': -case 'int': -case 'float': -case 'str': -case 'bytes': -case '[': -case '(': -case '{': -case '.': -case 'not': -case 'lambda': -return $transition(new $AbstractExprCtx(C,false),token,value) -case ',': -case ')': -return $transition(C.parent,token) -case ':': -if(C.parent.parent.type=='lambda'){return $transition(C.parent.parent,token)}} -$_SyntaxError(C,'token '+token+' after '+C) -case 'ellipsis': -if(token=='.'){C.nbdots++ -if(C.nbdots==3 && $pos-C.start==2){C.$complete=true} -return C}else{if(! C.$complete){$pos-- -$_SyntaxError(C,'token '+token+' after '+ -C)}else{return $transition(C.parent,token,value)}} -case 'except': -switch(token){case 'id': -case 'imaginary': -case 'int': -case 'float': -case 'str': -case 'bytes': -case '[': -case '(': -case '{': -case 'not': -case 'lambda': -if(C.expect=='id'){C.expect='as' -return $transition(new $AbstractExprCtx(C,false),token,value)} -case 'as': -if(C.expect=='as' && -C.has_alias===undefined){C.expect='alias' -C.has_alias=true -return C} -case 'id': -if(C.expect=='alias'){C.expect=':' -C.set_alias(value) -return C} -break -case ':': -var _ce=C.expect -if(_ce=='id' ||_ce=='as' ||_ce==':'){return $BodyCtx(C)} -break -case '(': -if(C.expect=='id' && C.tree.length==0){C.parenth=true -return C} -break -case ')': -if(C.expect==',' ||C.expect=='as'){C.expect='as' -return C} -case ',': -if(C.parenth !==undefined && -C.has_alias===undefined && -(C.expect=='as' ||C.expect==',')){C.expect='id' -return C}} -$_SyntaxError(C,'token '+token+' after '+C.expect) -case 'expr': -switch(token){case 'bytes': -case 'float': -case 'id': -case 'imaginary': -case 'int': -case 'lambda': -case 'pass': -case 'str': -case '{': -$_SyntaxError(C,'token '+token+' after '+ -C) -break -case '[': -case '(': -case '.': -case 'not': -if(C.expect=='expr'){C.expect=',' -return $transition(new $AbstractExprCtx(C,false),token,value)}} -switch(token){case 'not': -if(C.expect==','){return new $ExprNot(C)} -break -case 'in': -if(C.parent.type=='target_list'){ -return $transition(C.parent,token)} -if(C.expect==','){return $transition(C,'op','in')} -break -case ',': -if(C.expect==','){if(C.with_commas){if($parent_match(C,{type:"yield","from":true})){$_SyntaxError(C,"no implicit tuple for yield from")} -C.parent.tree.pop() -var tuple=new $ListOrTupleCtx(C.parent,'tuple') -tuple.implicit=true -tuple.has_comma=true -tuple.tree=[C] -C.parent=tuple -return tuple}} -return $transition(C.parent,token) -case '.': -return new $AttrCtx(C) -case '[': -return new $AbstractExprCtx(new $SubCtx(C),true) -case '(': -return new $CallCtx(C) -case 'op': -var op_parent=C.parent,op=value -if(op_parent.type=='ternary' && op_parent.in_else){var new_op=new $OpCtx(C,op) -return new $AbstractExprCtx(new_op,false)} -var op1=C.parent,repl=null -while(1){if(op1.type=='expr'){op1=op1.parent} -else if(op1.type=='op' && -$op_weight[op1.op]>=$op_weight[op]&& 
-!(op1.op=='**' && op=='**')){ -repl=op1 -op1=op1.parent}else if(op1.type=="not" && -$op_weight['not']> $op_weight[op]){repl=op1 -op1=op1.parent}else{break}} -if(repl===null){while(1){if(C.parent !==op1){C=C.parent -op_parent=C.parent}else{break}} -C.parent.tree.pop() -var expr=new $ExprCtx(op_parent,'operand',C.with_commas) -expr.expect=',' -C.parent=expr -var new_op=new $OpCtx(C,op) -return new $AbstractExprCtx(new_op,false)}else{ -if(op==='and' ||op==='or'){while(repl.parent.type=='not'|| -(repl.parent.type=='expr' && -repl.parent.parent.type=='not')){ -repl=repl.parent -op_parent=repl.parent}}} -if(repl.type=='op'){var _flag=false -switch(repl.op){case '<': -case '<=': -case '==': -case '!=': -case 'is': -case '>=': -case '>': -_flag=true} -if(_flag){switch(op){case '<': -case '<=': -case '==': -case '!=': -case 'is': -case '>=': -case '>': -var c2=repl.tree[1], -c2js=c2.to_js() -var c2_clone=new Object() -for(var attr in c2){c2_clone[attr]=c2[attr]} -var vname="$c"+chained_comp_num -c2.to_js=function(){return vname} -c2_clone.to_js=function(){return vname} -chained_comp_num++ -while(repl.parent && repl.parent.type=='op'){if($op_weight[repl.parent.op]< -$op_weight[repl.op]){repl=repl.parent}else{break}} -repl.parent.tree.pop() -var and_expr=new $OpCtx(repl,'and') -and_expr.wrap={'name':vname,'js':c2js} -c2_clone.parent=and_expr -and_expr.tree.push('xxx') -var new_op=new $OpCtx(c2_clone,op) -return new $AbstractExprCtx(new_op,false)}}} -repl.parent.tree.pop() -var expr=new $ExprCtx(repl.parent,'operand',false) -expr.tree=[op1] -repl.parent=expr -var new_op=new $OpCtx(repl,op) -return new $AbstractExprCtx(new_op,false) -case 'augm_assign': -var parent=C.parent -while(parent){if(parent.type=="assign" ||parent.type=="augm_assign"){$_SyntaxError(C,"augmented assign inside assign")}else if(parent.type=="op"){$_SyntaxError(C,["can't assign to operator"])} -parent=parent.parent} -if(C.expect==','){return new $AbstractExprCtx( -new $AugmentedAssignCtx(C,value),true)} -return $transition(C.parent,token,value) -case ":": -if(C.parent.type=="sub" || -(C.parent.type=="list_or_tuple" && -C.parent.parent.type=="sub")){return new $AbstractExprCtx(new $SliceCtx(C.parent),false)}else if(C.parent.type=="slice"){return $transition(C.parent,token,value)}else if(C.parent.type=="node"){ -if(C.tree.length==1){var child=C.tree[0] -if(["id","sub","attribute"].indexOf(child.type)>-1 || -(child.real=="tuple" && child.expect=="," && -child.tree.length==1)){return new $AbstractExprCtx(new $AnnotationCtx(C),false)}} -$_SyntaxError(C,"invalid target for annotation")} -break -case '=': -function has_parent(ctx,type){ -while(ctx.parent){if(ctx.parent.type==type){return ctx.parent} -ctx=ctx.parent} -return false} -var annotation -if(C.expect==','){if(C.parent.type=="call_arg"){ -if(C.tree[0].type !="id"){$_SyntaxError(C,["keyword can't be an expression"])} -return new $AbstractExprCtx(new $KwArgCtx(C),true)}else if(annotation=has_parent(C,"annotation")){return $transition(annotation,token,value)}else if(C.parent.type=="op"){ -$_SyntaxError(C,["can't assign to operator"])}else if(C.parent.type=="list_or_tuple"){ -for(var i=0;i < C.parent.tree.length;i++){var item=C.parent.tree[i] -if(item.type=="expr" && item.name=="operand"){$_SyntaxError(C,["can't assign to operator"])}}} -while(C.parent !==undefined){C=C.parent -if(C.type=="condition"){$_SyntaxError(C,'token '+token+' after ' -+C)}else if(C.type=="augm_assign"){$_SyntaxError(C,"assign inside augmented assign")}} -C=C.tree[0] -return new $AbstractExprCtx(new 
$AssignCtx(C),true)} -break -case ':=': -if(C.tree.length==1 && -C.tree[0].type=="id"){var scope=$get_scope(C),name=C.tree[0].value -$bind(name,scope,C) -var parent=C.parent -parent.tree.pop() -var assign_expr=new $AbstractExprCtx(parent,false) -assign_expr.assign=C.tree[0] -return assign_expr} -$_SyntaxError(C,'token '+token+' after '+C) -case 'if': -var in_comp=false,ctx=C.parent -while(true){if(ctx.type=="list_or_tuple"){ -break}else if(ctx.type=='comp_for' ||ctx.type=="comp_if"){in_comp=true -break} -if(ctx.parent !==undefined){ctx=ctx.parent} -else{break}} -if(in_comp){break} -var ctx=C -while(ctx.parent && ctx.parent.type=='op'){ctx=ctx.parent -if(ctx.type=='expr' && -ctx.parent && ctx.parent.type=='op'){ctx=ctx.parent}} -return new $AbstractExprCtx(new $TernaryCtx(ctx),false) -case 'eol': -if(["dict_or_set","list_or_tuple"].indexOf(C.parent.type)==-1){var t=C.tree[0] -if(t.type=="packed" || -(t.type=="call" && t.func.type=="packed")){$_SyntaxError(C,["can't use starred expression here"])}}} -return $transition(C.parent,token) -case 'expr_not': -if(token=='in'){ -C.parent.tree.pop() -return new $AbstractExprCtx( -new $OpCtx(C.parent,'not_in'),false)} -$_SyntaxError(C,'token '+token+' after '+C) -case 'for': -switch(token){case 'in': -return new $AbstractExprCtx( -new $ExprCtx(C,'target list',true),false) -case ':': -if(C.tree.length < 2 -||C.tree[1].tree[0].type=="abstract_expr"){$_SyntaxError(C,'token '+token+' after '+ -C)} -return $BodyCtx(C)} -$_SyntaxError(C,'token '+token+' after '+C) -case 'from': -switch(token){case 'id': -if(C.expect=='id'){C.add_name(value) -C.expect=',' -return C} -if(C.expect=='alias'){C.aliases[C.names[C.names.length-1]]= -value -C.expect=',' -return C} -case '.': -if(C.expect=='module'){if(token=='id'){C.module+=value} -else{C.module+='.'} -return C} -case 'import': -if(C.expect=='module'){C.expect='id' -return C} -case 'op': -if(value=='*' && C.expect=='id' -&& C.names.length==0){if($get_scope(C).ntype !=='module'){$_SyntaxError(C,["import * only allowed at module level"])} -C.add_name('*') -C.expect='eol' -return C} -case ',': -if(C.expect==','){C.expect='id' -return C} -case 'eol': -switch(C.expect){case ',': -case 'eol': -C.bind_names() -return $transition(C.parent,token) -case 'id': -$_SyntaxError(C,['trailing comma not allowed without '+ -'surrounding parentheses']) -default: -$_SyntaxError(C,['invalid syntax'])} -case 'as': -if(C.expect==',' ||C.expect=='eol'){C.expect='alias' -return C} -case '(': -if(C.expect=='id'){C.expect='id' -return C} -case ')': -if(C.expect==',' ||C.expect=='id'){C.expect='eol' -return C}} -$_SyntaxError(C,'token '+token+' after '+C) -case 'func_arg_id': -switch(token){case '=': -if(C.expect=='='){C.has_default=true -var def_ctx=C.parent.parent -if(C.parent.has_star_arg){def_ctx.default_list.push(def_ctx.after_star.pop())}else{def_ctx.default_list.push(def_ctx.positional_list.pop())} -return new $AbstractExprCtx(C,false)} -break -case ',': -case ')': -if(C.parent.has_default && C.tree.length==0 && -C.parent.has_star_arg===undefined){$pos-=C.name.length -$_SyntaxError(C,['non-default argument follows default argument'])}else{return $transition(C.parent,token)} -case ':': -if(C.has_default){ -$_SyntaxError(C,'token '+token+' after '+ -C)} -return new $AbstractExprCtx(new $AnnotationCtx(C),false)} -$_SyntaxError(C,'token '+token+' after '+C) -case 'func_args': -switch(token){case 'id': -if(C.has_kw_arg){$_SyntaxError(C,'duplicate kw arg')} -if(C.expect=='id'){C.expect=',' 
-if(C.names.indexOf(value)>-1){$_SyntaxError(C,['duplicate argument '+value+ -' in function definition'])}} -return new $FuncArgIdCtx(C,value) -case ',': -if(C.expect==','){C.expect='id' -return C} -$_SyntaxError(C,'token '+token+' after '+ -C) -case ')': -return C.parent -case 'op': -if(C.has_kw_arg){$_SyntaxError(C,'duplicate kw arg')} -var op=value -C.expect=',' -if(op=='*'){if(C.has_star_arg){$_SyntaxError(C,'duplicate star arg')} -return new $FuncStarArgCtx(C,'*')} -if(op=='**'){return new $FuncStarArgCtx(C,'**')} -$_SyntaxError(C,'token '+op+' after '+C)} -$_SyntaxError(C,'token '+token+' after '+C) -case 'func_star_arg': -switch(token){case 'id': -if(C.name===undefined){if(C.parent.names.indexOf(value)>-1){$_SyntaxError(C,['duplicate argument '+value+ -' in function definition'])}} -C.set_name(value) -C.parent.names.push(value) -return C -case ',': -case ')': -if(C.name===undefined){ -C.set_name('*') -C.parent.names.push('*')} -return $transition(C.parent,token) -case ':': -if(C.name===undefined){$_SyntaxError(C,'annotation on an unnamed parameter')} -return new $AbstractExprCtx( -new $AnnotationCtx(C),false)} -$_SyntaxError(C,'token '+token+' after '+C) -case 'global': -switch(token){case 'id': -if(C.expect=='id'){new $IdCtx(C,value) -C.add(value) -C.expect=',' -return C} -break -case ',': -if(C.expect==','){C.expect='id' -return C} -break -case 'eol': -if(C.expect==','){return $transition(C.parent,token)} -break} -$_SyntaxError(C,'token '+token+' after '+C) -case 'id': -switch(token){case '=': -if(C.parent.type=='expr' && -C.parent.parent !==undefined && -C.parent.parent.type=='call_arg'){return new $AbstractExprCtx( -new $KwArgCtx(C.parent),false)} -return $transition(C.parent,token,value) -case 'op': -return $transition(C.parent,token,value) -case 'id': -case 'str': -case 'int': -case 'float': -case 'imaginary': -if(C.value=="print"){$_SyntaxError(C,["missing parenthesis in call to 'print'"])} -$_SyntaxError(C,'token '+token+' after '+ -C)} -if(C.value=="async"){ -if(token=='def'){C.parent.parent.tree=[] -var ctx=$transition(C.parent.parent,token,value) -ctx.async=true -return ctx}} -return $transition(C.parent,token,value) -case 'import': -switch(token){case 'id': -if(C.expect=='id'){new $ImportedModuleCtx(C,value) -C.expect=',' -return C} -if(C.expect=='qual'){C.expect=',' -C.tree[C.tree.length-1].name+= -'.'+value -C.tree[C.tree.length-1].alias+= -'.'+value -return C} -if(C.expect=='alias'){C.expect=',' -C.tree[C.tree.length-1].alias= -value -return C} -break -case '.': -if(C.expect==','){C.expect='qual' -return C} -break -case ',': -if(C.expect==','){C.expect='id' -return C} -break -case 'as': -if(C.expect==','){C.expect='alias' -return C} -break -case 'eol': -if(C.expect==','){C.bind_names() -return $transition(C.parent,token)} -break} -$_SyntaxError(C,'token '+token+' after '+C) -case 'imaginary': -case 'int': -case 'float': -switch(token){case 'id': -case 'imaginary': -case 'int': -case 'float': -case 'str': -case 'bytes': -case '[': -case '(': -case '{': -case 'not': -case 'lambda': -$_SyntaxError(C,'token '+token+' after '+ -C)} -return $transition(C.parent,token,value) -case 'kwarg': -if(token==','){return new $CallArgCtx(C.parent.parent)} -return $transition(C.parent,token) -case 'lambda': -if(token==':' && C.args===undefined){C.args=C.tree -C.tree=[] -C.body_start=$pos -return new $AbstractExprCtx(C,false)} -if(C.args !==undefined){ -C.body_end=$pos -return $transition(C.parent,token)} -if(C.args===undefined){return $transition(new $CallCtx(C),token,value)} 
-$_SyntaxError(C,'token '+token+' after '+C) -case 'list_or_tuple': -if(C.closed){if(token=='['){return new $AbstractExprCtx( -new $SubCtx(C.parent),false)} -if(token=='('){return new $CallCtx(C.parent)} -return $transition(C.parent,token,value)}else{if(C.expect==','){switch(C.real){case 'tuple': -case 'gen_expr': -if(token==')'){C.closed=true -if(C.real=='gen_expr'){C.intervals.push($pos)} -if(C.parent.type=="packed"){return C.parent.parent} -return C.parent} -break -case 'list': -case 'list_comp': -if(token==']'){C.closed=true -if(C.real=='list_comp'){C.intervals.push($pos)} -if(C.parent.type=="packed"){if(C.parent.tree.length > 0){return C.parent.tree[0]}else{return C.parent.parent}} -return C.parent} -break -case 'dict_or_set_comp': -if(token=='}'){C.intervals.push($pos) -return $transition(C.parent,token)} -break} -switch(token){case ',': -if(C.real=='tuple'){C.has_comma=true} -C.expect='id' -return C -case 'for': -if(C.real=='list'){C.real='list_comp'} -else{C.real='gen_expr'} -C.intervals=[C.start+1] -C.expression=C.tree -C.tree=[] -var comp=new $ComprehensionCtx(C) -return new $TargetListCtx(new $CompForCtx(comp))} -return $transition(C.parent,token,value)}else if(C.expect=='id'){switch(C.real){case 'tuple': -if(token==')'){C.closed=true -return C.parent} -if(token=='eol' && C.implicit===true){C.closed=true -return $transition(C.parent,token)} -break -case 'gen_expr': -if(token==')'){C.closed=true -return $transition(C.parent,token)} -break -case 'list': -if(token==']'){C.closed=true -return C} -break} -switch(token){case '=': -if(C.real=='tuple' && -C.implicit===true){C.closed=true -C.parent.tree.pop() -var expr=new $ExprCtx(C.parent,'tuple',false) -expr.tree=[C] -C.parent=expr -return $transition(C.parent,token)} -break -case ')': -break -case ']': -if(C.real=='tuple' && -C.implicit===true){ -return $transition(C.parent,token,value)}else{break} -case ',': -$_SyntaxError(C,'unexpected comma inside list') -default: -C.expect=',' -var expr=new $AbstractExprCtx(C,false) -return $transition(expr,token,value)}}else{return $transition(C.parent,token,value)}} -case 'list_comp': -switch(token){case ']': -return C.parent -case 'in': -return new $ExprCtx(C,'iterable',true) -case 'if': -return new $ExprCtx(C,'condition',true)} -$_SyntaxError(C,'token '+token+' after '+C) -case 'node': -switch(token){case 'id': -case 'imaginary': -case 'int': -case 'float': -case 'str': -case 'bytes': -case '[': -case '(': -case '{': -case 'not': -case 'lambda': -case '.': -var expr=new $AbstractExprCtx(C,true) -return $transition(expr,token,value) -case 'op': -switch(value){case '*': -case '+': -case '-': -case '~': -var expr=new $AbstractExprCtx(C,true) -return $transition(expr,token,value)} -break -case 'async': -return new $AsyncCtx(C) -case 'await': -return new $AbstractExprCtx(new $AwaitCtx(C),true) -case 'class': -return new $ClassCtx(C) -case 'continue': -return new $ContinueCtx(C) -case '__debugger__': -return new $DebuggerCtx(C) -case 'break': -return new $BreakCtx(C) -case 'def': -return new $DefCtx(C) -case 'for': -return new $TargetListCtx(new $ForExpr(C)) -case 'if': -case 'while': -return new $AbstractExprCtx( -new $ConditionCtx(C,token),false) -case 'elif': -var previous=$previous(C) -if(['condition'].indexOf(previous.type)==-1 || -previous.token=='while'){$_SyntaxError(C,'elif after '+previous.type)} -return new $AbstractExprCtx( -new $ConditionCtx(C,token),false) -case 'else': -var previous=$previous(C) -if(['condition','except','for']. 
-indexOf(previous.type)==-1){$_SyntaxError(C,'else after '+previous.type)} -return new $SingleKwCtx(C,token) -case 'finally': -var previous=$previous(C) -if(['try','except'].indexOf(previous.type)==-1 && -(previous.type !='single_kw' || -previous.token !='else')){$_SyntaxError(C,'finally after '+previous.type)} -return new $SingleKwCtx(C,token) -case 'try': -return new $TryCtx(C) -case 'except': -var previous=$previous(C) -if(['try','except'].indexOf(previous.type)==-1){$_SyntaxError(C,'except after '+previous.type)} -return new $ExceptCtx(C) -case 'assert': -return new $AbstractExprCtx( -new $AssertCtx(C),'assert',true) -case 'from': -return new $FromCtx(C) -case 'import': -return new $ImportCtx(C) -case 'global': -return new $GlobalCtx(C) -case 'nonlocal': -return new $NonlocalCtx(C) -case 'lambda': -return new $LambdaCtx(C) -case 'pass': -return new $PassCtx(C) -case 'raise': -return new $AbstractExprCtx(new $RaiseCtx(C),true) -case 'return': -return new $AbstractExprCtx(new $ReturnCtx(C),true) -case 'with': -return new $AbstractExprCtx(new $WithCtx(C),false) -case 'yield': -return new $AbstractExprCtx(new $YieldCtx(C),true) -case 'del': -return new $AbstractExprCtx(new $DelCtx(C),true) -case '@': -return new $DecoratorCtx(C) -case 'eol': -if(C.tree.length==0){ -C.node.parent.children.pop() -return C.node.parent.C} -return C} -console.log('syntax error','token',token,'after',C) -$_SyntaxError(C,'token '+token+' after '+C) -case 'not': -switch(token){case 'in': -C.parent.parent.tree.pop() -return new $ExprCtx(new $OpCtx(C.parent,'not_in'),'op',false) -case 'id': -case 'imaginary': -case 'int': -case 'float': -case 'str': -case 'bytes': -case '[': -case '(': -case '{': -case '.': -case 'not': -case 'lambda': -var expr=new $AbstractExprCtx(C,false) -return $transition(expr,token,value) -case 'op': -var a=value -if('+'==a ||'-'==a ||'~'==a){var expr=new $AbstractExprCtx(C,false) -return $transition(expr,token,value)}} -return $transition(C.parent,token) -case 'op': -if(C.op===undefined){$_SyntaxError(C,['C op undefined '+C])} -if(C.op.substr(0,5)=='unary' && token !='eol'){if(C.parent.type=='assign' || -C.parent.type=='return'){ -C.parent.tree.pop() -var t=new $ListOrTupleCtx(C.parent,'tuple') -t.tree.push(C) -C.parent=t -return t}} -switch(token){case 'id': -case 'imaginary': -case 'int': -case 'float': -case 'str': -case 'bytes': -case '[': -case '(': -case '{': -case '.': -case 'not': -case 'lambda': -return $transition(new $AbstractExprCtx(C,false),token,value) -case 'op': -switch(value){case '+': -case '-': -case '~': -return new $UnaryCtx(C,value)} -default: -if(C.tree[C.tree.length-1].type== -'abstract_expr'){$_SyntaxError(C,'token '+token+' after '+ -C)}} -return $transition(C.parent,token) -case 'packed': -if(C.tree.length > 0 && token=="["){ -console.log("apply to packed element",C.tree[0]) -return $transition(C.tree[0],token,value)} -if(token=='id'){new $IdCtx(C,value) -C.parent.expect=',' -return C.parent}else if(token=="["){C.parent.expect=',' -return new $ListOrTupleCtx(C,"list")}else if(token=="("){C.parent.expect=',' -return new $ListOrTupleCtx(C,"tuple")}else if(token=="]"){return $transition(C.parent,token,value)} -console.log("syntax error",C,token) -$_SyntaxError(C,'token '+token+' after '+C) -case 'pass': -if(token=='eol'){return C.parent} -$_SyntaxError(C,'token '+token+' after '+C) -case 'raise': -switch(token){case 'id': -if(C.tree.length==0){return new $IdCtx(new $ExprCtx(C,'exc',false),value)} -break -case 'from': -if(C.tree.length > 0){return new 
$AbstractExprCtx(C,false)} -break -case 'eol': -return $transition(C.parent,token)} -$_SyntaxError(C,'token '+token+' after '+C) -case 'return': -return $transition(C.parent,token) -case 'single_kw': -if(token==':'){return $BodyCtx(C)} -$_SyntaxError(C,'token '+token+' after '+C) -case 'slice': -if(token==":"){return new $AbstractExprCtx(C,false)} -return $transition(C.parent,token,value) -case 'star_arg': -switch(token){case 'id': -if(C.parent.type=="target_list"){C.tree.push(value) -C.parent.expect=',' -return C.parent} -return $transition(new $AbstractExprCtx(C,false),token,value) -case 'imaginary': -case 'int': -case 'float': -case 'str': -case 'bytes': -case '[': -case '(': -case '{': -case 'not': -case 'lambda': -return $transition(new $AbstractExprCtx(C,false),token,value) -case ',': -return $transition(C.parent,token) -case ')': -return $transition(C.parent,token) -case ':': -if(C.parent.parent.type=='lambda'){return $transition(C.parent.parent,token)}} -$_SyntaxError(C,'token '+token+' after '+C) -case 'str': -switch(token){case '[': -return new $AbstractExprCtx(new $SubCtx(C.parent),false) -case '(': -C.parent.tree[0]=C -return new $CallCtx(C.parent) -case 'str': -C.tree.push(value) -return C} -return $transition(C.parent,token,value) -case 'sub': -switch(token){case 'id': -case 'imaginary': -case 'int': -case 'float': -case 'str': -case 'bytes': -case '[': -case '(': -case '{': -case '.': -case 'not': -case 'lambda': -var expr=new $AbstractExprCtx(C,false) -return $transition(expr,token,value) -case ']': -if(C.parent.packed){return C.parent.tree[0]} -if(C.tree[0].tree.length > 0){return C.parent} -break -case ':': -return new $AbstractExprCtx(new $SliceCtx(C),false) -case ',': -return new $AbstractExprCtx(C,false)} -$_SyntaxError(C,'token '+token+' after '+C) -case 'target_list': -switch(token){case 'id': -if(C.expect=='id'){C.expect=',' -return new $IdCtx( -new $ExprCtx(C,'target',false),value)} -case 'op': -if(C.expect=='id' && value=='*'){ -return new $PackedCtx(C)} -case '(': -case '[': -if(C.expect=='id'){C.expect=',' -return new $TargetListCtx(C)} -case ')': -case ']': -if(C.expect==','){return C.parent} -case ',': -if(C.expect==','){C.expect='id' -return C}} -if(C.expect==','){return $transition(C.parent,token,value)}else if(token=='in'){ -return $transition(C.parent,token,value)} -$_SyntaxError(C,'token '+token+' after '+C) -case 'ternary': -if(token=='else'){C.in_else=true -return new $AbstractExprCtx(C,false)}else if(! 
C.in_else){$_SyntaxError(C,'token '+token+' after '+C)} -return $transition(C.parent,token,value) -case 'try': -if(token==':'){return $BodyCtx(C)} -$_SyntaxError(C,'token '+token+' after '+C) -case 'unary': -switch(token){case 'int': -case 'float': -case 'imaginary': -var expr=C.parent -C.parent.parent.tree.pop() -if(C.op=='-'){value="-"+value} -else if(C.op=='~'){value=~value} -return $transition(C.parent.parent,token,value) -case 'id': -C.parent.parent.tree.pop() -var expr=new $ExprCtx(C.parent.parent,'call',false) -var expr1=new $ExprCtx(expr,'id',false) -new $IdCtx(expr1,value) -var repl=new $AttrCtx(expr) -if(C.op=='+'){repl.name='__pos__'} -else if(C.op=='-'){repl.name='__neg__'} -else{repl.name='__invert__'} -return expr1 -case 'op': -if('+'==value ||'-'==value){if(C.op===value){C.op='+'} -else{C.op='-'} -return C}} -return $transition(C.parent,token,value) -case 'with': -switch(token){case 'id': -if(C.expect=='id'){C.expect='as' -return $transition( -new $AbstractExprCtx(C,false),token,value)} -$_SyntaxError(C,'token '+token+' after '+C) -case 'as': -return new $AbstractExprCtx(new $AliasCtx(C)) -case ':': -switch(C.expect){case 'id': -case 'as': -case ':': -return $BodyCtx(C)} -break -case '(': -if(C.expect=='id' && C.tree.length==0){C.parenth=true -return C}else if(C.expect=='alias'){C.expect=':' -return new $TargetListCtx(C,false)} -break -case ')': -if(C.expect==',' ||C.expect=='as'){C.expect=':' -return C} -break -case ',': -if(C.parenth !==undefined && -C.has_alias===undefined && -(C.expect==',' ||C.expect=='as')){C.expect='id' -return C}else if(C.expect=='as'){C.expect='id' -return C}else if(C.expect==':'){C.expect='id' -return C} +new_nodes.push($NodeJS(' '+cmenter_name+ +' = $B.$call($B.$getattr('+cmtype_name+', "__aenter__"))'+ +'('+this.cm_name+'),')) +new_nodes.push($NodeJS(" "+this.exc_name+" = false")) +js="" +if(alias){if(alias.tree[0].tree[0].type !="list_or_tuple"){var js=alias.tree[0].to_js()+' = '+ +'await ($B.promise('+cmenter_name+'))' +new_nodes.push($NodeJS(js))}else{ +var new_node=new $Node(),ctx=new $NodeCtx(new_node),expr=new $ExprCtx(ctx,"left",false) +expr.tree.push(alias.tree[0].tree[0]) +alias.tree[0].tree[0].parent=expr +var assign=new $AssignCtx(expr) +new $RawJSCtx(assign,'await ($B.promise('+ +cmenter_name+'))') +new_nodes.push(new_node)}}else{new_nodes.push($NodeJS('await ($B.promise('+cmenter_name+'))'))} +var try_node=new $NodeJS('try') +node.children.forEach(function(child){try_node.add(child)}) +new_nodes.push(try_node) +var catch_node=new $NodeJS('catch(err)') +new_nodes.push(catch_node) +catch_node.add($NodeJS(this.exc_name+' = true')) +catch_node.add($NodeJS('var '+err_name+ +' = $B.imported["_sys"].exc_info()')) +var if_node=$NodeJS('if(! await ($B.promise('+ +this.cmexit_name+'('+this.cm_name+', '+err_name+'[0], '+ +err_name+'[1], '+err_name+'[2]))))') +catch_node.add(if_node) +if_node.add($NodeJS('$B.$raise()')) +var else_node=$NodeJS('if(! '+this.exc_name+')') +new_nodes.push(else_node) +else_node.add($NodeJS('await ($B.promise('+this.cmexit_name+'('+ +this.cm_name+', _b_.None, _b_.None, _b_.None)))')) +node.parent.children.splice(rank,1) +for(var i=new_nodes.length-1;i >=0;i--){node.parent.insert(rank,new_nodes[i])} +node.children=[] +return 0} +$WithCtx.prototype.to_js=function(){this.js_processed=true +var indent=$get_node(this).indent,h=' '.repeat(indent),num=this.num +var head=this.prefix=="" ? 
"var " :this.prefix,cm_name='$ctx_manager'+num,cme_name=head+'$ctx_manager_exit'+num,exc_name=head+'$exc'+num,val_name='$value'+num +return 'var '+cm_name+' = '+this.tree[0].to_js()+'\n'+ +h+cme_name+' = $B.$getattr('+cm_name+',"__exit__")\n'+ +h+'var '+val_name+' = $B.$getattr('+cm_name+',"__enter__")()\n'+ +h+exc_name+' = true\n'} +var $YieldCtx=$B.parser.$YieldCtx=function(C,is_await){ +this.type='yield' +this.parent=C +this.tree=[] +this.is_await=is_await +C.tree[C.tree.length]=this +if(C.type=="list_or_tuple" && C.tree.length > 1){$_SyntaxError(C,"non-parenthesized yield")} +if($parent_match(C,{type:"annotation"})){$_SyntaxError(C,["'yield' outside function"])} +var parent=this +while(true){var list_or_tuple=$parent_match(parent,{type:"list_or_tuple"}) +if(list_or_tuple){list_or_tuple.yields=list_or_tuple.yields ||[] +list_or_tuple.yields.push([this,$pos]) +parent=list_or_tuple}else{break}} +var parent=this +while(true){var set_or_dict=$parent_match(parent,{type:"dict_or_set"}) +if(set_or_dict){set_or_dict.yields=set_or_dict.yields ||[] +set_or_dict.yields.push([this,$pos]) +parent=set_or_dict}else{break}} +var root=$get_module(this) +root.yields_func_check=root.yields_func_check ||[] +root.yields_func_check.push([this,$pos]) +var scope=this.scope=$get_scope(this,true),node=$get_node(this) +node.has_yield=this +var in_comp=$parent_match(this,{type:"comprehension"}) +if($get_scope(this).id.startsWith("lc"+$B.lambda_magic)){delete node.has_yield} +if(in_comp){var outermost_expr=in_comp.tree[0].tree[1] +var parent=C +while(parent){if(parent===outermost_expr){break} +parent=parent.parent} +if(! parent){$_SyntaxError(C,["'yield' inside list comprehension"])}} +var in_lambda=false,parent=C +while(parent){if(parent.type=="lambda"){in_lambda=true +this.in_lambda=true break} -$_SyntaxError(C,'token '+token+' after '+ -C.expect) -case 'yield': +parent=parent.parent} +var parent=node.parent +while(parent){if(parent.C && parent.C.tree.length > 0 && +parent.C.tree[0].type=="with"){scope.C.tree[0].$has_yield_in_cm=true +break} +parent=parent.parent} +if(! in_lambda){switch(C.type){case 'node': +break; +case 'assign': +case 'list_or_tuple': +break +default: +$_SyntaxError(C,'yield atom must be inside ()')}}} +$YieldCtx.prototype.toString=function(){return '(yield) '+(this.from ? '(from) ' :'')+this.tree} +$YieldCtx.prototype.transition=function(token,value){var C=this if(token=='from'){ if(C.tree[0].type !='abstract_expr'){ $_SyntaxError(C,"'from' must follow 'yield'")} C.from=true -$add_yield_from_code(C) +C.from_num=$B.UUID() return C.tree[0]} -return $transition(C.parent,token)}} +return $transition(C.parent,token)} +$YieldCtx.prototype.transform=function(node,rank){ +var parent=node.parent +while(parent){if(parent.ctx_manager_num !==undefined){node.parent.insert(rank+1,$NodeJS("$top_frame[1].$has_yield_in_cm = true")) +break} +parent=parent.parent}} +$YieldCtx.prototype.to_js=function(){if(this.from){return `_r${this.from_num}`}else{return "yield "+$to_js(this.tree)}} +$YieldCtx.prototype.check_in_function=function(){if(this.in_lambda){return} +var scope=$get_scope(this),in_func=scope.is_function,func_scope=scope +if(! in_func && scope.is_comp){var parent=scope.parent_block +while(parent.is_comp){parent=parent_block} +in_func=parent.is_function +func_scope=parent} +if(! in_func){$_SyntaxError(this.parent,["'yield' outside function"])}else{var def=func_scope.C.tree[0] +if(! 
this.is_await){def.type='generator'}}} +var $add_line_num=$B.parser.$add_line_num=function(node,rank,line_info){if(node.type=='module'){var i=0 +while(i < node.children.length){i+=$add_line_num(node.children[i],i,line_info)}}else if(node.type !=='marker'){var elt=node.C.tree[0],offset=1,flag=true,pnode=node,_line_info +while(pnode.parent !==undefined){pnode=pnode.parent} +var mod_id=pnode.id +var line_num=node.line_num +if(line_num===undefined){flag=false} +if(elt.type=='condition' && elt.token=='elif'){flag=false} +else if(elt.type=='except'){flag=false} +else if(elt.type=='single_kw'){flag=false} +if(flag){_line_info=line_info===undefined ? line_num+','+mod_id : +line_info +var js=';$locals.$line_info = "'+_line_info+ +'";if($locals.$f_trace !== _b_.None){$B.trace_line()};'+ +'_b_.None;' +var new_node=new $Node() +new_node.is_line_num=true +new $NodeJSCtx(new_node,js) +node.parent.insert(rank,new_node) +offset=2} +var i=0 +while(i < node.children.length){i+=$add_line_num(node.children[i],i,line_info)} +return offset}else{return 1}} +var $bind=$B.parser.$bind=function(name,scope,C){ +if(scope.nonlocals && scope.nonlocals[name]){ +return} +if(scope.globals && scope.globals.has(name)){var module=$get_module(C) +module.binding[name]=true +return} +if(! C.no_bindings){var node=$get_node(C) +node.bindings=node.bindings ||{} +node.bindings[name]=true} +scope.binding=scope.binding ||{} +if(scope.binding[name]===undefined){scope.binding[name]=true}} +function $parent_match(ctx,obj){ +var flag +while(ctx.parent){flag=true +for(var attr in obj){if(ctx.parent[attr]!=obj[attr]){flag=false +break}} +if(flag){return ctx.parent} +ctx=ctx.parent} +return false} +var $previous=$B.parser.$previous=function(C){var previous=C.node.parent.children[C.node.parent.children.length-2] +if(!previous ||!previous.C){$_SyntaxError(C,'keyword not following correct keyword')} +return previous.C.tree[0]} +var $get_docstring=$B.parser.$get_docstring=function(node){var doc_string='' +if(node.children.length > 0){var firstchild=node.children[0] +if(firstchild.C.tree && firstchild.C.tree.length > 0 && +firstchild.C.tree[0].type=='expr'){var expr=firstchild.C.tree[0].tree[0] +if(expr.type=='str' && !Array.isArray(expr.tree[0])){doc_string=firstchild.C.tree[0].tree[0].to_js()}}} +return doc_string} +var $get_scope=$B.parser.$get_scope=function(C,flag){ +var ctx_node=C.parent +while(ctx_node.type !=='node'){ctx_node=ctx_node.parent} +var tree_node=ctx_node.node,scope=null +while(tree_node.parent && tree_node.parent.type !=='module'){var ntype=tree_node.parent.C.tree[0].type +switch(ntype){case 'def': +case 'class': +case 'generator': +var scope=tree_node.parent +scope.ntype=ntype +scope.is_function=ntype !='class' +return scope} +tree_node=tree_node.parent} +var scope=tree_node.parent ||tree_node +scope.ntype="module" +return scope} +var $get_line_num=$B.parser.$get_line_num=function(C){var ctx_node=$get_node(C),line_num=ctx_node.line_num +if(ctx_node.line_num===undefined){ctx_node=ctx_node.parent +while(ctx_node && ctx_node.line_num===undefined){ctx_node=ctx_node.parent} +if(ctx_node && ctx_node.line_num){line_num=ctx_node.line_num}} +return line_num} +var $get_module=$B.parser.$get_module=function(C){ +var ctx_node=C.parent +while(ctx_node.type !=='node'){ctx_node=ctx_node.parent} +var tree_node=ctx_node.node +if(tree_node.ntype=="module"){return tree_node} +var scope=null +while(tree_node.parent.type !='module'){tree_node=tree_node.parent} +scope=tree_node.parent +scope.ntype="module" +return scope} +var 
$get_src=$B.parser.$get_src=function(C){ +var node=$get_node(C) +while(node.parent !==undefined){node=node.parent} +return node.src} +var $get_node=$B.parser.$get_node=function(C){var ctx=C +while(ctx.parent){ctx=ctx.parent} +return ctx.node} +var $to_js_map=$B.parser.$to_js_map=function(tree_element){if(tree_element.to_js !==undefined){return tree_element.to_js()} +throw Error('no to_js() for '+tree_element)} +var $to_js=$B.parser.$to_js=function(tree,sep){if(sep===undefined){sep=','} +return tree.map($to_js_map).join(sep)} +var $mangle=$B.parser.$mangle=function(name,C){ +if(name.substr(0,2)=="__" && name.substr(name.length-2)!=="__"){var klass=null,scope=$get_scope(C) +while(true){if(scope.ntype=="module"){return name} +else if(scope.ntype=="class"){var class_name=scope.C.tree[0].name +while(class_name.charAt(0)=='_'){class_name=class_name.substr(1)} +return '_'+class_name+name}else{if(scope.parent && scope.parent.C){scope=$get_scope(scope.C.tree[0])}else{return name}}}}else{return name}} +var $transition=$B.parser.$transition=function(C,token,value){ +return C.transition(token,value)} $B.forbidden=["alert","arguments","case","catch","const","constructor","Date","debugger","delete","default","do","document","enum","export","eval","extends","Error","history","function","instanceof","keys","length","location","Math","message","new","null","Number","RegExp","String","super","switch","this","throw","typeof","var","window","toLocaleString","toString","void"] $B.aliased_names=$B.list2obj($B.forbidden) var s_escaped='abfnrtvxuU"0123456789'+"'"+'\\',is_escaped={} @@ -4476,7 +4674,7 @@ var kwdict=["class","return","break","for","lambda","try","finally","raise","def var unsupported=[] var $indented=["class","def","for","condition","single_kw","try","except","with" ] -var int_pattern=new RegExp("^\\d[0-9_]*(j|J)?"),float_pattern1=new RegExp("^\\d[0-9_]*\\.\\d*([eE][+-]?\\d+)?(j|J)?"),float_pattern2=new RegExp("^\\d[0-9_]*([eE][+-]?\\d+)(j|J)?"),hex_pattern=new RegExp("^0[xX]([0-9a-fA-F_]+)"),octal_pattern=new RegExp("^0[oO]([0-7_]+)"),binary_pattern=new RegExp("^0[bB]([01_]+)") +var int_pattern=/^(\d[0-9_]*)(j|J)?/,float_pattern1=/^(\d[\d_]*)\.(\d+(_\d+)*)?([eE][+-]?\d+(_\d+)*)?(j|J)?/,float_pattern2=/^(\d[\d_]*)([eE][+-]?\d+(_\d+)*)(j|J)?/,hex_pattern=/^0[xX]([\da-fA-F_]+)/,octal_pattern=/^0[oO]([0-7_]+)/,binary_pattern=/^0[bB]([01_]+)/ var C=null var new_node=new $Node(),current=root,name="",_type=null,pos=0,indent=null,string_modifier=false var module=root.module @@ -4489,7 +4687,8 @@ else if(_s=="\t"){ indent++;pos++ if(indent % 8 > 0){indent+=8-indent % 8}}else{break}} var _s=src.charAt(pos) -if(_s=='\n'){pos++;lnum++;indent=null;continue} +if(_s=='\n'){pos++;lnum++;indent=null; +continue} else if(_s=='#'){ var offset=src.substr(pos).search(/\n/) if(offset==-1){break} @@ -4500,6 +4699,7 @@ continue} new_node.indent=indent new_node.line_num=lnum new_node.module=module +new_node.pos=pos if(current.is_body_node){ current.indent=indent} if(indent > current.indent){ @@ -4559,13 +4759,13 @@ end++} escaped=true}else{if(src.charAt(end+1)=='\n'){ end+=2 lnum++}else if(src.substr(end+1,2)=='N{'){ -var end_lit=end+3,re=new RegExp("[-A-Z0-9 ]+"),search=re.exec(src.substr(end_lit)) +var end_lit=end+3,re=new RegExp("[-a-zA-Z0-9 ]+"),search=re.exec(src.substr(end_lit)) if(search===null){$_SyntaxError(C,"(unicode error) "+ "malformed \\N character escape",pos)} var end_lit=end_lit+search[0].length if(src.charAt(end_lit)!="}"){$_SyntaxError(C,"(unicode error) "+ -"malformed \\N character escape",pos)} -var 
description=search[0] +"malformed \\N character escape")} +var description=search[0].toUpperCase() if($B.unicodedb===undefined){var xhr=new XMLHttpRequest xhr.open("GET",$B.brython_path+"unicode.txt",false) xhr.onreadystatechange=function(){if(this.readyState==4){if(this.status==200){$B.unicodedb=this.responseText}else{console.log("Warning - could not "+ @@ -4575,13 +4775,15 @@ if($B.unicodedb !==undefined){var re=new RegExp("^([0-9A-F]+);"+ description+";.*$","m") search=re.exec($B.unicodedb) if(search===null){$_SyntaxError(C,"(unicode error) "+ -"unknown Unicode character name",pos)} -if(search[1].length==4){zone+="\\u"+search[1] -end=end_lit+1}else{end++}}else{end++}}else{if(end < src.length-1 && +"unknown Unicode character name")} +var cp="0x"+search[1] +zone+=String.fromCodePoint(eval(cp)) +end=end_lit+1}else{end++}}else{if(end < src.length-1 && is_escaped[src.charAt(end+1)]===undefined){zone+='\\'} zone+='\\' escaped=true end++}}}else if(src.charAt(end)=='\n' && _type !='triple_string'){ +console.log(pos,end,src.substring(pos,end)) $pos=end $_SyntaxError(C,["EOL while scanning string literal"])}else if(src.charAt(end)==car){if(_type=="triple_string" && src.substr(end,3)!=car+car+car){zone+=src.charAt(end) @@ -4589,9 +4791,11 @@ end++}else{found=true $pos=pos var $string=zone.substr(1),string='' for(var i=0;i < $string.length;i++){var $car=$string.charAt(i) -if($car==car && -(raw ||(i==0 || -$string.charAt(i-1)!='\\'))){string+='\\'} +if($car==car){if(raw ||(i==0 || +$string.charAt(i-1)!='\\')){string+='\\'}else if(_type=="triple_string"){ +var j=i-1 +while($string.charAt(j)=='\\'){j--} +if((i-j-1)% 2==0){string+='\\'}}} string+=$car} if(fstring){try{var re=new RegExp("\\\\"+car,"g"),string_no_bs=string.replace(re,car) var elts=$B.parse_fstring(string_no_bs)}catch(err){$_SyntaxError(C,[err.toString()])}} @@ -4607,11 +4811,11 @@ end++}} if(!found){if(_type==="triple_string"){$_SyntaxError(C,"Triple string end not found")}else{$_SyntaxError(C,"String end not found")}} continue} if(name=="" && car !='$'){ -if($B.regexIdentifier.exec(car)){name=car +if($B.unicode_tables.XID_Start[car.charCodeAt(0)]){name=car var p0=pos pos++ while(pos < src.length && -$B.regexIdentifier.exec(src.substring(p0,pos+1))){name+=src.charAt(pos) +$B.unicode_tables.XID_Continue[src.charCodeAt(pos)]){name+=src.charAt(pos) pos++}} if(name){if(kwdict.indexOf(name)>-1){$pos=pos-name.length if(unsupported.indexOf(name)>-1){$_SyntaxError(C,"Unsupported Python keyword '"+name+"'")} @@ -4639,8 +4843,16 @@ $pos=pos-name.length C=$transition(C,'id',name)} name="" continue}} -function rmu(numeric_literal){ +function rmuf(numeric_literal){ +if(numeric_literal.search("__")>-1){ +$_SyntaxError(C,"invalid literal")}else if(numeric_literal.endsWith("_")){ +$_SyntaxError(C,"invalid literal")} return numeric_literal.replace(/_/g,"")} +function check_int(numeric_literal){ +rmuf(numeric_literal) +if(numeric_literal.startsWith("0")){if(numeric_literal.substr(1).search(/[^0_]/)>-1){ +$_SyntaxError(C,"invalid literal")}else{return "0"}}} +function rmu(numeric_literal){return numeric_literal.replace(/_/g,"")} switch(car){case ' ': case '\t': pos++ @@ -4649,8 +4861,9 @@ case '.': if(pos < src.length-1 &&/^\d$/.test(src.charAt(pos+1))){ var j=pos+1 while(j < src.length && -src.charAt(j).search(/\d|e|E/)>-1){j++} -C=$transition(C,'float','0'+src.substr(pos,j-pos)) +src.charAt(j).search(/\d|e|E|_/)>-1){j++} +if(src.charAt(j)=="j"){C=$transition(C,'imaginary','0'+rmu(src.substr(pos,j-pos))) 
+j++}else{C=$transition(C,'float','0'+rmu(src.substr(pos,j-pos)))} pos=j break} $pos=pos @@ -4659,20 +4872,22 @@ pos++ break case '0': var res=hex_pattern.exec(src.substr(pos)) -if(res){C=$transition(C,'int',[16,rmu(res[1])]) +if(res){rmuf(res[1]) +C=$transition(C,'int',[16,rmu(res[1])]) pos+=res[0].length break} var res=octal_pattern.exec(src.substr(pos)) -if(res){C=$transition(C,'int',[8,rmu(res[1])]) +if(res){C=$transition(C,'int',[8,rmuf(res[1])]) pos+=res[0].length break} var res=binary_pattern.exec(src.substr(pos)) -if(res){C=$transition(C,'int',[2,rmu(res[1])]) +if(res){C=$transition(C,'int',[2,rmuf(res[1])]) pos+=res[0].length break} if(src.charAt(pos+1).search(/\d/)>-1){ if(parseInt(src.substr(pos))===0){res=int_pattern.exec(src.substr(pos)) $pos=pos +check_int(res[0]) C=$transition(C,'int',[10,rmu(res[0])]) pos+=res[0].length break}else{$_SyntaxError(C,'invalid literal starting with 0')}} @@ -4687,12 +4902,16 @@ case '7': case '8': case '9': var res=float_pattern1.exec(src.substr(pos)) -if(res){$pos=pos -if(res[2]!==undefined){C=$transition(C,'imaginary',rmu(res[0].substr(0,res[0].length-1)))}else{C=$transition(C,'float',rmu(res[0]))}}else{res=float_pattern2.exec(src.substr(pos)) -if(res){$pos=pos -if(res[2]!==undefined){C=$transition(C,'imaginary',rmu(res[0].substr(0,res[0].length-1)))}else{C=$transition(C,'float',rmu(res[0]))}}else{res=int_pattern.exec(src.substr(pos)) +if(res){check_int(res[1]) +if(res[2]){rmuf(res[2])} $pos=pos -if(res[1]!==undefined){C=$transition(C,'imaginary',rmu(res[0].substr(0,res[0].length-1)))}else{C=$transition(C,'int',[10,rmu(res[0])])}}} +if($B.last(res)!==undefined){C=$transition(C,'imaginary',rmuf(res[0].substr(0,res[0].length-1)))}else{C=$transition(C,'float',rmuf(res[0]))}}else{res=float_pattern2.exec(src.substr(pos)) +if(res){check_int(res[1]) +$pos=pos +if($B.last(res)!==undefined){C=$transition(C,'imaginary',rmuf(res[0].substr(0,res[0].length-1)))}else{C=$transition(C,'float',rmuf(res[0]))}}else{res=int_pattern.exec(src.substr(pos)) +check_int(res[1]) +$pos=pos +if(res[2]!==undefined){C=$transition(C,'imaginary',rmu(res[1]))}else{C=$transition(C,'int',[10,rmu(res[0])])}}} pos+=res[0].length break case '\n': @@ -4735,7 +4954,6 @@ case ',': case ':': $pos=pos if(src.substr(pos,2)==":="){ -console.log("PEP 572",src.substr(pos,2)) C=$transition(C,":=") pos++}else{C=$transition(C,car)} pos++ @@ -4793,7 +5011,9 @@ break case '\\': if(src.charAt(pos+1)=='\n'){lnum++ pos+=2 -break} +if(pos==src.length){$_SyntaxError(C,["unexpected EOF while parsing"])}else if(C.type=="node"){$_SyntaxError(C,'nothing before \\')} +break}else{$pos=pos +$_SyntaxError(C,['unexpected character after line continuation character'])} case String.fromCharCode(12): pos+=1 break @@ -4802,23 +5022,34 @@ $pos=pos $_SyntaxError(C,'unknown token ['+car+']')}} if(br_stack.length !=0){var br_err=br_pos[0] $pos=br_err[1] -$_SyntaxError(br_err[0],["Unbalanced bracket "+br_stack.charAt(br_stack.length-1)])} -if(C !==null && C.type=="async"){ +var lines=src.split("\n"),id=root.id,fname=id.startsWith("$")? 
'' :id +$_SyntaxError(br_err[0],["unexpected EOF while parsing ("+fname+", line "+ +(lines.length-1)+")"])} +if(C !==null){if(C.type=="async"){ console.log("error with async",pos,src,src.substr(pos)) $pos=pos-7 -throw $_SyntaxError(C,"car "+car+"after async",pos)} -if(C !==null && C.tree[0]&& $indented.indexOf(C.tree[0].type)>-1){$pos=pos-1 -$_SyntaxError(C,'expected an indented block',pos)}} +throw $_SyntaxError(C,"car "+car+"after async",pos)}else if(C.tree[0]&& +$indented.indexOf(C.tree[0].type)>-1){$pos=pos-1 +$_SyntaxError(C,'expected an indented block',pos)}else{var parent=current.parent +if(parent.C && parent.C.tree && +parent.C.tree[0]&& +parent.C.tree[0].type=="try"){$pos=pos-1 +$_SyntaxError(C,["unexpected EOF while parsing"])} +if(root.yields_func_check){var save_pos=$pos +for(const _yield of root.yields_func_check){$pos=_yield[1] +_yield[0].check_in_function()} +$pos=save_pos}}}} var $create_root_node=$B.parser.$create_root_node=function(src,module,locals_id,parent_block,line_num){var root=new $Node('module') root.module=module root.id=locals_id -root.binding={__doc__:true,__name__:true,__file__:true,__package__:true,__annotations__:true} +root.binding={__doc__:true,__name__:true,__file__:true,__package__:true} root.parent_block=parent_block root.line_num=line_num root.indent=-1 root.comments=[] root.imports={} if(typeof src=="object"){root.is_comp=src.is_comp +if(src.has_annotations){root.binding.__annotations__=true} src=src.src} root.src=src return root} @@ -4827,55 +5058,73 @@ $pos=0 if(typeof module=="object"){var __package__=module.__package__ module=module.__name__}else{var __package__=""} parent_scope=parent_scope ||$B.builtins_scope -var t0=new Date().getTime(),is_comp=false -if(typeof src=='object'){is_comp=src.is_comp +var t0=new Date().getTime(),is_comp=false,has_annotations=true, +line_info, +ix +if(typeof src=='object'){var is_comp=src.is_comp,has_annotations=src.has_annotations,line_info=src.line_info,ix=src.ix +if(line_info !==undefined){line_num=parseInt(line_info.split(",")[0])} src=src.src} src=src.replace(/\r\n/gm,"\n") -while(src.endsWith("\\")){src=src.substr(0,src.length-1)} +if(src.endsWith("\\")&& !src.endsWith("\\\\")){src=src.substr(0,src.length-1)} if(src.charAt(src.length-1)!="\n"){src+="\n"} var locals_is_module=Array.isArray(locals_id) if(locals_is_module){locals_id=locals_id[0]} var internal=locals_id.charAt(0)=='$' var local_ns='$locals_'+locals_id.replace(/\./g,'_') var global_ns='$locals_'+module.replace(/\./g,'_') -var root=$create_root_node({src:src,is_comp:is_comp},module,locals_id,parent_scope,line_num) +var root=$create_root_node( +{src:src,is_comp:is_comp,has_annotations:has_annotations},module,locals_id,parent_scope,line_num) $tokenize(root,src) root.is_comp=is_comp +if(ix !=undefined){root.ix=ix} root.transform() var js=['var $B = __BRYTHON__;\n'],pos=1 -js[pos++]='var $bltns = __BRYTHON__.InjectBuiltins();eval($bltns);\n\n' -js[pos]='var $locals = '+local_ns+';' +js[pos++]='var _b_ = __BRYTHON__.builtins;\n' +js[pos]='var $locals = '+local_ns +if(is_comp){js[pos]+=' = {}'} var offset=0 root.insert(0,$NodeJS(js.join(''))) offset++ -root.insert(offset++,$NodeJS(local_ns+'["__package__"] = "'+__package__+'"')) -root.insert(offset++,$NodeJS('$locals.__annotations__ = _b_.dict.$factory()')) +root.insert(offset++,$NodeJS(local_ns+'.__package__ = "'+__package__+'"')) +if(root.binding.__annotations__){root.insert(offset++,$NodeJS('$locals.__annotations__ = $B.empty_dict()'))} var enter_frame_pos=offset,js='var $top_frame = 
["'+locals_id.replace(/\./g,'_')+'", '+ local_ns+', "'+module.replace(/\./g,'_')+'", '+ -global_ns+']\n$B.frames_stack.push($top_frame)\n'+ -'var $stack_length = $B.frames_stack.length' +global_ns+']\n$locals.$f_trace = $B.enter_frame($top_frame)\n'+ +'var $stack_length = $B.frames_stack.length;' root.insert(offset++,$NodeJS(js)) var try_node=new $NodeJS('try'),children=root.children.slice(enter_frame_pos+1,root.children.length) root.insert(enter_frame_pos+1,try_node) if(children.length==0){children=[$NodeJS('')]} children.forEach(function(child){try_node.add(child)}) -try_node.add($NodeJS('$B.leave_frame()')) +try_node.add($NodeJS('$B.leave_frame({$locals, value: _b_.None})')) root.children.splice(enter_frame_pos+2,root.children.length) var catch_node=$NodeJS('catch(err)') -catch_node.add($NodeJS('$B.leave_frame()')) +catch_node.add($NodeJS('$B.leave_frame({$locals, value: _b_.None})')) catch_node.add($NodeJS('throw err')) root.add(catch_node) -if($B.profile > 0){$add_profile(root,null,module)} -if($B.debug > 0){$add_line_num(root,null,module)} +$add_line_num(root,null,line_info) var t1=new Date().getTime() if($B.debug > 2){if(module==locals_id){console.log('module '+module+' translated in '+ (t1-t0)+' ms')}} $B.compile_time+=t1-t0 return root} +$B.set_import_paths=function(){ +var meta_path=[],path_hooks=[] +if($B.use_VFS){meta_path.push($B.finders.VFS)} +if($B.$options.static_stdlib_import !==false && $B.protocol !="file"){ +meta_path.push($B.finders.stdlib_static) +if($B.path.length > 3){$B.path.shift() +$B.path.shift()}} +if($B.protocol !=="file"){meta_path.push($B.finders.path) +path_hooks.push($B.$path_hooks[0])} +if($B.$options.cpython_import){if($B.$options.cpython_import=="replace"){$B.path.pop()} +meta_path.push($B.finders.CPython)} +$B.meta_path=meta_path +$B.path_hooks=path_hooks} var brython=$B.parser.brython=function(options){ -if(options===undefined){options={'debug':0}} +if(options===undefined){options={'debug':1}} if(typeof options=='number'){options={'debug':options}} -if(options.debug===undefined){options.debug=0} +if(options.debug===undefined){options.debug=1} $B.debug=options.debug _b_.__debug__=$B.debug > 0 $B.compile_time=0 @@ -4884,24 +5133,11 @@ $B.profile=options.profile if(options.indexedDB===undefined){options.indexedDB=true} if(options.static_stdlib_import===undefined){options.static_stdlib_import=true} $B.static_stdlib_import=options.static_stdlib_import -if(options.open !==undefined){_b_.open=options.open -console.log("DeprecationWarning: \'open\' option of \'brython\' "+ -"function will be deprecated in future versions of Brython.")} $B.$options=options -var meta_path=[],path_hooks=[] -if($B.use_VFS){meta_path.push($B.$meta_path[0]) -path_hooks.push($B.$path_hooks[0])} -if(options.static_stdlib_import !==false){ -meta_path.push($B.$meta_path[1]) -if($B.path.length > 3){$B.path.shift() -$B.path.shift()}} -meta_path.push($B.$meta_path[2]) -$B.meta_path=meta_path -path_hooks.push($B.$path_hooks[1]) -$B.path_hooks=path_hooks +$B.set_import_paths() var $href=$B.script_path=_window.location.href,$href_elts=$href.split('/') $href_elts.pop() -if($B.isWebWorker){$href_elts.pop()} +if($B.isWebWorker ||$B.isNode){$href_elts.pop()} $B.curdir=$href_elts.join('/') if(options.pythonpath !==undefined){$B.path=options.pythonpath $B.$options.static_stdlib_import=false} @@ -4914,24 +5150,16 @@ if(path.slice(-7).toLowerCase()=='.vfs.js' && (prefetch===undefined ||prefetch===true)){$B.path_importer_cache[path+'/']= $B.imported['_importlib'].VFSPathFinder(path)} 
if(lang){_importlib.optimize_import_for_path(path,lang)}})} -if(!$B.isWebWorker){ +if(!($B.isWebWorker ||$B.isNode)){ var path_links=document.querySelectorAll('head link[rel~=pythonpath]'),_importlib=$B.imported['_importlib'] for(var i=0,e;e=path_links[i];++i){var href=e.href; if((' '+e.rel+' ').indexOf(' prepend ')!=-1){$B.path.unshift(href);}else{$B.path.push(href);} -if(href.slice(-7).toLowerCase()=='.vfs.js' && -(' '+e.rel+' ').indexOf(' prefetch ')!=-1){ -$B.path_importer_cache[href+'/']= -$B.imported['_importlib'].VFSPathFinder.$factory(href)} var filetype=e.hreflang if(filetype){if(filetype.slice(0,2)=='x-'){filetype=filetype.slice(2)} _importlib.optimize_import_for_path(e.href,filetype)}}} -if(options.re_module !==undefined){if(options.re_module=='pyre' ||options.re_module=='jsre'){$B.$options.re=options.re} -console.log("DeprecationWarning: \'re_module\' option of \'brython\' "+ -"function will be deprecated in future versions of Brython.")} if($B.$options.args){$B.__ARGV=$B.$options.args}else{$B.__ARGV=_b_.list.$factory([])} -if(!$B.isWebWorker){_run_scripts(options)}} +if(!($B.isWebWorker ||$B.isNode)){_run_scripts(options)}} $B.run_script=function(src,name,run_loop){ -if(run_loop){if($B.idb_cx && $B.idb_cx.$closed){$B.tasks.push([$B.idb_open])}} $B.$py_module_path[name]=$B.script_path try{var root=$B.py2js(src,name,name),js=root.to_js(),script={__doc__:root.__doc__,js:js,__name__:name,$src:src,__file__:$B.script_path+ ($B.script_path.endsWith("/")? "" :"/")+name} @@ -4944,7 +5172,7 @@ if(type==".py"){var src=submodule[1],subimports=submodule[2],is_package=submodul if(type==".py"){ required_stdlib_imports(subimports)} subimports.forEach(function(mod){if(imports.indexOf(mod)==-1){imports.push(mod)}})}}}) -for(var j=0;j 1){console.log($err) for(var attr in $err){console.log(attr+' : ',$err[attr])}} if($err.$py_error===undefined){console.log('Javascript error',$err) $err=_b_.RuntimeError.$factory($err+'')} -var $trace=_b_.getattr($err,'info')+'\n'+$err.__name__+ +var $trace=$B.$getattr($err,'info')+'\n'+$err.__name__+ ': '+$err.args -try{_b_.getattr($B.stderr,'write')($trace)}catch(print_exc_err){console.log($trace)} +try{$B.$getattr($B.stderr,'write')($trace)}catch(print_exc_err){console.log($trace)} throw $err}}else{if($elts.length > 0){if(options.indexedDB && $B.has_indexedDB && $B.hasOwnProperty("VFS")){$B.tasks.push([$B.idb_open])}} for(var i=0;i < $elts.length;i++){var elt=$elts[i] @@ -4996,8 +5222,7 @@ src=src.replace(/^\n/,'') $B.webworkers[worker.id]=src}} for(var i=0;i < $elts.length;i++){var elt=$elts[i] if(elt.type=="text/python" ||elt.type=="text/python3"){ -if(elt.id){module_name=elt.id} -else{ +if(elt.id){module_name=elt.id}else{ if(first_script){module_name='__main__' first_script=false}else{module_name='__main__'+$B.UUID()} while(defined_ids[module_name]!==undefined){module_name='__main__'+$B.UUID()}} @@ -5012,29 +5237,33 @@ $B.$Node=$Node $B.$NodeJSCtx=$NodeJSCtx $B.brython=brython})(__BRYTHON__) var brython=__BRYTHON__.brython +if(__BRYTHON__.isNode){global.__BRYTHON__=__BRYTHON__ +module.exports={__BRYTHON__ }} ; (function($B){var _b_=$B.builtins +if($B.VFS_timestamp && $B.VFS_timestamp > $B.timestamp){ +$B.timestamp=$B.VFS_timestamp} function idb_load(evt,module){ var res=evt.target.result var timestamp=$B.timestamp -if($B.VFS_timestamp && $B.VFS_timestamp > $B.timestamp){ -$B.timestamp=$B.VFS_timestamp} -if(res===undefined ||res.timestamp !=$B.timestamp){ -if($B.VFS[module]!==undefined){var 
elts=$B.VFS[module],ext=elts[0],source=elts[1],is_package=elts.length==4,__package__ -if(ext==".py"){ +if(res===undefined ||res.timestamp !=$B.timestamp || +($B.VFS[module]&& res.source_ts !==$B.VFS[module].timestamp)){ +if($B.VFS[module]!==undefined){var elts=$B.VFS[module],ext=elts[0],source=elts[1] +if(ext==".py"){var imports=elts[2],is_package=elts.length==4,source_ts=elts.timestamp,__package__ if(is_package){__package__=module} else{var parts=module.split(".") parts.pop() __package__=parts.join(".")} $B.imported[module]=$B.module.$factory(module,"",__package__) -try{var root=$B.py2js(source,module,module),js=root.to_js()}catch(err){$B.handle_error(err) -throw err} +try{var root=$B.py2js(source,module,module),js=root.to_js()}catch(err){$B.handle_error(err)} delete $B.imported[module] if($B.debug > 1){console.log("precompile",module)} -var imports=elts[2] +var parts=module.split(".") +if(parts.length > 1){parts.pop()} +if($B.stdlib.hasOwnProperty(parts.join("."))){var imports=elts[2] imports=imports.join(",") -$B.tasks.splice(0,0,[store_precompiled,module,js,imports,is_package])}else{console.log('bizarre',module,ext)}}else{}}else{ +$B.tasks.splice(0,0,[store_precompiled,module,js,source_ts,imports,is_package])}}else{console.log('bizarre',module,ext)}}else{}}else{ if(res.is_package){$B.precompiled[module]=[res.content]}else{$B.precompiled[module]=res.content} if(res.imports.length > 0){ if($B.debug > 1){console.log(module,"imports",res.imports)} @@ -5052,8 +5281,12 @@ if(!$B.imported.hasOwnProperty(subimport)&& if($B.VFS.hasOwnProperty(subimport)){var submodule=$B.VFS[subimport],ext=submodule[0],source=submodule[1] if(submodule[0]==".py"){$B.tasks.splice(0,0,[idb_get,subimport])}else{add_jsmodule(subimport,source)}}}}}} loop()} -function store_precompiled(module,js,imports,is_package){ -var db=$B.idb_cx.result,tx=db.transaction("modules","readwrite"),store=tx.objectStore("modules"),cursor=store.openCursor(),data={"name":module,"content":js,"imports":imports,"timestamp":__BRYTHON__.timestamp,"is_package":is_package},request=store.put(data) +function store_precompiled(module,js,source_ts,imports,is_package){ +var db=$B.idb_cx.result,tx=db.transaction("modules","readwrite"),store=tx.objectStore("modules"),cursor=store.openCursor(),data={"name":module,"content":js,"imports":imports,"origin":origin,"timestamp":__BRYTHON__.timestamp,"source_ts":source_ts,"is_package":is_package},request=store.put(data) +if($B.debug > 1){console.log("store precompiled",module,"package",is_package)} +document.dispatchEvent(new CustomEvent('precompile',{detail:'cache module '+module})) +var ix=$B.outdated.indexOf(module) +if(ix >-1){$B.outdated.splice(ix,1)} request.onsuccess=function(evt){ $B.tasks.splice(0,0,[idb_get,module]) loop()}} @@ -5062,31 +5295,50 @@ var db=$B.idb_cx.result,tx=db.transaction("modules","readonly") try{var store=tx.objectStore("modules") req=store.get(module) req.onsuccess=function(evt){idb_load(evt,module)}}catch(err){console.info('error',err)}} -$B.idb_open=function(obj){var idb_cx=$B.idb_cx=indexedDB.open("brython_stdlib") +$B.idb_open=function(obj){$B.idb_name="brython-cache" +var idb_cx=$B.idb_cx=indexedDB.open($B.idb_name) idb_cx.onsuccess=function(){var db=idb_cx.result if(!db.objectStoreNames.contains("modules")){var version=db.version db.close() console.info('create object store',version) -idb_cx=indexedDB.open("brython_stdlib",version+1) +idb_cx=indexedDB.open($B.idb_name,version+1) idb_cx.onupgradeneeded=function(){console.info("upgrade needed") var 
db=$B.idb_cx.result,store=db.createObjectStore("modules",{"keyPath":"name"}) store.onsuccess=loop} idb_cx.onversionchanged=function(){console.log("version changed")} idb_cx.onsuccess=function(){console.info("db opened",idb_cx) var db=idb_cx.result,store=db.createObjectStore("modules",{"keyPath":"name"}) -store.onsuccess=loop}}else{console.info("using indexedDB for stdlib modules cache") -loop()}} +store.onsuccess=loop}}else{if($B.debug > 1){console.info("using indexedDB for stdlib modules cache")} +var tx=db.transaction("modules","readwrite"),store=tx.objectStore("modules"),record,outdated=[] +var openCursor=store.openCursor() +openCursor.onerror=function(evt){console.log("open cursor error",evt)} +openCursor.onsuccess=function(evt){cursor=evt.target.result +if(cursor){record=cursor.value +if(record.timestamp==$B.timestamp){if(!$B.VFS ||!$B.VFS[record.name]|| +$B.VFS[record.name].timestamp==record.source_ts){ +if(record.is_package){$B.precompiled[record.name]=[record.content]}else{$B.precompiled[record.name]=record.content} +if($B.debug > 1){console.info("load from cache",record.name)}}else{ +outdated.push(record.name)}}else{outdated.push(record.name)} +cursor.continue()}else{if($B.debug > 1){console.log("done")} +$B.outdated=outdated +loop()}}}} idb_cx.onupgradeneeded=function(){console.info("upgrade needed") var db=idb_cx.result,store=db.createObjectStore("modules",{"keyPath":"name"}) store.onsuccess=loop} -idb_cx.onerror=function(){console.info('could not open indexedDB database')}} +idb_cx.onerror=function(){console.info('could not open indexedDB database') +$B.idb_cx=null +$B.idb_name=null +$B.$options.indexedDB=false +loop()}} $B.ajax_load_script=function(script){var url=script.url,name=script.name -var req=new XMLHttpRequest() -req.open("GET",url+"?"+Date.now(),true) +if($B.files && $B.files.hasOwnProperty(name)){$B.tasks.splice(0,0,[$B.run_script,$B.files[name],name,true])}else if($B.protocol !="file"){var req=new XMLHttpRequest(),qs=$B.$options.cache ? '' : +(url.search(/\?/)>-1 ? '&' :'?')+Date.now() +req.open("GET",url+qs,true) req.onreadystatechange=function(){if(this.readyState==4){if(this.status==200){var src=this.responseText -if(script.is_ww){$B.webworkers[name]=src}else{$B.tasks.splice(0,0,[$B.run_script,src,name,true])}}else if(this.status==404){throw Error(url+" not found")} -loop()}} -req.send()} +if(script.is_ww){$B.webworkers[name]=src}else{$B.tasks.splice(0,0,[$B.run_script,src,name,true])} +loop()}else if(this.status==404){throw Error(url+" not found")}}} +req.send()}else{throw _b_.IOError.$factory("can't load external script at "+ +script.url+" (Ajax calls not supported with protocol file:///)")}} function add_jsmodule(module,source){ source+="\nvar $locals_"+ module.replace(/\./g,"_")+" = $module" @@ -5094,10 +5346,16 @@ $B.precompiled[module]=source} var inImported=$B.inImported=function(module){if($B.imported.hasOwnProperty(module)){}else if(__BRYTHON__.VFS && __BRYTHON__.VFS.hasOwnProperty(module)){var elts=__BRYTHON__.VFS[module] if(elts===undefined){console.log('bizarre',module)} var ext=elts[0],source=elts[1],is_package=elts.length==4 -if(ext==".py"){if($B.idb_cx){$B.tasks.splice(0,0,[idb_get,module])}}else{add_jsmodule(module,source)}}else{console.log("bizarre",module)} +if(ext==".py"){if($B.idb_cx && !$B.idb_cx.$closed){$B.tasks.splice(0,0,[idb_get,module])}}else{add_jsmodule(module,source)}}else{console.log("bizarre",module)} loop()} var loop=$B.loop=function(){if($B.tasks.length==0){ -if($B.idb_cx){$B.idb_cx.result.close() +if($B.idb_cx && ! 
$B.idb_cx.$closed){var db=$B.idb_cx.result,tx=db.transaction("modules","readwrite"),store=tx.objectStore("modules") +while($B.outdated.length > 0){var module=$B.outdated.pop(),req=store.delete(module) +req.onsuccess=function(event){if($B.debug > 1){console.info("delete outdated",module)} +document.dispatchEvent(new CustomEvent('precompile',{detail:'remove outdated '+module+ +' from cache'}))}} +document.dispatchEvent(new CustomEvent('precompile',{detail:"close"})) +$B.idb_cx.result.close() $B.idb_cx.$closed=true} return} var task=$B.tasks.shift(),func=task[0],args=task.slice(1) @@ -5106,21 +5364,26 @@ module.$src=script.$src module.__file__=script.__file__ $B.imported[script_id]=module new Function("$locals_"+script_id,script.js)(module)}catch(err){ -if(err.$py_error===undefined){console.log('Javascript error',err) +if(err.__class__===undefined){console.log('Javascript error',err) if($B.is_recursion_error(err)){err=_b_.RecursionError.$factory("too much recursion")}else{$B.print_stack() err=_b_.RuntimeError.$factory(err+'')}} +if($B.debug > 1){console.log("handle error",err.__class__,err.args,err.$stack) +console.log($B.frames_stack.slice())} $B.handle_error(err)} loop()}else{ -func.apply(null,args)}} +try{func.apply(null,args)}catch(err){$B.handle_error(err)}}} $B.tasks=[] $B.has_indexedDB=self.indexedDB !==undefined $B.handle_error=function(err){ -if(err.__class__ !==undefined){var name=$B.class_name(err),trace=_b_.getattr(err,'info') +if($B.debug > 1){console.log("handle error",err.__class__,err.args)} +if(err.__class__ !==undefined){var name=$B.class_name(err),trace=$B.$getattr(err,'info') if(name=='SyntaxError' ||name=='IndentationError'){var offset=err.args[3] trace+='\n '+' '.repeat(offset)+'^'+ -'\n'+name+': '+err.args[0]}else{trace+='\n'+name+': '+err.args}}else{console.log(err) +'\n'+name+': '+err.args[0]}else{trace+='\n'+name +if(err.args[0]&& err.args[0]!==_b_.None){trace+=': '+_b_.str.$factory(err.args[0])}}}else{console.log(err) trace=err+""} -try{_b_.getattr($B.stderr,'write')(trace)}catch(print_exc_err){console.log(trace)} +if($B.debug > 1){try{$B.$getattr($B.stderr,'write')(trace) +try{$B.$getattr($B.stderr,'flush')()}catch(err){console.log(err)}}catch(print_exc_err){console.log(trace)}}else{console.debug(trace)} throw err} function required_stdlib_imports(imports,start){ var nb_added=0 @@ -5175,15 +5438,16 @@ if($.spec !==""){throw _b_.TypeError.$factory( "non-empty format string passed to object.__format__")} return _b_.getattr($.self,"__str__")()} object.__ge__=function(){return _b_.NotImplemented} -object.__getattribute__=function(obj,attr){var klass=obj.__class__ ||$B.get_class(obj) +object.__getattribute__=function(obj,attr){var klass=obj.__class__ ||$B.get_class(obj),is_own_class_instance_method=false var $test=false if($test){console.log("attr",attr,"de",obj,"klass",klass)} if(attr==="__class__"){return klass} var res=obj[attr] if(Array.isArray(obj)&& Array.prototype[attr]!==undefined){ res=undefined} -if(res===undefined && obj.__dict__ && -obj.__dict__.$string_dict.hasOwnProperty(attr)){return obj.__dict__.$string_dict[attr]} +if(res===undefined && obj.__dict__){var dict=obj.__dict__,attr1=$B.from_alias(attr) +if(dict.$string_dict.hasOwnProperty(attr1)){if($test){console.log("__dict__ hasOwnProperty",attr1,dict.$string_dict[attr1])} +return dict.$string_dict[attr1][0]}} if(res===undefined){ function check(obj,kl,attr){var v=kl[attr] if(v !==undefined){return v}} @@ -5191,7 +5455,8 @@ res=check(obj,klass,attr) if(res===undefined){var mro=klass.__mro__ for(var 
i=0,len=mro.length;i < len;i++){res=check(obj,mro[i],attr) if(res !==undefined){if($test){console.log("found in",mro[i])} -break}}}}else{if(res.__set__===undefined){ +break}}}else{if(res.__class__ !==$B.method && res.__get__===undefined){ +is_own_class_instance_method=true}}}else{if(res.__set__===undefined){ return res}} if(res !==undefined){if($test){console.log(res)} if(res.__class__===_b_.property){return res.__get__(res,obj,klass)} @@ -5204,7 +5469,7 @@ for(var i=0;i < res.__class__.__mro__.length && get===undefined;i++){get=res.__class__.__mro__[i].__get__}} if($test){console.log("get",get)} var __get__=get===undefined ? null : -_b_.getattr(res,"__get__",null) +$B.$getattr(res,"__get__",null) if($test){console.log("__get__",__get__)} if(__get__ !==null){try{return __get__.apply(null,[obj,klass])} catch(err){console.log('error in get.apply',err) @@ -5215,22 +5480,21 @@ if(typeof res=="object"){if(__get__ &&(typeof __get__=="function")){get_func=fun if(__get__===null &&(typeof res=="function")){__get__=function(x){return x}} if(__get__ !==null){ res.__name__=attr -if(attr=="__new__"){res.$type="staticmethod"} +if(attr=="__new__" || +res.__class__===$B.builtin_function){res.$type="staticmethod"} var res1=__get__.apply(null,[res,obj,klass]) if($test){console.log("res",res,"res1",res1)} if(typeof res1=="function"){ if(res1.__class__===$B.method){return res} if(res.$type=="staticmethod"){return res} -else{var self=res.__class__===$B.method ? klass :obj -function method(){var args=[self] -for(var i=0;i < arguments.length;i++){args.push(arguments[i])} -var result=res.apply(null,args) -return result} -if(attr=="a"){console.log("make method from res",res)} +else{var self=res.__class__===$B.method ? klass :obj,method=function(){var args=[self] +for(var i=0,len=arguments.length;i < len;i++){args.push(arguments[i])} +return res.apply(this,args)} method.__class__=$B.method -method.__get__=function(obj,cls){var clmethod=function(){return res(cls,...arguments)} +method.__get__=function(obj,cls){var clmethod=res.bind(null,cls) clmethod.__class__=$B.method -clmethod.$infos={__self__:cls,__func__:res,__name__:res.$infos.__name__,__qualname__:cls.$infos.__name__+"."+res.$infos.__name__} +clmethod.$infos={__self__:cls,__func__:res,__name__:res.$infos.__name__,__qualname__:cls.$infos.__name__+"."+ +res.$infos.__name__} return clmethod} method.__get__.__class__=$B.method_wrapper method.__get__.$infos=res.$infos @@ -5238,6 +5502,8 @@ if(klass.$infos===undefined){console.log("no $infos",klass) console.log($B.last($B.frames_stack))} method.$infos={__self__:self,__func__:res,__name__:attr,__qualname__:klass.$infos.__name__+"."+attr} if($test){console.log("return method",method)} +if(is_own_class_instance_method){obj.$method_cache=obj.$method_cache ||{} +obj.$method_cache[attr]=[method,res]} return method}}else{ return res1}} return res}else{ @@ -5246,7 +5512,8 @@ if(_ga===undefined){_ga=klass["__getattr__"] if(_ga===undefined){var mro=klass.__mro__ for(var i=0,len=mro.length;i < len;i++){_ga=mro[i]["__getattr__"] if(_ga !==undefined){break}}}} -if(_ga !==undefined){return _ga(obj,attr)}}} +if(_ga !==undefined){if(klass===$B.module){return _ga(attr)} +return _ga(obj,attr)}else{throw _b_.AttributeError.$factory(attr)}}} object.__gt__=function(){return _b_.NotImplemented} object.__hash__=function(self){var hash=self.__hashvalue__ if(hash !==undefined){return hash} @@ -5254,6 +5521,10 @@ return self.__hashvalue__=$B.$py_next_hash--} object.__init__=function(){if(arguments.length==0){throw 
_b_.TypeError.$factory("descriptor '__init__' of 'object' "+ "object needs an argument")} return _b_.None} +object.__init_subclass__=function(){ +var $=$B.args("__init_subclass__",0,{},[],arguments,{},null,null) +return _b_.None} +object.__init_subclass__.$type="staticmethod" object.__le__=function(){return _b_.NotImplemented} object.__lt__=function(){return _b_.NotImplemented} object.__mro__=[] @@ -5261,7 +5532,7 @@ object.__new__=function(cls,...args){if(cls===undefined){throw _b_.TypeError.$fa var init_func=$B.$getattr(cls,"__init__") if(init_func===object.__init__){if(args.length > 0){throw _b_.TypeError.$factory("object() takes no parameters")}} return{ -__class__ :cls,__dict__:_b_.dict.$factory()}} +__class__ :cls,__dict__:$B.empty_dict()}} object.__ne__=function(self,other){ if(self===other){return false} var eq=$B.$getattr(self,"__eq__",null) @@ -5274,8 +5545,8 @@ _reconstructor.$infos={__qualname__:"_reconstructor"} var res=[_reconstructor] res.push(_b_.tuple.$factory([self.__class__]. concat(self.__class__.__mro__))) -var d=_b_.dict.$factory() -for(var attr in self.__dict__.$string_dict){d.$string_dict[attr]=self.__dict__.$string_dict[attr]} +var d=$B.empty_dict() +for(var attr in self.__dict__.$string_dict){_b_.dict.$setitem(d.$string_dict,attr,self.__dict__.$string_dict[attr][0])} console.log("object.__reduce__, d",d) res.push(d) return _b_.tuple.$factory(res)} @@ -5286,11 +5557,11 @@ object.__reduce_ex__=function(self){var res=[__newobj__] var arg2=_b_.tuple.$factory([self.__class__]) if(Array.isArray(self)){self.forEach(function(item){arg2.push(item)})} res.push(arg2) -var d=_b_.dict.$factory(),nb=0 -if(self.__dict__===undefined){console.log("no dict",self) -$B.frames_stack.forEach(function(frame){console.log(frame[0],frame[1],frame[2])})} +var d=$B.empty_dict(),nb=0 +if(self.__dict__===undefined){throw _b_.TypeError.$factory("cannot pickle '"+ +$B.class_name(self)+"' object")} for(var attr in self.__dict__.$string_dict){if(attr=="__class__" ||attr.startsWith("$")){continue} -d.$string_dict[attr]=self.__dict__.$string_dict[attr] +_b_.dict.$setitem(d,attr,self.__dict__.$string_dict[attr][0]) nb++} if(nb==0){d=_b_.None} res.push(d) @@ -5298,8 +5569,9 @@ res.push(_b_.None) return _b_.tuple.$factory(res)} object.__repr__=function(self){if(self===object){return ""} if(self.__class__===_b_.type){return ""} -if(self.__class__.$infos.__module__ !==undefined && -self.__class__.$infos.__module__ !=="builtins"){return "<"+self.__class__.$infos.__module__+"."+ +var module=self.__class__.$infos.__module__ +if(module !==undefined && !module.startsWith("$")&& +module !=="builtins"){return "<"+self.__class__.$infos.__module__+"."+ $B.class_name(self)+" object>"}else{return "<"+$B.class_name(self)+" object>"}} object.__setattr__=function(self,attr,val){if(val===undefined){ throw _b_.TypeError.$factory( @@ -5308,7 +5580,7 @@ if(object[attr]===undefined){throw _b_.AttributeError.$factory( "'object' object has no attribute '"+attr+"'")}else{throw _b_.AttributeError.$factory( "'object' object attribute '"+attr+"' is read-only")}} if($B.aliased_names[attr]){attr="$$"+attr} -if(self.__dict__){self.__dict__.$string_dict[attr]=val}else{ +if(self.__dict__){_b_.dict.$setitem(self.__dict__,attr,val)}else{ self[attr]=val} return _b_.None} object.__setattr__.__get__=function(obj){return function(attr,val){object.__setattr__(obj,attr,val)}} @@ -5331,7 +5603,7 @@ $B.$class_constructor=function(class_name,class_obj,bases,parents_names,kwargs){ var metaclass var module=class_obj.__module__ 
if(module===undefined){ -module=$B.last($B.frames_stack)[2]} +module=class_obj.__module__=$B.last($B.frames_stack)[2]} for(var i=0;i < bases.length;i++){if(bases[i]===undefined){ $B.line_info=class_obj.$def_line throw _b_.NameError.$factory("name '"+parents_names[i]+ @@ -5343,34 +5615,39 @@ metaclass=val}else{ extra_kwargs[key]=val} prepare_kwargs[key]=val}} var mro0=class_obj +if(class_obj.__eq__ !==undefined && class_obj.__hash__===undefined){class_obj.__hash__=_b_.None} var orig_bases=bases.slice(),use_mro_entries=false for(var i=0;i < bases.length;i++){if(bases[i]===undefined || -(bases[i].__mro__===undefined && -bases[i].__class__ !==$B.JSObject)){var mro_entries=$B.$getattr(bases[i],"__mro_entries__",_b_.None) +(bases[i].__mro__===undefined)){var mro_entries=$B.$getattr(bases[i],"__mro_entries__",_b_.None) if(mro_entries !==_b_.None){var entries=_b_.list.$factory(mro_entries(bases)) bases.splice(i,1,...entries) use_mro_entries=true i-- continue}}} -if(metaclass===undefined){if(bases && bases.length > 0 && bases[0].__class__ !==$B.JSObject){metaclass=bases[0].__class__ +if(metaclass===undefined){if(bases && bases.length > 0){metaclass=bases[0].__class__ +if(metaclass===undefined){ +if(typeof bases[0]=="function"){if(bases.length !=1){throw _b_.TypeError.$factory("A Brython class "+ +"can inherit at most 1 Javascript constructor")} +metaclass=bases[0].__class__=$B.JSMeta +$B.set_func_names(bases[0],module)}else{throw _b_.TypeError.$factory("Argument of "+class_name+ +"is not a class (type '"+$B.class_name(bases[0])+ +"')")}} for(var i=1;i < bases.length;i++){var mc=bases[i].__class__ -if(mc===metaclass){}else if(mc.__bases__ && -mc.__bases__.indexOf(metaclass)>-1){metaclass=mc}else if(metaclass.__bases__ && +if(mc===metaclass ||_b_.issubclass(metaclass,mc)){}else if(_b_.issubclass(mc,metaclass)){metaclass=mc}else if(metaclass.__bases__ && metaclass.__bases__.indexOf(mc)==-1){throw _b_.TypeError.$factory("metaclass conflict: the "+ "metaclass of a derived class must be a (non-"+ "strict) subclass of the metaclasses of all its bases")}}}else{metaclass=_b_.type}} -var prepare=$B.$getattr(metaclass,"__prepare__",_b_.None),cl_dict=prepare(class_name,bases) -if(cl_dict.__class__ !==_b_.dict){set_class_item=$B.$getattr(cl_dict,"__setitem__")}else{set_class_item=function(attr,value){cl_dict.$string_dict[attr]=value}} -for(var attr in class_obj){if(attr=="__annotations__"){cl_dict.$string_dict[attr]=cl_dict.$string_dict[attr]|| -_b_.dict.$factory() -for(var key in class_obj[attr].$string_dict){$B.$setitem(cl_dict.$string_dict[attr],key,class_obj[attr].$string_dict[key])}}else{if(attr.charAt(0)!="$" ||attr.substr(0,2)=="$$"){set_class_item(attr,class_obj[attr])}}} +var prepare=$B.$getattr(metaclass,"__prepare__",_b_.None),cl_dict=$B.$call(prepare)(class_name,bases) +if(cl_dict.__class__ !==_b_.dict){set_class_item=$B.$getattr(cl_dict,"__setitem__")}else{set_class_item=function(attr,value){cl_dict.$string_dict[attr]=[value,cl_dict.$order++]}} +for(var attr in class_obj){if(attr=="__annotations__"){if(cl_dict.$string_dict[attr]===undefined){cl_dict.$string_dict[attr]=[$B.empty_dict(),cl_dict.$order++]} +for(var key in class_obj[attr].$string_dict){$B.$setitem(cl_dict.$string_dict[attr][0],key,class_obj[attr].$string_dict[key][0])}}else{if(attr.charAt(0)!="$" ||attr.substr(0,2)=="$$"){set_class_item(attr,class_obj[attr])}}} if(use_mro_entries){set_class_item("__orig_bases__",_b_.tuple.$factory(orig_bases))} var class_dict={__bases__:bases,__class__:metaclass,__dict__:cl_dict} 
-if(cl_dict.__class__===_b_.dict){for(var key in cl_dict.$string_dict){class_dict[key]=cl_dict.$string_dict[key]}}else{var get_class_item=$B.$getattr(cl_dict,"__getitem__") +if(cl_dict.__class__===_b_.dict){for(var key in cl_dict.$string_dict){class_dict[key]=cl_dict.$string_dict[key][0]}}else{var get_class_item=$B.$getattr(cl_dict,"__getitem__") var it=_b_.iter(cl_dict) while(true){try{var key=_b_.next(it) class_dict[key]=get_class_item(key)}catch(err){break}}} -class_dict.__mro__=_b_.type.mro(class_dict) +class_dict.__mro__=_b_.type.mro(class_dict).slice(1) var is_instanciable=true,non_abstract_methods={},abstract_methods={},mro=[class_dict].concat(class_dict.__mro__) for(var i=0;i < mro.length;i++){var kdict=i==0 ? mro0 :mro[i] for(var attr in kdict){if(non_abstract_methods[attr]){continue} @@ -5382,29 +5659,32 @@ var _slots=class_obj.__slots__ if(_slots !==undefined){if(typeof _slots=="string"){_slots=[_slots]}else{_slots=_b_.list.$factory(_slots)} cl_dict.__slots__=_slots} for(var i=0;i < mro.length-1;i++){for(var attr in mro[i]){if(attr=="__setattr__"){cl_dict.$has_setattr=true -break}else if(mro[i][attr]&& mro[i][attr].__get__){cl_dict.$has_setattr=true -break}}} +break}else if(mro[i][attr]){if(mro[i][attr].__get__ ||(mro[i][attr].__class__ && +mro[i][attr].__class__.__get__)){ +cl_dict.$has_setattr=true +break}}}} var meta_new=_b_.type.__getattribute__(metaclass,"__new__") var kls=meta_new(metaclass,class_name,bases,cl_dict) kls.__module__=module -kls.$infos={__module__:module,__name__:class_name,__qualname__:class_name} +kls.$infos={__module__:module,__name__:$B.from_alias(class_name),__qualname__:class_obj.$qualname} kls.$subclasses=[] for(var attr in class_obj){if(attr.charAt(0)!="$" ||attr.substr(0,2)=="$$"){if(typeof class_obj[attr]=="function"){class_obj[attr].$infos.$class=kls}}} if(kls.__class__===metaclass){ var meta_init=_b_.type.__getattribute__(metaclass,"__init__") meta_init(kls,class_name,bases,cl_dict)} for(var i=0;i < bases.length;i++){bases[i].$subclasses=bases[i].$subclasses ||[] -bases[i].$subclasses.push(kls) -if(i==0){init_subclass=_b_.type.__getattribute__(bases[i],"__init_subclass__") -if(init_subclass.$infos.__func__ !==undefined){init_subclass.$infos.__func__(kls,{$nat:"kw",kw:extra_kwargs})}else{init_subclass(kls,{$nat:"kw",kw:extra_kwargs})}}} -if(bases.length==0){$B.$getattr(metaclass,"__init_subclass__")(kls,{$nat:"kw",kw:extra_kwargs})} +bases[i].$subclasses.push(kls)} +var sup=_b_.$$super.$factory(kls,kls) +var init_subclass=_b_.$$super.__getattribute__(sup,"__init_subclass__") +init_subclass({$nat:"kw",kw:extra_kwargs}) if(!is_instanciable){function nofactory(){throw _b_.TypeError.$factory("Can't instantiate abstract class "+ "interface with abstract methods "+ Object.keys(abstract_methods).join(", "))} kls.$factory=nofactory} kls.__qualname__=class_name.replace("$$","") return kls} -var type=$B.make_class("type",function(obj,bases,cl_dict){if(arguments.length==1){return obj.__class__ ||$B.get_class(obj)} +var type=$B.make_class("type",function(obj,bases,cl_dict){if(arguments.length==1){if(obj===undefined){return $B.UndefinedClass} +return obj.__class__ ||$B.get_class(obj)} return type.__new__(type,obj,bases,cl_dict)} ) type.__call__=function(){var extra_args=[],klass=arguments[0] @@ -5422,11 +5702,11 @@ type.__format__=function(klass,fmt_spec){ return _b_.str.$factory(klass)} type.__getattribute__=function(klass,attr){switch(attr){case "__annotations__": var mro=[klass].concat(klass.__mro__),res -for(var i=0,len=mro.length;i < 
len;i++){if(mro[i].__dict__){var ann=mro[i].__dict__.$string_dict.__annotations__ +for(var i=0,len=mro.length;i < len;i++){if(mro[i].__dict__){var ann=mro[i].__dict__.$string_dict.__annotations__[0] if(ann){if(res===undefined){res=ann}else if(res.__class__===_b_.dict && ann.__class__===_b_.dict){ for(var key in ann.$string_dict){res.$string_dict[key]=ann.$string_dict[key]}}}}} -if(res===undefined){res=_b_.dict.$factory()} +if(res===undefined){res=$B.empty_dict()} return res case "__bases__": var res=klass.__bases__ ||_b_.tuple.$factory() @@ -5445,7 +5725,7 @@ if(klass["__delattr__"]!==undefined){return klass["__delattr__"]} return method_wrapper.$factory(attr,klass,function(key){delete klass[key]})} var res=klass[attr] var $test=false -if($test){console.log("attr",attr,"of",klass,res)} +if($test){console.log("attr",attr,"of",klass,res,res+"")} if(res===undefined && klass.__slots__ && klass.__slots__.indexOf(attr)>-1){return member_descriptor.$factory(attr,klass)} if(klass.__class__ && @@ -5457,18 +5737,20 @@ return klass.__class__[attr].__get__(klass)} if(res===undefined){ var v=klass[attr] if(v===undefined){var mro=klass.__mro__ +if(mro===undefined){console.log("pas de mro pour",klass)} for(var i=0;i < mro.length;i++){var v=mro[i][attr] if(v !==undefined){res=v break}}}else{res=v} if(res===undefined){ -var meta=klass.__class__,res=meta[attr] +var meta=klass.__class__ ||$B.get_class(klass),res=meta[attr] if($test){console.log("search in meta",meta,res)} if(res===undefined){var meta_mro=meta.__mro__ for(var i=0;i < meta_mro.length;i++){var res=meta_mro[i][attr] if(res !==undefined){break}}} if(res !==undefined){if($test){console.log("found in meta",res,typeof res)} if(res.__class__===_b_.property){return res.fget(klass)} -if(typeof res=="function"){var meta_method=function(){return res(klass,...arguments)} +if(typeof res=="function"){ +var meta_method=res.bind(null,klass) meta_method.__class__=$B.method meta_method.$infos={__self__:klass,__func__:res,__name__:attr,__qualname__:klass.$infos.__name__+"."+attr,__module__:res.$infos ? 
res.$infos.__module__ :""} return meta_method}} @@ -5478,20 +5760,24 @@ if(getattr===undefined){for(var i=0;i < meta_mro.length;i++){if(meta_mro[i].__ge break}}} if(getattr !==undefined){return getattr(klass,attr)}}}} if(res !==undefined){if($test){console.log("res",res)} -if(res.__class__===_b_.property){return res } +if(res.__class__===_b_.property){return res} if(res.__get__){if(res.__class__===method){var result=res.__get__(res.__func__,klass) result.$infos={__func__:res,__name__:res.$infos.__name__,__qualname__:klass.$infos.__name__+"."+res.$infos.__name__,__self__:klass}}else{result=res.__get__(klass)} -return result} +return result}else if(res.__class__ && res.__class__.__get__){ +if(!(attr.startsWith("__")&& attr.endsWith("__"))){return res.__class__.__get__(res,_b_.None,klass)}} if(typeof res=="function"){ -if(res.$infos===undefined){console.log("warning: no attribute $infos for",res)} +if(res.$infos===undefined && $B.debug > 1){console.log("warning: no attribute $infos for",res,"klass",klass,"attr",attr)} if($test){console.log("res is function",res)} -if(attr=="__new__"){res.$type="staticmethod"} +if(attr=="__new__" || +res.__class__===$B.builtin_function){res.$type="staticmethod"} if(attr=="__class_getitem__" && res.__class__ !==$B.method){res=_b_.classmethod.$factory(res)} +if(attr=="__init_subclass__"){res=_b_.classmethod.$factory(res)} if(res.__class__===$B.method){return res.__get__(null,klass)}else{if($test){console.log("return res",res)} return res}}else{return res}}} +type.__hash__=function(cls){return _b_.hash(cls)} type.__init__=function(){} -type.__init_subclass__=function(cls,kwargs){ -var $=$B.args("__init_subclass__",1,{cls:null},["cls"],arguments,{},"args","kwargs") +type.__init_subclass__=function(){ +var $=$B.args("__init_subclass__",1,{},[],arguments,{},"args","kwargs") if($.kwargs !==undefined){if($.kwargs.__class__ !==_b_.dict || Object.keys($.kwargs.$string_dict).length > 0){throw _b_.TypeError.$factory( "__init_subclass__() takes no keyword arguments")}} @@ -5501,39 +5787,44 @@ if(kl===cls){return true} else{for(var i=0;i < kl.__mro__.length;i++){if(kl.__mro__[i]===cls){return true}}} return false} type.__instancecheck__.$type="staticmethod" -type.__name__={__get__:function(self){return self.$infos.__name__},__set__:function(self,value){self.$infos.__name__=value}} +type.__name__={__get__:function(self){return self.$infos.__name__},__set__:function(self,value){self.$infos.__name__=value},__str__:function(self){return "type"},__eq__:function(self,other){return self.$infos.__name__==other}} type.__new__=function(meta,name,bases,cl_dict){ -var class_dict={__class__ :meta,__bases__ :bases,__dict__ :cl_dict,$infos:{__name__:name.replace("$$","")},$is_class:true,$has_setattr:cl_dict.$has_setattr} +var module=cl_dict.$string_dict.__module__ +if(module){module=module[0]} +var class_dict={__class__ :meta,__bases__ :bases,__dict__ :cl_dict,$infos:{__name__:name.replace("$$",""),__module__:module},$is_class:true,$has_setattr:cl_dict.$has_setattr} +class_dict.__mro__=type.mro(class_dict).slice(1) var items=$B.dict_to_list(cl_dict) for(var i=0;i < items.length;i++){var key=$B.to_alias(items[i][0]),v=items[i][1] +if(key==="__module__"){continue} +if(v===undefined){continue} class_dict[key]=v -if(typeof v=="function"){v.$infos.$class=class_dict +if(v.__class__){ +var set_name=$B.$getattr(v.__class__,"__set_name__",_b_.None) +if(set_name !==_b_.None){set_name(v,class_dict,key)}} +if(typeof v=="function"){if(v.$infos===undefined){console.log("type new",v,v+"") 
+console.log($B.frames_stack.slice())} +v.$infos.$class=class_dict +v.$infos.__qualname__=name+'.'+v.$infos.__name__ if(v.$infos.$defaults){ var $defaults=v.$infos.$defaults $B.Function.__setattr__(v,"__defaults__",$defaults)}}} -class_dict.__mro__=type.mro(class_dict) return class_dict} type.__repr__=type.__str__=function(kls){if(kls.$infos===undefined){console.log("no $infos",kls)} -var qualname=kls.$infos.__name__ +var qualname=kls.$infos.__qualname__ if(kls.$infos.__module__ && kls.$infos.__module__ !="builtins" && !kls.$infos.__module__.startsWith("$")){qualname=kls.$infos.__module__+"."+qualname} return ""} -type.__prepare__=function(){return _b_.dict.$factory()} -type.__qualname__={__get__:function(self){return self.$infos.__qualname__ ||self.$infos.__name__},__set__:function(self,value){self.$infos.__qualname__=value}} +type.__prepare__=function(){return $B.empty_dict()} +type.__qualname__={__get__:function(self){return self.$infos.__qualname__ ||self.$infos.__name__},__set__:function(self,value){self.$infos.__qualname__=value},__str__:function(self){console.log("type.__qualname__")},__eq__:function(self,other){return self.$infos.__qualname__==other}} type.mro=function(cls){ var bases=cls.__bases__,seqs=[],pos1=0 for(var i=0;i < bases.length;i++){ if(bases[i]===_b_.str){bases[i]=$B.StringSubclass} var bmro=[],pos=0 if(bases[i]===undefined || -bases[i].__mro__===undefined){if(bases[i].__class__===$B.JSObject){ -var js_func=bases[i].js_func -bases[i]={__class__:_b_.type,__mro__:[_b_.object],__name__:js_func.name,__init__:function(instance,...args){args.forEach(function(arg,i){args[i]=$B.pyobj2jsobj(arg)}) -js_func.apply(instance,args) -for(var attr in instance){if(typeof instance[attr]=="function"){instance[attr]=(function(f){return function(){var res=f.apply(instance,arguments) -return $B.jsobj2pyobj(res)}})(instance[attr])}}}} -bases[i].__init__.$infos={__name__:bases[i].$infos.__name__}}else{throw _b_.TypeError.$factory( +bases[i].__mro__===undefined){if(bases[i].__class__===undefined){ +return[_b_.object]}else{throw _b_.TypeError.$factory( "Object passed as base class is not a class")}} bmro[pos++]=bases[i] var _tmp=bases[i].__mro__ @@ -5541,8 +5832,8 @@ if(_tmp[0]===bases[i]){_tmp.splice(0,1)} for(var k=0;k < _tmp.length;k++){bmro[pos++]=_tmp[k]} seqs[pos1++]=bmro} if(bases.indexOf(_b_.object)==-1){bases=bases.concat(_b_.tuple.$factory([_b_.object]))} -for(var i=0;i < bases.length;i++){seqs[pos1++]=bases[i]} -var mro=[],mpos=0 +seqs[pos1++]=bases.slice() +var mro=[cls],mpos=1 while(1){var non_empty=[],pos=0 for(var i=0;i < seqs.length;i++){if(seqs[i].length > 0){non_empty[pos++]=seqs[i]}} if(non_empty.length==0){break} @@ -5569,29 +5860,22 @@ _b_.type=type var wrapper_descriptor=$B.make_class("wrapper_descriptor") $B.set_func_names(wrapper_descriptor,"builtins") type.__call__.__class__=wrapper_descriptor -$B.$factory={__class__:type,$is_class:true} -$B.$factory.__mro__=[type,_b_.object] var $instance_creator=$B.$instance_creator=function(klass){ if(klass.$instanciable !==undefined){return function(){throw _b_.TypeError.$factory( "Can't instantiate abstract class interface "+ "with abstract methods")}} var metaclass=klass.__class__,call_func,factory -if(metaclass===_b_.type &&(!klass.__bases__ ||klass.__bases__.length==0)){if(klass.hasOwnProperty("__new__")){if(klass.hasOwnProperty("__init__")){factory=function(){var args=[] -for(var i=0;i < arguments.length;i++){args.push(arguments[i])} -var obj=klass.__new__.apply(null,[klass].concat(args)) 
-klass.__init__.apply(null,[obj].concat(args)) -return obj}}else{factory=function(){var args=[klass] -for(var i=0;i < arguments.length;i++){args.push(arguments[i])} -return klass.__new__.apply(null,args)}}}else if(klass.hasOwnProperty("__init__")){factory=function(){var obj={__class__:klass,__dict__:_b_.dict.$factory()} -var args=[obj] -for(var i=0;i < arguments.length;i++){args.push(arguments[i])} -klass.__init__.apply(null,args) +if(metaclass===_b_.type &&(!klass.__bases__ ||klass.__bases__.length==0)){if(klass.hasOwnProperty("__new__")){if(klass.hasOwnProperty("__init__")){factory=function(){ +var obj=klass.__new__.bind(null,klass). +apply(null,arguments) +klass.__init__.bind(null,obj).apply(null,arguments) +return obj}}else{factory=function(){return klass.__new__.bind(null,klass). +apply(null,arguments)}}}else if(klass.hasOwnProperty("__init__")){factory=function(){var obj={__class__:klass,__dict__:$B.empty_dict()} +klass.__init__.bind(null,obj).apply(null,arguments) return obj}}else{factory=function(){if(arguments.length > 0){if(arguments.length==1 && arguments[0].$nat && Object.keys(arguments[0].kw).length==0){}else{throw _b_.TypeError.$factory("object() takes no parameters")}} -return{__class__:klass,__dict__:_b_.dict.$factory()}}}}else{call_func=_b_.type.__getattribute__(metaclass,"__call__") -var factory=function(){var args=[klass] -for(var i=0;i < arguments.length;i++){args.push(arguments[i])} -return call_func.apply(null,args)}} +return{__class__:klass,__dict__:$B.empty_dict()}}}}else{call_func=_b_.type.__getattribute__(metaclass,"__call__") +var factory=function(){return call_func.bind(null,klass).apply(null,arguments)}} factory.__class__=$B.Function factory.$infos={__name__:klass.$infos.__name__,__module__:klass.$infos.__module__} return factory} @@ -5605,7 +5889,11 @@ var member_descriptor=$B.make_class("member_descriptor",function(attr,cls){retur member_descriptor.__str__=member_descriptor.__repr__=function(self){return ""} $B.set_func_names(member_descriptor,"builtins") -var method=$B.method=$B.make_class("method") +var method=$B.method=$B.make_class("method",function(func,cls){var f=function(){return $B.$call(func).bind(null,cls).apply(null,arguments)} +f.__class__=method +f.$infos=func.$infos +return f} +) method.__eq__=function(self,other){return self.$infos !==undefined && other.$infos !==undefined && self.$infos.__func__===other.$infos.__func__ && @@ -5629,37 +5917,70 @@ if(key=="__class__"){throw _b_.TypeError.$factory("__class__ assignment only sup throw _b_.AttributeError.$factory("'method' object has no attribute '"+ key+"'")} $B.set_func_names(method,"builtins") -method_descriptor=$B.method_descriptor= -$B.make_class("method_descriptor") -classmethod_descriptor=$B.classmethod_descriptor= -$B.make_class("classmethod_descriptor") +$B.method_descriptor=$B.make_class("method_descriptor") +$B.classmethod_descriptor=$B.make_class("classmethod_descriptor") +$B.GenericAlias=$B.make_class("GenericAlias",function(origin_class,items){return{ +__class__:$B.GenericAlias,origin_class,items}} +) +$B.GenericAlias.__args__={__get__:function(self){return $B.fast_tuple(self.items)}} +$B.GenericAlias.__call__=function(self,...args){return self.origin_class.$factory.apply(null,args)} +$B.GenericAlias.__eq__=function(self,other){return $B.rich_comp("__eq__",self.origin_class,other.origin_class)&& +$B.rich_comp("__eq__",self.items,other.items)} +$B.GenericAlias.__getitem__=function(self,item){throw _b_.TypeError.$factory("descriptor '__getitem__' for '"+ 
+self.origin_class.$infos.__name__+"' objects doesn't apply to a '"+ +$B.class_name(item)+"' object")} +$B.GenericAlias.__origin__={__get__:function(self){return self.origin_class}} +$B.GenericAlias.__parameters__={__get__:function(self){ +return $B.fast_tuple([])}} +$B.GenericAlias.__repr__=function(self){var items=self.items +for(var i=0,len=items.length;i < len;i++){if(items[i]===_b_.Ellipsis){items[i]='...'}else{items[i]=items[i].$infos.__name__}} +return self.origin_class.$infos.__qualname__+'['+ +items.join(", ")+']'} _b_.object.__class__=type})(__BRYTHON__) ; ;(function($B){var _b_=$B.builtins,_window=self,isWebWorker=('undefined' !==typeof WorkerGlobalScope)&& ("function"===typeof importScripts)&& (navigator instanceof WorkerNavigator) $B.args=function($fname,argcount,slots,var_names,args,$dobj,extra_pos_args,extra_kw_args){ -var $args=[] -if(Array.isArray(args)){$args=args} -else{ -for(var i=0,len=args.length;i < len;i++){$args.push(args[i])}} -var has_kw_args=false,nb_pos=$args.length,filled=0 -if(nb_pos > 0 && $args[nb_pos-1]&& $args[nb_pos-1].$nat){nb_pos-- -if(Object.keys($args[nb_pos].kw).length > 0){has_kw_args=true -var kw_args=$args[nb_pos].kw -if(Array.isArray(kw_args)){kw_args=$B.extend($fname,...kw_args)}}} +if($fname.startsWith("lambda_"+$B.lambda_magic)){$fname=""} +var has_kw_args=false,nb_pos=args.length,filled=0,extra_kw,only_positional +var end_positional=var_names.indexOf("/") +if(end_positional !=-1){var_names.splice(end_positional,1) +only_positional=var_names.slice(0,end_positional)} +if(nb_pos > 0 && args[nb_pos-1].$nat){nb_pos-- +if(Object.keys(args[nb_pos].kw).length > 0){has_kw_args=true +var kw_args=args[nb_pos].kw +if(Array.isArray(kw_args)){var kwa=kw_args[0] +for(var i=1,len=kw_args.length;i < len;i++){var kw_arg=kw_args[i] +if(kw_arg.__class__===_b_.dict){for(var k in kw_arg.$numeric_dict){throw _b_.TypeError.$factory($fname+ +"() keywords must be strings")} +for(var k in kw_arg.$object_dict){throw _b_.TypeError.$factory($fname+ +"() keywords must be strings")} +for(var k in kw_arg.$string_dict){if(kwa[k]!==undefined){throw _b_.TypeError.$factory($fname+ +"() got multiple values for argument '"+ +k+"'")} +kwa[k]=kw_arg.$string_dict[k][0]}}else{var it=_b_.iter(kw_arg),getitem=$B.$getattr(kw_arg,'__getitem__') +while(true){try{var k=_b_.next(it) +if(typeof k !=="string"){throw _b_.TypeError.$factory($fname+ +"() keywords must be strings")} +if(kwa[k]!==undefined){throw _b_.TypeError.$factory($fname+ +"() got multiple values for argument '"+ +k+"'")} +kwa[k]=getitem(k)}catch(err){if($B.is_exc(err,[_b_.StopIteration])){break} +throw err}}}} +kw_args=kwa}}} if(extra_pos_args){slots[extra_pos_args]=[] slots[extra_pos_args].__class__=_b_.tuple} if(extra_kw_args){ -extra_kw={__class__:_b_.dict,$numeric_dict:{},$object_dict:{},$string_dict :{},$str_hash:{},length:0}} +extra_kw=$B.empty_dict()} if(nb_pos > argcount){ if(extra_pos_args===null ||extra_pos_args=="*"){ msg=$fname+"() takes "+argcount+" positional argument"+ -(argcount> 1 ? "" :"s")+" but more were given" +(argcount > 1 ? "s" :"")+" but more were given" throw _b_.TypeError.$factory(msg)}else{ -for(var i=argcount;i < nb_pos;i++){slots[extra_pos_args].push($args[i])} +for(var i=argcount;i < nb_pos;i++){slots[extra_pos_args].push(args[i])} nb_pos=argcount}} -for(var i=0;i < nb_pos;i++){slots[var_names[i]]=$args[i] +for(var i=0;i < nb_pos;i++){slots[var_names[i]]=args[i] filled++} if(filled==argcount && argcount===var_names.length && ! 
has_kw_args){if(extra_kw_args){slots[extra_kw_args]=extra_kw} @@ -5668,10 +5989,11 @@ if(has_kw_args){for(var key in kw_args){var value=kw_args[key],key1=$B.to_alias( if(slots[key1]===undefined){ if(extra_kw_args){ if(key.substr(0,2)=="$$"){key=key.substr(2)} -extra_kw.$string_dict[key]=value}else{throw _b_.TypeError.$factory($fname+ +extra_kw.$string_dict[key]=[value,extra_kw.$order++]}else{throw _b_.TypeError.$factory($fname+ "() got an unexpected keyword argument '"+key+"'")}}else if(slots[key1]!==null){ throw _b_.TypeError.$factory($fname+ -"() got multiple values for argument '"+key+"'")}else{ +"() got multiple values for argument '"+key+"'")}else if(only_positional && only_positional.indexOf(key1)>-1){throw _b_.TypeError.$factory($fname+"() got an "+ +"unexpected keyword argument '"+key+"'")}else{ slots[key1]=value}}} var missing=[] for(var attr in slots){if(slots[attr]===null){if($dobj[attr]!==undefined){slots[attr]=$dobj[attr]} @@ -5691,29 +6013,32 @@ positional.slice(received))}else{throw _b_.TypeError.$factory(name+"() takes "+e " but more were given")}} $B.get_class=function(obj){ if(obj===null){return $B.$NoneDict} +if(obj===undefined){return $B.UndefinedClass} var klass=obj.__class__ if(klass===undefined){switch(typeof obj){case "number": if(obj % 1===0){ -obj.__class__=_b_.int return _b_.int} -obj.__class__=_b_.float return _b_.float case "string": return _b_.str case "boolean": return _b_.bool case "function": +if(obj.$is_js_func){ +return $B.JSObj} obj.__class__=$B.Function return $B.Function case "object": -if(obj.$class){return obj.$class} if(Array.isArray(obj)){if(Object.getPrototypeOf(obj)===Array.prototype){obj.__class__=_b_.list -return _b_.list}}else if(obj.constructor===Number){return _b_.float} +return _b_.list}}else if(obj.constructor===Number){return _b_.float}else if(typeof Node !=="undefined" +&& obj instanceof Node){return $B.DOMNode} break}} +if(klass===undefined){return $B.JSObj} return klass} -$B.class_name=function(obj){return $B.get_class(obj).$infos.__name__} +$B.class_name=function(obj){var klass=$B.get_class(obj) +if(klass===$B.JSObj){return 'Javascript '+obj.constructor.name}else{return klass.$infos.__name__}} $B.$list_comp=function(items){ -var ix=$B.UUID(),py="x"+ix+"=[]\n",indent=0 +var ix=$B.UUID(),py="x"+ix+" = []\n",indent=0 for(var i=1,len=items.length;i < len;i++){var item=items[i].replace(/\s+$/,"").replace(/\n/g,"") py+=" ".repeat(indent)+item+":\n" indent+=4} @@ -5721,33 +6046,40 @@ py+=" ".repeat(indent) py+="x"+ix+".append("+items[0]+")\n" return[py,ix]} $B.$dict_comp=function(module_name,parent_scope,items,line_num){ -var ix=$B.UUID(),res="res"+ix,py=res+"={}\n", +var ix=$B.UUID(),res="comp_result_"+$B.lambda_magic+ix,py=res+" = {}\n", indent=0 for(var i=1,len=items.length;i < len;i++){var item=items[i].replace(/\s+$/,"").replace(/\n/g,"") py+=" ".repeat(indent)+item+":\n" indent++} py+=" ".repeat(indent)+res+".update({"+items[0]+"})" -var dictcomp_name="dc"+ix,root=$B.py2js({src:py,is_comp:true},module_name,dictcomp_name,parent_scope,line_num),js=root.to_js() -js+='\nreturn $locals["'+res+'"]\n' -js="(function($locals_"+dictcomp_name+"){"+js+"})({})" +var line_info=line_num+','+module_name +var dictcomp_name="dc"+ix,root=$B.py2js( +{src:py,is_comp:true,line_info:line_info},module_name,dictcomp_name,parent_scope,line_num),outer_expr=root.outermost_expr.to_js(),js=root.to_js() +js+='\nreturn '+res+'\n' +js="(function(expr){"+js+"})("+outer_expr+")" $B.clear_ns(dictcomp_name) delete $B.$py_src[dictcomp_name] return js} 
-$B.$gen_expr=function(module_name,parent_scope,items,line_num){ -var $ix=$B.UUID(),py="def __ge"+$ix+"():\n", +$B.$gen_expr=function(module_name,parent_scope,items,line_num,set_comp){ +var ix=$B.UUID(),genexpr_name=(set_comp ? "set_comp"+$B.lambda_magic :"__ge")+ix,py=`def ${genexpr_name}(expr):\n`, indent=1 for(var i=1,len=items.length;i < len;i++){var item=items[i].replace(/\s+$/,"").replace(/\n/g,"") py+=" ".repeat(indent)+item+":\n" indent+=4} py+=" ".repeat(indent) py+="yield ("+items[0]+")" -var genexpr_name="__ge"+$ix,root=$B.py2js({src:py,is_comp:true},genexpr_name,genexpr_name,parent_scope,line_num),js=root.to_js(),lines=js.split("\n") +var line_info=line_num+','+module_name +var root=$B.py2js({src:py,is_comp:true,line_info:line_info,ix:ix},genexpr_name,genexpr_name,parent_scope,line_num),js=root.to_js(),lines=js.split("\n") +if(root.outermost_expr===undefined){console.log("no outermost",module_name,parent_scope)} +var outer_expr=root.outermost_expr.to_js() js=lines.join("\n") -js+="\nvar $res = $locals_"+genexpr_name+'["'+genexpr_name+ -'"]();\n$res.is_gen_expr = true;\nreturn $res\n' -js="(function($locals_"+genexpr_name+"){"+js+"})({})\n" -delete $B.$py_src[genexpr_name] +js+="\nvar $res = $B.generator.$factory("+genexpr_name+ +')('+outer_expr+');\nreturn $res\n' +js="(function($locals_"+genexpr_name+"){"+js+"})($locals)\n" return js} +$B.copy_namespace=function(){var ns={} +for(const frame of $B.frames_stack){for(const kv of[frame[1],frame[3]]){for(var key in kv){if(key.startsWith('$$')||!key.startsWith('$')){ns[key]=kv[key]}}}} +return ns} $B.clear_ns=function(name){ if(name.startsWith("__ge")){console.log("clear ns",name)} var len=name.length @@ -5764,7 +6096,8 @@ var frame=$B.last($B.frames_stack) if(frame[1][name]!==undefined){return frame[1][name]} else if(frame[3][name]!==undefined){return frame[3][name]} else if(_b_[name]!==undefined){return _b_[name]} -else{if(frame[0]==frame[2]||frame[1].$type=="class"){throw _b_.NameError.$factory( +else{if(frame[0]==frame[2]||frame[1].$type=="class" || +frame[1].$exec_locals){throw _b_.NameError.$factory( "name '"+name+"' is not defined")} else{throw _b_.UnboundLocalError.$factory("local variable '"+ name+"' referenced before assignment")}}} @@ -5786,7 +6119,8 @@ else{throw _b_.UnboundLocalError.$factory("local variable '"+ $B.from_alias(name)+"' referenced before assignment")}} $B.$check_def=function(name,value){ if(value !==undefined){return value}else if(_b_[name]!==undefined){ -return _b_[name]} +return _b_[name]}else{var frame=$B.last($B.frames_stack) +if(frame[3][name]!==undefined){return frame[3][name]}} throw _b_.NameError.$factory("name '"+$B.from_alias(name)+ "' is not defined")} $B.$check_def_local=function(name,value){ @@ -5816,17 +6150,9 @@ throw _b_.NameError.$factory("free variable '"+$B.from_alias(name)+ $B.$JS2Py=function(src){if(typeof src==="number"){if(src % 1===0){return src} return _b_.float.$factory(src)} if(src===null ||src===undefined){return _b_.None} -var klass=$B.get_class(src) -if(klass !==undefined){if(klass===_b_.list){if(src.__class__){return src} -return $B.JSArray.$factory(src)}else if(klass===$B.JSObject){src=src.js}else{return src}} -if(typeof src=="object"){if($B.$isNode(src)){return $B.DOMNode.$factory(src)} -if($B.$isEvent(src)){return $B.$DOMEvent(src)} -if($B.$isNodeList(src)){return $B.DOMNode.$factory(src)} if(Array.isArray(src)&& -Object.getPrototypeOf(src)===Array.prototype){var res=[] -for(var i=0,len=src.length;i< len;i++){res.push($B.$JS2Py(src[i]))} -return res}} -return 
$B.JSObject.$factory(src)} +Object.getPrototypeOf(src)===Array.prototype){src.$brython_class="js" } +return src} $B.list_key=function(obj,key){key=$B.$GetInt(key) if(key < 0){key+=obj.length} var res=obj[key] @@ -5864,11 +6190,24 @@ if(obj[item]!==undefined){return obj[item]} else{index_error(obj)}} if(obj.$is_class){var class_gi=$B.$getattr(obj,"__class_getitem__",_b_.None) if(class_gi !==_b_.None){return class_gi(item)}else if(obj.__class__){class_gi=$B.$getattr(obj.__class__,"__getitem__",_b_.None) -if(class_gi !==_b_.None){return class_gi(obj,item)}}} +if(class_gi !==_b_.None){return class_gi(obj,item)}else{throw _b_.TypeError.$factory("'"+ +$B.class_name(obj.__class__)+ +"' object is not subscriptable")}}} +if(is_list){return _b_.list.$getitem(obj,item)} var gi=$B.$getattr(obj,"__getitem__",_b_.None) if(gi !==_b_.None){return gi(item)} throw _b_.TypeError.$factory("'"+$B.class_name(obj)+ "' object is not subscriptable")} +$B.getitem_slice=function(obj,slice){var res +if(Array.isArray(obj)){if(slice.start===_b_.None && slice.stop===_b_.None){if(slice.step===_b_.None ||slice.step==1){res=obj.slice()}else if(slice.step==-1){res=obj.slice().reverse()}}else if(slice.step===_b_.None){if(slice.start===_b_.None){slice.start=0} +if(slice.stop===_b_.None){slice.stop=obj.length} +if(typeof slice.start=="number" && +typeof slice.stop=="number"){if(slice.start < 0){slice.start+=obj.length} +if(slice.stop < 0){slice.stop+=obj.length} +res=obj.slice(slice.start,slice.stop)}} +if(res){res.__class__=obj.__class__ +return res}else{return _b_.list.$getitem(obj,slice)}} +return $B.$getattr(obj,"__getitem__")(slice)} $B.set_list_key=function(obj,key,value){try{key=$B.$GetInt(key)} catch(err){if(_b_.isinstance(key,_b_.slice)){var s=_b_.slice.$conv_for_seq(key,obj.length) return $B.set_list_slice_step(obj,s.start,s.stop,s.step,value)}} @@ -5887,12 +6226,8 @@ obj.splice.apply(obj,[start,stop-start].concat(res))} $B.set_list_slice_step=function(obj,start,stop,step,value){if(step===null ||step==1){return $B.set_list_slice(obj,start,stop,value)} if(step==0){throw _b_.ValueError.$factory("slice step cannot be zero")} step=$B.$GetInt(step) -if(start===null){start=step > 0 ? 0 :obj.length-1} -else{start=$B.$GetInt(start) -if(start < 0){start=Math.min(0,start+obj.length)}} -if(stop===null){stop=step > 0 ? obj.length :-1} -else{stop=$B.$GetInt(stop) -if(stop < 0){stop=Math.max(0,stop+obj.length)}} +if(start===null){start=step > 0 ? 0 :obj.length-1}else{start=$B.$GetInt(start)} +if(stop===null){stop=step > 0 ? 
obj.length :-1}else{stop=$B.$GetInt(stop)} var repl=_b_.list.$factory(value),j=0,test,nb=0 if(step > 0){test=function(i){return i < stop}} else{test=function(i){return i > stop}} @@ -5908,7 +6243,6 @@ typeof item=="number" && if(obj[item]===undefined){throw _b_.IndexError.$factory("list assignment index out of range")} obj[item]=value return}else if(obj.__class__===_b_.dict){_b_.dict.$setitem(obj,item,value) -return}else if(obj.__class__===$B.JSObject){$B.JSObject.__setattr__(obj,item,value) return}else if(obj.__class__===_b_.list){return _b_.list.$setitem(obj,item,value)} $B.$getattr(obj,"__setitem__")(item,value)} $B.augm_item_add=function(obj,item,incr){if(Array.isArray(obj)&& typeof item=="number" && @@ -5948,12 +6282,14 @@ $B.$test_expr=function(){ return $B.$test_result} $B.$is=function(a,b){ if(a instanceof Number && b instanceof Number){return a.valueOf()==b.valueOf()} +if((a===_b_.int && b==$B.long_int)|| +(a===$B.long_int && b===_b_.int)){return true} return a===b} $B.$is_member=function(item,_set){ -var f,_iter -try{f=$B.$getattr(_set,"__contains__")} +var f,_iter,method +try{method=$B.$getattr(_set.__class__ ||$B.get_class(_set),"__contains__")} catch(err){} -if(f){return f(item)} +if(method){return $B.$call(method)(_set,item)} try{_iter=_b_.iter(_set)} catch(err){} if(_iter){while(1){try{var elt=_b_.next(_iter) @@ -5966,34 +6302,37 @@ while(1){i++ try{var elt=f(i) if($B.rich_comp("__eq__",elt,item)){return true}}catch(err){if(err.__class__===_b_.IndexError){return false} throw err}}}} -$B.$call=function(callable){if(callable.__class__===$B.method){return callable} -else if(callable.$is_func ||typeof callable=="function"){return callable}else if(callable.$factory){return callable.$factory}else if(callable.$is_class){ -return callable.$factory=$B.$instance_creator(callable)}else if(callable.__class__===$B.JSObject){if(typeof(callable.js)=="function"){return callable.js}else{throw _b_.TypeError.$factory("'"+$B.class_name(callable)+ -"' object is not callable")}} +$B.$call=function(callable){if(callable.__class__===$B.method){return callable}else if(callable.$factory){return callable.$factory}else if(callable.$is_class){ +return callable.$factory=$B.$instance_creator(callable)}else if(callable.$is_js_class){ +return callable.$factory=function(){return new callable(...arguments)}}else if(callable.$is_func ||typeof callable=="function"){return callable} try{return $B.$getattr(callable,"__call__")}catch(err){throw _b_.TypeError.$factory("'"+$B.class_name(callable)+ "' object is not callable")}} -var $io=$B.make_class("io",function(){return{__class__:$io}} +var $io=$B.make_class("io",function(out){return{ +__class__:$io,out}} ) $io.flush=function(){} $io.write=function(self,msg){ -console.log(msg) +console[self.out](msg) return _b_.None} -$B.stderr=$io.$factory() -$B.stdout=$io.$factory() +if(console.error !==undefined){$B.stderr=$io.$factory("error")}else{$B.stderr=$io.$factory("log")} +$B.stdout=$io.$factory("log") $B.stdin={__class__:$io,__original__:true,closed:false,len:1,pos:0,read:function(){return ""},readline:function(){return ""}} $B.make_iterator_class=function(name){ var klass={__class__:_b_.type,__mro__:[_b_.object],$factory:function(items){return{ -__class__:klass,__dict__:_b_.dict.$factory(),counter:-1,items:items,len:items.length}},$infos:{__name__:name},$is_class:true,__iter__:function(self){self.counter=self.counter===undefined ?-1 :self.counter 
+__class__:klass,__dict__:$B.empty_dict(),counter:-1,items:items,len:items.length}},$infos:{__name__:name},$is_class:true,__iter__:function(self){self.counter=self.counter===undefined ?-1 :self.counter self.len=self.items.length return self},__len__:function(self){return self.items.length},__next__:function(self){if(typeof self.len_func=="function" && self.len_func()!=self.len){throw _b_.RuntimeError.$factory( "dictionary changed size during iteration")} self.counter++ -if(self.counter < self.items.length){return self.items[self.counter]} +if(self.counter < self.items.length){var item=self.items[self.counter] +if(self.items.$brython_class=="js"){ +item=$B.$JS2Py(item)} +return item} throw _b_.StopIteration.$factory("StopIteration")},__reduce_ex__:function(self,protocol){return $B.fast_tuple([_b_.iter,_b_.tuple.$factory([self.items])])}} $B.set_func_names(klass,"builtins") return klass} -function $err(op,klass,other){var msg="unsupported operand type(s) for "+op+": '"+ +function $err(op,klass,other){var msg="unsupported operand type(s) for "+op+" : '"+ klass.$infos.__name__+"' and '"+$B.class_name(other)+"'" throw _b_.TypeError.$factory(msg)} var ropnames=["add","sub","mul","truediv","floordiv","mod","pow","lshift","rshift","and","xor","or"] @@ -6013,16 +6352,29 @@ if(! value.$is_class){try{var v=$B.$getattr(value,"__int__")();return v}catch(e) try{var v=$B.$getattr(value,"__index__")();return v}catch(e){}} throw _b_.TypeError.$factory("'"+$B.class_name(value)+ "' object cannot be interpreted as an integer")} +$B.to_num=function(obj,methods){ +var expected_class={"__complex__":_b_.complex,"__float__":_b_.float,"__index__":_b_.int,"__int__":_b_.int} +var klass=obj.__class__ ||$B.get_class(obj) +for(var i=0;i < methods.length;i++){var missing={},method=$B.$getattr(klass,methods[i],missing) +if(method !==missing){var res=method(obj) +if(!_b_.isinstance(res,expected_class[methods[i]])){console.log(res,methods[i],expected_class[methods[i]]) +throw _b_.TypeError.$factory(methods[i]+"returned non-"+ +expected_class[methods[i]].$infos.__name__+ +"(type "+$B.get_class(res)+")")} +return res}} +return null} $B.PyNumber_Index=function(item){switch(typeof item){case "boolean": return item ? 1 :0 case "number": return item case "object": if(item.__class__===$B.long_int){return item} +if(_b_.isinstance(item,_b_.int)){ +return item.$brython_value} var method=$B.$getattr(item,"__index__",_b_.None) if(method !==_b_.None){method=typeof method=="function" ? 
method :$B.$getattr(method,"__call__") -return $B.int_or_bool(method)} +return $B.int_or_bool(method())} default: throw _b_.TypeError.$factory("'"+$B.class_name(item)+ "' object cannot be interpreted as an integer")}} @@ -6038,17 +6390,51 @@ default: throw _b_.TypeError.$factory("'"+$B.class_name(v)+ "' object cannot be interpreted as an integer")}} $B.enter_frame=function(frame){ -$B.frames_stack.push(frame)} +$B.frames_stack.push(frame) +if($B.tracefunc && $B.tracefunc !==_b_.None){if(frame[4]===$B.tracefunc || +($B.tracefunc.$infos && frame[4]&& +frame[4]===$B.tracefunc.$infos.__func__)){ +$B.tracefunc.$frame_id=frame[0] +return _b_.None}else{ +for(var i=$B.frames_stack.length-1;i >=0;i--){if($B.frames_stack[i][0]==$B.tracefunc.$frame_id){return _b_.None}} +return $B.tracefunc($B._frame.$factory($B.frames_stack,$B.frames_stack.length-1),'call',_b_.None)}} +return _b_.None} +$B.trace_exception=function(){var top_frame=$B.last($B.frames_stack) +if(top_frame[0]==$B.tracefunc.$current_frame_id){return _b_.None} +var trace_func=top_frame[1].$f_trace,exc=top_frame[1].$current_exception,frame_obj=$B._frame.$factory($B.frames_stack,$B.frames_stack.length-1) +return trace_func(frame_obj,'exception',$B.fast_tuple([exc.__class__,exc,$B.traceback.$factory(exc)]))} +$B.trace_line=function(){var top_frame=$B.last($B.frames_stack) +if(top_frame[0]==$B.tracefunc.$current_frame_id){return _b_.None} +var trace_func=top_frame[1].$f_trace,frame_obj=$B._frame.$factory($B.frames_stack,$B.frames_stack.length-1) +return trace_func(frame_obj,'line',_b_.None)} +$B.set_line=function(line_info){ +var top_frame=$B.last($B.frames_stack) +if($B.tracefunc && top_frame[0]==$B.tracefunc.$current_frame_id){return _b_.None} +top_frame[1].$line_info=line_info +var trace_func=top_frame[1].$f_trace +if(trace_func !==_b_.None){var frame_obj=$B._frame.$factory($B.frames_stack,$B.frames_stack.length-1) +top_frame[1].$ftrace=trace_func(frame_obj,'line',_b_.None)} +return true} +$B.trace_return=function(value){var top_frame=$B.last($B.frames_stack),trace_func=top_frame[1].$f_trace,frame_obj=$B._frame.$factory($B.frames_stack,$B.frames_stack.length-1) +if(top_frame[0]==$B.tracefunc.$current_frame_id){ +return _b_.None} +trace_func(frame_obj,'return',value)} function exit_ctx_managers_in_generators(frame){ -for(key in frame[1]){if(frame[1][key]&& frame[1][key].$is_generator_obj){var gen_obj=frame[1][key] -if(gen_obj.env !==undefined){for(var attr in gen_obj.env){if(attr.search(/^\$ctx_manager_exit\d+$/)>-1){ -$B.$call(gen_obj.env[attr])()}}}}}} +for(key in frame[1]){if(frame[1][key]&& frame[1][key].__class__==$B.generator){ +var gen_obj=frame[1][key] +gen_obj.return()}}} +$B.set_cm_in_generator=function(cm_exit){if(cm_exit !==undefined){$B.frames_stack.forEach(function(frame){frame[1].$cm_in_gen=frame[1].$cm_in_gen ||new Set() +frame[1].$cm_in_gen.add(cm_exit)})}} $B.leave_frame=function(arg){ -if($B.profile > 0){$B.$profile.return()} if($B.frames_stack.length==0){console.log("empty stack");return} -$B.del_exc() +if(arg && arg.value !==undefined && $B.tracefunc){if($B.last($B.frames_stack)[1].$f_trace===undefined){$B.last($B.frames_stack)[1].$f_trace=$B.tracefunc} +if($B.last($B.frames_stack)[1].$f_trace !==_b_.None){$B.trace_return(arg.value)}} var frame=$B.frames_stack.pop() -exit_ctx_managers_in_generators(frame)} +frame[1].$current_exception=undefined +if(frame[1].$close_generators){ +for(var i=0,len=frame[1].$close_generators.length;i < len;i++){var gen=frame[1].$close_generators[i] +if(gen.$has_run){gen.return()}}} +return 
_b_.None} $B.leave_frame_exec=function(arg){ if($B.profile > 0){$B.$profile.return()} if($B.frames_stack.length==0){console.log("empty stack");return} @@ -6061,20 +6447,17 @@ if(arg < min_int ||arg > max_int){return false}} return true} $B.add=function(x,y){if(typeof x.valueOf()=="number" && typeof y.valueOf()=="number"){if(typeof x=="number" && typeof y=="number"){ var z=x+y -if(z < max_int){return z} +if(z < $B.max_int && z > $B.min_int){return z}else if(z===Infinity){return _b_.float.$factory("inf")}else if(z===-Infinity){return _b_.float.$factory("-inf")}else if(isNaN(z)){return _b_.float.$factory('nan')} return $B.long_int.__add__($B.long_int.$factory(x),$B.long_int.$factory(y))}else{ return new Number(x+y)}}else if(typeof x=="string" && typeof y=="string"){ return x+y} -return _b_.getattr(x,"__add__")(y)} -$B.zzadd=function(x,y){var z=(typeof x !="number" ||typeof y !="number")? -new Number(x+y):x+y -if(x > min_int && x < max_int && y > min_int && y < max_int -&& z > min_int && z < max_int){return z} -else if((typeof x=="number" ||x.__class__===$B.long_int) -&&(typeof y=="number" ||y.__class__===$B.long_int)){if((typeof x=="number" && isNaN(x))|| -(typeof y=="number" && isNaN(y))){return _b_.float.$factory("nan")} -var res=$B.long_int.__add__($B.long_int.$factory(x),$B.long_int.$factory(y)) -return res}else{return z}} +try{var method=$B.$getattr(x.__class__ ||$B.get_class(x),"__add__")}catch(err){if(err.__class__===_b_.AttributeError){throw _b_.TypeError.$factory("unsupported operand type(s) for "+ +"+: '"+$B.class_name(x)+"' and '"+$B.class_name(y)+"'")} +throw err} +var res=$B.$call(method)(x,y) +if(res===_b_.NotImplemented){ +return $B.rich_op("add",x,y)} +return res} $B.div=function(x,y){var z=x/y if(x > min_int && x < max_int && y > min_int && y < max_int && z > min_int && z < max_int){return z} @@ -6092,13 +6475,16 @@ if(x > min_int && x < max_int && y > min_int && y < max_int else if((typeof x=="number" ||x.__class__===$B.long_int) &&(typeof y=="number" ||y.__class__===$B.long_int)){if((typeof x=="number" && isNaN(x))|| (typeof y=="number" && isNaN(y))){return _b_.float.$factory("nan")} +switch(x){case Infinity: +case-Infinity: +if(y==0){return _b_.float.$factory("nan")}else{return y > 0 ? x :-x}} return $B.long_int.__mul__($B.long_int.$factory(x),$B.long_int.$factory(y))}else{return z}} $B.sub=function(x,y){var z=(typeof x !="number" ||typeof y !="number")? 
new Number(x-y):x-y if(x > min_int && x < max_int && y > min_int && y < max_int -&& z > min_int && z < max_int){return z} -else if((typeof x=="number" ||x.__class__===$B.long_int) -&&(typeof y=="number" ||y.__class__===$B.long_int)){if((typeof x=="number" && isNaN(x))|| +&& z > min_int && z < max_int){return z}else if((typeof x=="number" ||x.__class__===$B.long_int) +&&(typeof y=="number" ||y.__class__===$B.long_int)){if(typeof x=="number" && typeof y=="number"){if(isNaN(x)||isNaN(y)){return _b_.float.$factory("nan")}else if(x===Infinity ||x===-Infinity){if(y===x){return _b_.float.$factory("nan")}else{return x}}else if(y===Infinity ||y===-Infinity){if(y===x){return _b_.float.$factory("nan")}else{return-y}}} +if((typeof x=="number" && isNaN(x))|| (typeof y=="number" && isNaN(y))){return _b_.float.$factory("nan")} return $B.long_int.__sub__($B.long_int.$factory(x),$B.long_int.$factory(y))}else{return z}} $B.ge=function(x,y){if(typeof x=="number" && typeof y=="number"){return x >=y} @@ -6123,7 +6509,7 @@ case "__ge__": return x1 >=y1 case "__gt__": return x1 > y1}} -var res,rev_op,compared=false +var res,rev_op if(x.$is_class ||x.$factory){if(op=="__eq__"){return(x===y)}else if(op=="__ne__"){return !(x===y)}else{throw _b_.TypeError.$factory("'"+method2comp[op]+ "' not supported between instances of '"+$B.class_name(x)+ "' and '"+$B.class_name(y)+"'")}} @@ -6131,11 +6517,9 @@ if(x.__class__ && y.__class__){ if(y.__class__.__mro__.indexOf(x.__class__)>-1){rev_op=reversed_op[op]||op var rev_func=$B.$getattr(y,rev_op) res=$B.$call($B.$getattr(y,rev_op))(x) -if(res !==_b_.NotImplemented){return res} -compared=true}} +if(res !==_b_.NotImplemented){return res}}} res=$B.$call($B.$getattr(x,op))(y) if(res !==_b_.NotImplemented){return res} -if(compared){return false} rev_op=reversed_op[op]||op res=$B.$call($B.$getattr(y,rev_op))(x) if(res !==_b_.NotImplemented ){return res} @@ -6146,10 +6530,11 @@ throw _b_.TypeError.$factory("'"+method2comp[op]+ "' and '"+$B.class_name(y)+"'")} var opname2opsign={sub:"-",xor:"^",mul:"*"} $B.rich_op=function(op,x,y){var x_class=x.__class__ ||$B.get_class(x),y_class=y.__class__ ||$B.get_class(y),method -if(x.__class__===y.__class__){ +if(x_class===y_class){ +if(x_class===_b_.int){return _b_.int["__"+op+"__"](x,y)} try{method=$B.$call($B.$getattr(x,"__"+op+"__"))}catch(err){if(err.__class__===_b_.AttributeError){var kl_name=$B.class_name(x) throw _b_.TypeError.$factory("unsupported operand type(s) "+ -"for "+opname2opsign[op]+": '"+kl_name+"' and '"+ +"for "+opname2opsign[op]+" : '"+kl_name+"' and '"+ kl_name+"'")} throw err} return method(y)} @@ -6166,34 +6551,28 @@ if(res !==_b_.NotImplemented){return res} throw _b_.TypeError.$factory("'"+(opname2opsign[op]||op)+ "' not supported between instances of '"+$B.class_name(x)+ "' and '"+$B.class_name(y)+"'")}else{return res}} -$B.is_none=function(o){return o===undefined ||o===null ||o==_b_.None}})(__BRYTHON__) -if(!Array.prototype.indexOf){Array.prototype.indexOf=function(obj,fromIndex){if(fromIndex < 0)fromIndex+=this.length -for(var i=fromIndex ||0,len=this.length;i < len;i++){if(this[i]===obj){return i}} -return-1}} -if(!String.prototype.repeat){String.prototype.repeat=function(count){if(count < 1){return ''} -var result='',pattern=this.valueOf() -while(count > 1){if(count & 1){result+=pattern} -count >>=1,pattern+=pattern} -return result+pattern}} +$B.is_none=function(o){return o===undefined ||o===null ||o==_b_.None} +var repr_stack=new Set() +$B.repr={enter:function(obj){if(repr_stack.has(obj)){return 
true}else{repr_stack.add(obj)}},leave:function(obj){repr_stack.delete(obj)}}})(__BRYTHON__) ; -;(function($B){var bltns=$B.InjectBuiltins() -eval(bltns) +;(function($B){var _b_=$B.builtins _b_.__debug__=false -var object=_b_.object,odga=object.__getattribute__ $B.$comps={'>':'gt','>=':'ge','<':'lt','<=':'le'} $B.$inv_comps={'>':'lt','>=':'le','<':'gt','<=':'ge'} -function check_nb_args(name,expected,args){ +var check_nb_args=$B.check_nb_args=function(name,expected,args){ var len=args.length,last=args[len-1] if(last && last.$nat=="kw"){var kw=last.kw if(Array.isArray(kw)&& kw[1]&& kw[1].__class__===_b_.dict){if(Object.keys(kw[1].$string_dict).length==0){len--}}} if(len !=expected){if(expected==0){throw _b_.TypeError.$factory(name+"() takes no argument"+ -" ("+got+" given)")}else{throw _b_.TypeError.$factory(name+"() takes exactly "+ +" ("+len+" given)")}else{throw _b_.TypeError.$factory(name+"() takes exactly "+ expected+" argument"+(expected < 2 ? '' :'s')+ " ("+len+" given)")}}} -function check_no_kw(name,x,y){ -if(x.$nat ||(y !==undefined && y.$nat)){throw _b_.TypeError.$factory(name+"() takes no keyword arguments")}} -var NoneType={$factory:function(){return None},$infos:{__name__:"NoneType",__module__:"builtins"},__bool__:function(self){return False},__class__:_b_.type,__hash__:function(self){return 0},__mro__:[object],__repr__:function(self){return 'None'},__str__:function(self){return 'None'},$is_class:true} +var check_no_kw=$B.check_no_kw=function(name,x,y){ +if(x===undefined){console.log("x undef",name,x,y)} +if((x.$nat && x.kw && x.kw[0]&& x.kw[0].length > 0)|| +(y !==undefined && y.$nat)){throw _b_.TypeError.$factory(name+"() takes no keyword arguments")}} +var NoneType={$factory:function(){return None},$infos:{__name__:"NoneType",__module__:"builtins"},__bool__:function(self){return False},__class__:_b_.type,__hash__:function(self){return 0},__mro__:[_b_.object],__repr__:function(self){return 'None'},__str__:function(self){return 'None'},$is_class:true} NoneType.__setattr__=function(self,attr){return no_set_attr(NoneType,attr)} var None={__class__:NoneType,} for(var $op in $B.$comps){ @@ -6210,10 +6589,13 @@ function abs(obj){check_nb_args('abs',1,arguments) check_no_kw('abs',obj) if(isinstance(obj,_b_.int)){if(obj.__class__===$B.long_int){return{ __class__:$B.long_int,value:obj.value,pos:true}}else{return _b_.int.$factory(Math.abs(obj))}} -if(isinstance(obj,_b_.float)){return _b_.float.$factory(Math.abs(obj))} -if(hasattr(obj,'__abs__')){return $B.$getattr(obj,'__abs__')()} -throw _b_.TypeError.$factory("Bad operand type for abs(): '"+ -$B.get_class(obj)+"'")} +if(isinstance(obj,_b_.float)){ +return _b_.float.$factory(Math.abs(_b_.float.numerator(obj)))} +var klass=obj.__class__ ||$B.get_class(obj) +try{var method=$B.$getattr(klass,"__abs__")}catch(err){if(err.__class__===_b_.AttributeError){throw _b_.TypeError.$factory("Bad operand type for abs(): '"+ +$B.class_name(obj)+"'")} +throw err} +return $B.$call(method)(obj)} function all(obj){check_nb_args('all',1,arguments) check_no_kw('all',obj) var iterable=iter(obj) @@ -6243,25 +6625,36 @@ case 16: prefix='0x';break default: console.log('invalid base:'+base)} -if(obj.__class__===$B.long_int){if(obj.pos){return prefix+$B.long_int.to_base(obj,base)} -return '-'+prefix+$B.long_int.to_base(-obj,base)} +if(obj.__class__===$B.long_int){var res=prefix+$B.long_int.to_base(obj,base) +if(! 
obj.pos){res="-"+res} +return res} var value=$B.$GetInt(obj) if(value===undefined){ throw _b_.TypeError.$factory('Error, argument must be an integer or'+ ' contains an __index__ function')} if(value >=0){return prefix+value.toString(base)} return '-'+prefix+(-value).toString(base)} +function bin_hex_oct(base,obj){ +if(isinstance(obj,_b_.int)){return $builtin_base_convert_helper(obj,base)}else{try{var klass=obj.__class__ ||$B.get_class(obj),method=$B.$getattr(klass,'__index__')}catch(err){if(err.__class__===_b_.AttributeError){throw _b_.TypeError.$factory("'"+$B.class_name(obj)+ +"' object cannot be interpreted as an integer")} +throw err} +var res=$B.$call(method)(obj) +return $builtin_base_convert_helper(res,base)}} function bin(obj){check_nb_args('bin',1,arguments) check_no_kw('bin',obj) -if(isinstance(obj,_b_.int)){return $builtin_base_convert_helper(obj,2)} -return $B.$getattr(obj,'__index__')()} +return bin_hex_oct(2,obj)} +function breakpoint(){ +$B.$import('sys',[]) +var missing={},hook=$B.$getattr($B.imported.sys,'breakpointhook',missing) +if(hook===missing){throw _b_.RuntimeError.$factory('lost sys.breakpointhook')} +return $B.$call(hook).apply(null,arguments)} function callable(obj){check_nb_args('callable',1,arguments) check_no_kw('callable',obj) return hasattr(obj,'__call__')} function chr(i){check_nb_args('chr',1,arguments) check_no_kw('chr',i) if(i < 0 ||i > 1114111){throw _b_.ValueError.$factory('Outside valid range')} -return String.fromCharCode(i)} +return String.fromCodePoint(i)} var classmethod=$B.make_class("classmethod",function(func){check_nb_args('classmethod',1,arguments) check_no_kw('classmethod',func) var f=function(){return func.apply(null,arguments)} @@ -6278,14 +6671,19 @@ return f} ) $B.set_func_names(classmethod,"builtins") var code=$B.code=$B.make_class("code") -code.__repr__=code.__str__=function(self){return ''} -code.__getattr__=function(self,attr){if(attr=="co_code"){return 'co_code'} -return self[attr]} -function compile(){var $=$B.args('compile',6,{source:null,filename:null,mode:null,flags:null,dont_inherit:null,optimize:null},['source','filename','mode','flags','dont_inherit','optimize'],arguments,{flags:0,dont_inherit:false,optimize:-1},null,null) +code.__repr__=code.__str__=function(self){return ''} +code.__getattribute__=function(self,attr){return self[attr]} +$B.set_func_names(code,"builtins") +function compile(){var $=$B.args('compile',6,{source:null,filename:null,mode:null,flags:null,dont_inherit:null,optimize:null,_feature_version:null},['source','filename','mode','flags','dont_inherit','optimize','_feature_version'],arguments,{flags:0,dont_inherit:false,optimize:-1,_feature_version:0},null,null) var module_name='$exec_'+$B.UUID() $B.clear_ns(module_name) $.__class__=code $.co_flags=$.flags +$.name="" +var interactive=$.mode=="single" &&($.flags & 0x200) +if(interactive && ! 
$.source.endsWith("\n")){ +var lines=$.source.split("\n") +if($B.last(lines).startsWith(" ")){throw _b_.SyntaxError.$factory("unexpected EOF while parsing")}} $B.py2js($.source,module_name,module_name) return $} var __debug__=$B.debug > 0 @@ -6296,9 +6694,8 @@ if(typeof attr !='string'){throw _b_.TypeError.$factory("attribute name must be $B.class_name(attr)+"'")} return $B.$getattr(obj,'__delattr__')(attr)} $B.$delete=function(name,is_global){ -function del(obj){ -if(obj.$is_generator_obj && obj.env){for(var attr in obj.env){if(attr.search(/^\$ctx_manager_exit\d+$/)>-1){$B.$call(obj.env[attr])() -delete obj.env[attr]}}}} +function del(obj){if(obj.__class__===$B.generator){ +obj.return()}} var found=false,frame=$B.last($B.frames_stack) if(! is_global){if(frame[1][name]!==undefined){found=true del(frame[1][name]) @@ -6307,8 +6704,9 @@ del(frame[3][name]) delete frame[3][name]}} if(!found){throw _b_.NameError.$factory(name)}} function dir(obj){if(obj===undefined){ -var frame=$B.last($B.frames_stack),globals_obj=frame[3],res=_b_.list.$factory(),pos=0 -for(var attr in globals_obj){if(attr.charAt(0)=='$' && attr.charAt(1)!='$'){ +var frame=$B.last($B.frames_stack) +locals_obj=frame[1],res=_b_.list.$factory(),pos=0 +for(var attr in locals_obj){if(attr.charAt(0)=='$' && attr.charAt(1)!='$'){ continue} res[pos++]=attr} _b_.list.sort(res) @@ -6321,9 +6719,7 @@ var dir_func=$B.$getattr(obj.__class__,"__dir__") return $B.$call(dir_func)(obj)} try{var res=$B.$call($B.$getattr(obj,'__dir__'))() res=_b_.list.$factory(res) -res.sort() -return res}catch(err){ -console.log(err)} +return res}catch(err){} var res=[],pos=0 for(var attr in obj){if(attr.charAt(0)!=='$' && attr !=='__class__' && obj[attr]!==undefined){res[pos++]=attr}} @@ -6332,6 +6728,8 @@ return res} function divmod(x,y){check_no_kw('divmod',x,y) check_nb_args('divmod',2,arguments) var klass=x.__class__ ||$B.get_class(x) +var dm=$B.$getattr(klass,"__divmod__",_b_.None) +if(dm !==_b_.None){return dm(x,y)} return _b_.tuple.$factory([$B.$getattr(klass,'__floordiv__')(x,y),$B.$getattr(klass,'__mod__')(x,y)])} var enumerate=$B.make_class("enumerate",function(){var $ns=$B.args("enumerate",2,{iterable:null,start:null},['iterable','start'],arguments,{start:0},null,null),_iter=iter($ns["iterable"]),start=$ns["start"] return{ @@ -6346,16 +6744,19 @@ $B.from_alias=function(attr){if(attr.substr(0,2)=='$$' && $B.aliased_names[attr. 
return attr} $B.to_alias=function(attr){if($B.aliased_names[attr]){return '$$'+attr} return attr} -function $$eval(src,_globals,_locals){var $=$B.args("eval",4,{src:null,globals:null,locals:null,is_exec:null},["src","globals","locals","is_exec"],arguments,{globals:_b_.None,locals:_b_.None,is_exec:false},null,null),src=$.src,_globals=$.globals,_locals=$.locals,is_exec=$.is_exec +function $$eval(src,_globals,_locals){var $=$B.args("eval",4,{src:null,globals:null,locals:null,mode:null},["src","globals","locals","mode"],arguments,{globals:_b_.None,locals:_b_.None,mode:"eval"},null,null),src=$.src,_globals=$.globals,_locals=$.locals,mode=$.mode +if($.src.mode && $.src.mode=="single" && +["",""].indexOf($.src.filename)>-1){ +_b_.print(">",$.src.source.trim())} var current_frame=$B.frames_stack[$B.frames_stack.length-1] -if(current_frame !==undefined){var current_locals_id=current_frame[0].replace(/\./,'_'),current_globals_id=current_frame[2].replace(/\./,'_')} +if(current_frame !==undefined){var current_locals_id=current_frame[0].replace(/\./g,'_'),current_globals_id=current_frame[2].replace(/\./g,'_')} var stack_len=$B.frames_stack.length -if(src.__class__===code){is_exec=src.mode=="exec" +if(src.__class__===code){mode=src.mode src=src.source}else if(typeof src !=='string'){throw _b_.TypeError.$factory("eval() arg 1 must be a string, bytes "+ "or code object")} -var globals_id='$exec_'+$B.UUID(),locals_id='$exec_'+$B.UUID(),parent_scope +var globals_id='$exec_'+$B.UUID(),globals_name=globals_id,locals_id='$exec_'+$B.UUID(),parent_scope if(_globals===_b_.None){if(current_locals_id==current_globals_id){locals_id=globals_id} -var local_scope={module:locals_id,id:locals_id,binding:{},bindings:{}} +var local_scope={module:globals_id,id:locals_id,binding:{},bindings:{}} for(var attr in current_frame[1]){local_scope.binding[attr]=true local_scope.bindings[attr]=true} var global_scope={module:globals_id,id:globals_id,binding:{},bindings:{}} @@ -6366,9 +6767,9 @@ global_scope.parent_block=$B.builtins_scope parent_scope=local_scope eval("$locals_"+parent_scope.id+" = current_frame[1]")}else{ if(_globals.__class__ !=_b_.dict){throw _b_.TypeError.$factory("exec() globals must be a dict, not "+ -$B.get_class(_globals).$infos.__name__)} -_globals.globals_id=_globals.globals_id ||globals_id -globals_id=_globals.globals_id +$B.class_name(_globals))} +if(_globals.globals_id){globals_id=globals_name=_globals.globals_id} +_globals.globals_id=globals_id if(_locals===_globals ||_locals===_b_.None){locals_id=globals_id parent_scope=$B.builtins_scope}else{ var grandparent_scope={id:globals_id,parent_block:$B.builtins_scope,binding:{}} @@ -6378,27 +6779,35 @@ for(var attr in _locals.$string_dict){parent_scope.binding[attr]=true}}} $B.$py_module_path[globals_id]=$B.$py_module_path[current_globals_id] eval('var $locals_'+globals_id+' = {}\nvar $locals_'+ locals_id+' = {}') -if(_globals===_b_.None){var gobj=current_frame[3],ex='var $locals_'+globals_id+' = gobj;' +if(_globals===_b_.None){var gobj=current_frame[3],ex='var $locals_'+globals_id+' = gobj;',obj={} eval(ex) -for(var attr in gobj){if((! 
attr.startsWith("$"))||attr.startsWith('$$')){eval("$locals_"+globals_id+"[attr] = gobj[attr]")}}}else{if(_globals.$jsobj){var items=_globals.$jsobj} -else{var items=_globals.$string_dict} -eval("$locals_"+globals_id+" = _globals.$string_dict") +for(var attr in gobj){if(attr.startsWith("$")&& !attr.startsWith("$$")){continue} +obj[attr]=gobj[attr]} +eval("$locals_"+globals_id+" = obj")}else{var globals_is_dict=false +if(_globals.$jsobj){var items=_globals.$jsobj}else{var items=_b_.dict.$to_obj(_globals) +_globals.$jsobj=items +globals_is_dict=true} +eval("$locals_"+globals_id+" = _globals.$jsobj") for(var item in items){var item1=$B.to_alias(item) -try{eval('$locals_'+globals_id+'["'+item1+ -'"] = items[item]')}catch(err){console.log(err) +try{eval('$locals_'+globals_id+'["'+item+'"] = items.'+item)}catch(err){console.log(err) console.log('error setting',item) break}}} -if(_locals===_b_.None){if(_globals !==_b_.None){eval('var $locals_'+locals_id+' = $locals_'+globals_id)}else{var lobj=current_frame[1],ex='' +_globals.$is_namespace=true +if(_locals===_b_.None){if(_globals !==_b_.None){eval('var $locals_'+locals_id+' = $locals_'+globals_id)}else{var lobj=current_frame[1],ex='',obj={} for(var attr in current_frame[1]){if(attr.startsWith("$")&& !attr.startsWith("$$")){continue} -ex+='$locals_'+locals_id+'["'+attr+ -'"] = current_frame[1]["'+attr+'"];' -eval(ex)}}}else{if(_locals.$jsobj){var items=_locals.$jsobj} -else{var items=_locals.$string_dict} +obj[attr]=lobj[attr]} +eval('$locals_'+locals_id+" = obj")}}else{var locals_is_dict=false +if(_locals.$jsobj){var items=_locals.$jsobj}else{locals_id_dict=true +var items=_b_.dict.$to_obj(_locals) +_locals.$jsobj=items} for(var item in items){var item1=$B.to_alias(item) try{eval('$locals_'+locals_id+'["'+item+'"] = items.'+item)}catch(err){console.log(err) console.log('error setting',item) -break}}} -eval("$locals_"+locals_id+".$src = src") +break}} +eval("$locals_"+locals_id+".$exec_locals = true")} +_locals.$is_namespace=true +if(_globals===_b_.None && _locals===_b_.None && +current_frame[0]==current_frame[2]){}else{eval("$locals_"+locals_id+".$src = src")} var root=$B.py2js(src,globals_id,locals_id,parent_scope),js,gns,lns if(_globals !==_b_.None && _locals==_b_.None){for(var attr in _globals.$string_dict){root.binding[attr]=true}} try{ @@ -6413,23 +6822,35 @@ root.children.splice(root.children.length-2,2) for(var i=0;i < children.length-1;i++){root.add(children[i])} break default: -if(!is_exec){throw _b_.SyntaxError.$factory( +if(mode=="eval"){throw _b_.SyntaxError.$factory( "eval() argument must be an expression",'',1,1,src)}} +if(mode !="eval"){ +var last=$B.last(root.children),js=last.to_js() +if(["node_js"].indexOf(last.C.type)==-1){last.to_js=function(){while(js.endsWith("\n")){js=js.substr(0,js.length-1)} +while(js.endsWith(";")){js=js.substr(0,js.length-1)} +return "return ("+js+")"}} js=root.to_js() -if(is_exec){var locals_obj=eval("$locals_"+locals_id),globals_obj=eval("$locals_"+globals_id) -if(_globals===_b_.None){var res=new Function("$locals_"+globals_id,"$locals_"+locals_id,js)(globals_obj,locals_obj)}else{current_globals_obj=current_frame[3] +var locals_obj=eval("$locals_"+locals_id),globals_obj=eval("$locals_"+globals_id) +if(_globals===_b_.None){var res=new Function("$locals_"+globals_id,"$locals_"+locals_id,js)( +globals_obj,locals_obj)}else{current_globals_obj=current_frame[3] current_locals_obj=current_frame[1] -var res=new 
Function("$locals_"+globals_id,"$locals_"+locals_id,"$locals_"+current_globals_id,"$locals_"+current_locals_id,js)(globals_obj,locals_obj,current_globals_obj,current_locals_obj)}}else{var res=eval(js)} +var res=new Function("$locals_"+globals_id,"$locals_"+locals_id,"$locals_"+current_globals_id,"$locals_"+current_locals_id,js)(globals_obj,locals_obj,current_globals_obj,current_locals_obj)} +if($.src.mode && $.src.mode=="single" && +$.src.filename==""){if(res !==_b_.None && res !==undefined){_b_.print(_b_.repr(res))}}}else{js=root.to_js() +var res=eval(js)} +if($.src.filename=="" && $.src.mode=="single" && +res !==undefined && res !==_b_.None){_b_.print(res)} gns=eval("$locals_"+globals_id) if($B.frames_stack[$B.frames_stack.length-1][2]==globals_id){gns=$B.frames_stack[$B.frames_stack.length-1][3]} if(_locals !==_b_.None){lns=eval("$locals_"+locals_id) for(var attr in lns){var attr1=$B.from_alias(attr) -if(attr1.charAt(0)!='$'){if(_locals.$jsobj){_locals.$jsobj[attr]=lns[attr]} -else{_locals.$string_dict[attr1]=lns[attr]}}}}else{for(var attr in lns){if(attr !=="$src"){current_frame[1][attr]=lns[attr]}}} +if(attr1.charAt(0)!='$'){if(_locals.$jsobj){_locals.$jsobj[attr]=lns[attr]}else{_b_.dict.$setitem(_locals,attr1,lns[attr])}}}}else{for(var attr in lns){if(attr !=="$src"){current_frame[1][attr]=lns[attr]}}} if(_globals !==_b_.None){ +if(globals_is_dict){var jsobj=_globals.$jsobj +delete _globals.$jsobj} for(var attr in gns){attr1=$B.from_alias(attr) -if(attr1.charAt(0)!='$'){if(_globals.$jsobj){_globals.$jsobj[attr]=gns[attr]} -else{_globals.$string_dict[attr1]=gns[attr]}}}}else{for(var attr in gns){if(attr !=="$src"){current_frame[3][attr]=gns[attr]}}} +if(attr1.charAt(0)!='$'){if(globals_is_dict){_b_.dict.$setitem(_globals,attr,gns[attr])}else{_globals.$jsobj[attr1]=gns[attr]}}} +for(var attr in _globals.$string_dict){if(attr.startsWith("$")&& !attr.startsWith("$$")){delete _globals.$string_dict[attr]}}}else{for(var attr in gns){if(attr !=="$src"){current_frame[3][attr]=gns[attr]}}} if(res===undefined){return _b_.None} return res}catch(err){err.src=src err.module=globals_id @@ -6445,7 +6866,7 @@ $B.clear_ns(locals_id)}} $$eval.$is_func=true function exec(src,globals,locals){var missing={} var $=$B.args("exec",3,{src:null,globals:null,locals:null},["src","globals","locals"],arguments,{globals:_b_.None,locals:_b_.None},null,null),src=$.src,globals=$.globals,locals=$.locals -return $$eval(src,globals,locals,true)||_b_.None} +return $$eval(src,globals,locals,"exec")||_b_.None} exec.$is_func=true function exit(){throw _b_.SystemExit} exit.__repr__=exit.__str__=function(){return "Use exit() or Ctrl-Z plus Return to exit"} @@ -6460,11 +6881,12 @@ filter.__iter__=function(self){return self} filter.__next__=function(self){while(true){var _item=next(self.iterable) if(self.func(_item)){return _item}}} $B.set_func_names(filter,"builtins") -function format(value,format_spec){var args=$B.args("format",2,{value:null,format_spec:null},["value","format_spec"],arguments,{format_spec:''},null,null) -var fmt=$B.$getattr(args.value,'__format__',null) -if(fmt !==null){return fmt(args.format_spec)} -throw _b_.NotImplementedError("__format__ is not implemented for object '"+ -_b_.str.$factory(args.value)+"'")} +function format(value,format_spec){var $=$B.args("format",2,{value:null,format_spec:null},["value","format_spec"],arguments,{format_spec:''},null,null) +var klass=value.__class__ ||$B.get_class(value) +try{var method=$B.$getattr(klass,'__format__')}catch(err){if(err.__class__===_b_.AttributeError){throw 
_b_.NotImplementedError("__format__ is not implemented "+ +"for object '"+_b_.str.$factory(value)+"'")} +throw err} +return $B.$call(method)(value,$.format_spec)} function attr_error(attr,cname){var msg="bad operand type for unary #: '"+cname+"'" switch(attr){case '__neg__': throw _b_.TypeError.$factory(msg.replace('#','-')) @@ -6488,8 +6910,13 @@ var mro=klass.__mro__ for(var i=0,len=mro.length;i < len;i++){if(mro[i].hasOwnProperty(attr)){return mro[i][attr]}} return false} $B.$getattr=function(obj,attr,_default){ -var rawname=attr attr=$B.to_alias(attr) +if(obj.$method_cache && +obj.$method_cache[attr]&& +obj.__class__ && +obj.__class__[attr]==obj.$method_cache[attr][1]){ +return obj.$method_cache[attr][0]} +var rawname=attr if(obj===undefined){console.log("get attr",attr,"of undefined")} var is_class=obj.$is_class ||obj.$factory var klass=obj.__class__ @@ -6498,10 +6925,11 @@ if($test){console.log("$getattr",attr,obj,klass)} if(klass !==undefined && klass.__bases__ && (klass.__bases__.length==0 || (klass.__bases__.length==1 && -klass.__bases__[0]===_b_.object))){if(obj.hasOwnProperty(attr)){return obj[attr]}else if(obj.__dict__ && +klass.__bases__[0]===_b_.object))){if($test){console.log("class without parent")} +if(obj.hasOwnProperty(attr)){return obj[attr]}else if(obj.__dict__ && obj.__dict__.$string_dict.hasOwnProperty(attr)&& !(klass.hasOwnProperty(attr)&& -klass[attr].__get__)){return obj.__dict__.$string_dict[attr]}else if(klass.hasOwnProperty(attr)){if(typeof klass[attr]!="function" && +klass[attr].__get__)){return obj.__dict__.$string_dict[attr][0]}else if(klass.hasOwnProperty(attr)){if(typeof klass[attr]!="function" && attr !="__dict__" && klass[attr].__get__===undefined){var kl=klass[attr].__class__ if(! in_mro(kl,"__get__")){return klass[attr]}}}} @@ -6527,7 +6955,11 @@ break case '__class__': return klass case '__dict__': -if(is_class){return $B.mappingproxy.$factory(obj)}else{if(obj.hasOwnProperty(attr)){return obj[attr]}else if(obj.$infos){if(obj.$infos.hasOwnProperty("__dict__")){return obj.$infos.__dict__}else if(obj.$infos.hasOwnProperty("__func__")){return obj.$infos.__func__.$infos.__dict__}} +if(is_class){var proxy={} +for(var key in obj){var key1=$B.from_alias(key) +if(! key1.startsWith("$")){proxy[key1]=obj[key]}} +proxy.__dict__=$B.getset_descriptor.$factory(obj,"__dict__") +return $B.mappingproxy.$factory(proxy)}else{if(obj.hasOwnProperty(attr)){return obj[attr]}else if(obj.$infos){if(obj.$infos.hasOwnProperty("__dict__")){return obj.$infos.__dict__}else if(obj.$infos.hasOwnProperty("__func__")){return obj.$infos.__func__.$infos.__dict__}} return $B.obj_dict(obj)} case '__doc__': for(var i=0;i < builtin_names.length;i++){if(obj===_b_[builtin_names[i]]){_get_builtins_doc() @@ -6535,40 +6967,40 @@ return $B.builtins_doc[builtin_names[i]]}} break case '__mro__': if(obj.$is_class){ -return _b_.tuple.$factory([obj].concat(obj.__mro__))} -break +return _b_.tuple.$factory([obj].concat(obj.__mro__))}else if(obj.__dict__ && +obj.__dict__.$string_dict.__mro__ !==undefined){return obj.__dict__.$string_dict.__mro__} +throw _b_.AttributeError.$factory(attr) case '__subclasses__': if(klass.$factory ||klass.$is_class){var subclasses=obj.$subclasses ||[] return function(){return subclasses}} -break -case '$$new': -if(klass===$B.JSObject && obj.js_func !==undefined){return $B.JSConstructor.$factory(obj)} break} if(typeof obj=='function'){var value=obj[attr] if(value !==undefined){if(attr=='__module__'){return value}}} -if((! 
is_class)&& klass.$native){if($test){console.log("native class",klass,klass[attr])} +if((! is_class)&& klass.$native){if(obj.$method_cache && obj.$method_cache[attr]){return obj.$method_cache[attr]} +if($test){console.log("native class",klass,klass[attr])} +if(attr=="__doc__" && klass[attr]===undefined && klass.$infos){_get_builtins_doc() +klass[attr]=$B.builtins_doc[klass.$infos.__name__]} if(klass[attr]===undefined){var object_attr=_b_.object[attr] if($test){console.log("object attr",object_attr)} -if(object_attr !==undefined){klass[attr]=object_attr} -else{if($test){console.log("obj[attr]",obj[attr])} +if(object_attr !==undefined){klass[attr]=object_attr}else{if($test){console.log("obj[attr]",obj[attr])} var attrs=obj.__dict__ if(attrs && -(object_attr=attrs.$string_dict[attr])!==undefined){return object_attr} +(object_attr=attrs.$string_dict[attr])!==undefined){return object_attr[0]} if(_default===undefined){attr_error(attr,klass.$infos.__name__)} return _default}} if(klass.$descriptors && klass.$descriptors[attr]!==undefined){return klass[attr](obj)} if(typeof klass[attr]=='function'){var func=klass[attr] if(attr=='__new__'){func.$type="staticmethod"} if(func.$type=="staticmethod"){return func} -var self=klass[attr].__class__==$B.method ? klass :obj -function method(){var args=[self] -for(var i=0,len=arguments.length;i < len;i++){args.push(arguments[i])} -return klass[attr].apply(null,args)} +var self=klass[attr].__class__==$B.method ? klass :obj,method=klass[attr].bind(null,self) method.__class__=$B.method method.$infos={__func__:func,__name__:attr,__self__:self,__qualname__:klass.$infos.__name__+"."+attr} -return method} -attr_error(rawname,klass.$infos.__name__) -return klass[attr]} +if(typeof obj=="object"){ +obj.__class__=klass +obj.$method_cache=obj.$method_cache ||{} +obj.$method_cache[attr]=method} +return method}else if(klass[attr]!==undefined){return klass[attr]} +attr_error(rawname,klass.$infos.__name__)} var mro,attr_func if(is_class){attr_func=_b_.type.__getattribute__ }else{attr_func=klass.__getattribute__ if(attr_func===undefined){var mro=klass.__mro__ @@ -6576,6 +7008,7 @@ if(mro===undefined){console.log(obj,attr,"no mro, klass",klass)} for(var i=0,len=mro.length;i < len;i++){attr_func=mro[i]['__getattribute__'] if(attr_func !==undefined){break}}}} if(typeof attr_func !=='function'){console.log(attr+' is not a function '+attr_func,klass)} +var odga=_b_.object.__getattribute__ if($test){console.log("attr_func is odga",attr_func,attr_func===odga,obj[attr])} if(attr_func===odga){var res=obj[attr] if(Array.isArray(obj)&& Array.prototype[attr]!==undefined){ @@ -6594,7 +7027,10 @@ if(is_class){cname=obj.$infos.__name__} attr_error(rawname,cname)} function globals(){ check_nb_args('globals',0,arguments) -return $B.obj_dict($B.last($B.frames_stack)[3])} +var res=$B.obj_dict($B.last($B.frames_stack)[3]) +res.$jsobj.__BRYTHON__=$B.JSObj.$factory($B) +res.$is_namespace=true +return res} function hasattr(obj,attr){check_no_kw('hasattr',obj,attr) check_nb_args('hasattr',2,arguments) try{$B.$getattr(obj,attr);return true} @@ -6604,25 +7040,28 @@ function hash(obj){check_no_kw('hash',obj) check_nb_args('hash',1,arguments) if(obj.__hashvalue__ !==undefined){return obj.__hashvalue__} if(isinstance(obj,_b_.bool)){return _b_.int.$factory(obj)} -if(isinstance(obj,_b_.int)){return obj.valueOf()} +if(isinstance(obj,_b_.int)){if(obj.$brython_value===undefined){return obj.valueOf()}else{ +return obj.__hashvalue__=obj.$brython_value}} if(obj.$is_class || obj.__class__===_b_.type || 
obj.__class__===$B.Function){return obj.__hashvalue__=$B.$py_next_hash--} if(typeof obj=="string"){var cached=hash_cache[obj] if(cached !==undefined){return cached} else{return hash_cache[obj]=_b_.str.__hash__(obj)}} -var hashfunc=$B.$getattr(obj,'__hash__',_b_.None) -if(hashfunc==_b_.None){throw _b_.TypeError.$factory("unhashable type: '"+ -$B.class_name(obj)+"'",'hash')} -if(hashfunc.$infos===undefined){return obj.__hashvalue__=hashfunc()} -if(hashfunc.$infos.__func__===_b_.object.__hash__){if($B.$getattr(obj,'__eq__').$infos.__func__ !==_b_.object.__eq__){throw _b_.TypeError.$factory("unhashable type: '"+ -$B.class_name(obj)+"'",'hash')}else{return _b_.object.__hash__(obj)}}else{return hashfunc()}} +var klass=obj.__class__ ||$B.get_class(obj) +if(klass===undefined){throw _b_.TypeError.$factory("unhashable type: '"+ +_b_.str.$factory($B.JSObj.$factory(obj))+"'")} +var hash_method=$B.$getattr(klass,'__hash__',_b_.None) +if(hash_method===_b_.None){throw _b_.TypeError.$factory("unhashable type: '"+ +$B.class_name(obj)+"'")} +if(hash_method.$infos.__func__===_b_.object.__hash__){if($B.$getattr(obj,'__eq__').$infos.__func__ !==_b_.object.__eq__){throw _b_.TypeError.$factory("unhashable type: '"+ +$B.class_name(obj)+"'",'hash')}else{return obj.__hashvalue__=_b_.object.__hash__(obj)}}else{return $B.$call(hash_method)(obj)}} function _get_builtins_doc(){if($B.builtins_doc===undefined){ var url=$B.brython_path if(url.charAt(url.length-1)=='/'){url=url.substr(0,url.length-1)} url+='/builtins_docstrings.js' var f=_b_.open(url) -eval(f.$content) +eval(f.$string) $B.builtins_doc=docs}} function help(obj){if(obj===undefined){obj='help'} if(typeof obj=='string' && _b_[obj]!==undefined){_get_builtins_doc() @@ -6639,9 +7078,9 @@ try{return $B.$getattr(obj,'__doc__')} catch(err){return ''}} help.__repr__=help.__str__=function(){return "Type help() for interactive help, or help(object) "+ "for help about object."} -function hex(x){check_no_kw('hex',x) +function hex(obj){check_no_kw('hex',obj) check_nb_args('hex',1,arguments) -return $builtin_base_convert_helper(x,16)} +return bin_hex_oct(16,obj)} function id(obj){check_no_kw('id',obj) check_nb_args('id',1,arguments) if(isinstance(obj,[_b_.str,_b_.int,_b_.float])&& @@ -6650,13 +7089,20 @@ else{return obj.$id=$B.UUID()}} function __import__(mod_name,globals,locals,fromlist,level){ var $=$B.args('__import__',5,{name:null,globals:null,locals:null,fromlist:null,level:null},['name','globals','locals','fromlist','level'],arguments,{globals:None,locals:None,fromlist:_b_.tuple.$factory(),level:0},null,null) return $B.$__import__($.name,$.globals,$.locals,$.fromlist)} -function input(msg){return prompt(msg ||'')||''} +function input(msg){var res=prompt(msg ||'')||'' +if($B.imported["sys"]&& $B.imported["sys"].ps1){ +var ps1=$B.imported["sys"].ps1,ps2=$B.imported["sys"].ps2 +if(msg==ps1 ||msg==ps2){console.log(msg,res)}} +return res} function isinstance(obj,cls){check_no_kw('isinstance',obj,cls) check_nb_args('isinstance',2,arguments) if(obj===null){return cls===None} if(obj===undefined){return false} if(cls.constructor===Array){for(var i=0;i < cls.length;i++){if(isinstance(obj,cls[i])){return true}} return false} +if(cls.__class__===$B.GenericAlias){ +throw _b_.TypeError.$factory( +'isinstance() arg 2 cannot be a parameterized generic')} if(!cls.__class__ || !(cls.$factory !==undefined ||cls.$is_class !==undefined)){throw _b_.TypeError.$factory("isinstance() arg 2 must be a type "+ "or tuple of types")} @@ -6669,9 +7115,7 @@ case "boolean": return true}} var 
klass=obj.__class__ if(klass==undefined){if(typeof obj=='string'){if(cls==_b_.str){return true} -else if($B.builtin_classes.indexOf(cls)>-1){return false}}else if(obj.contructor===Number && Number.isFinite(obj)){if(cls==_b_.float){return true} -else if($B.builtin_classes.indexOf(cls)>-1){return false}}else if(typeof obj=='number' && Number.isFinite(obj)){if(Number.isFinite(obj)&& cls==_b_.int){return true} -else if($B.builtin_classes.indexOf(cls)>-1){return false}} +else if($B.builtin_classes.indexOf(cls)>-1){return false}}else if(obj.contructor===Number && Number.isFinite(obj)){if(cls==_b_.float){return true}}else if(typeof obj=='number' && Number.isFinite(obj)){if(Number.isFinite(obj)&& cls==_b_.int){return true}} klass=$B.get_class(obj)} if(klass===undefined){return false} function check(kl,cls){if(kl===cls){return true} @@ -6689,14 +7133,13 @@ if(!klass.__class__ || !(klass.$factory !==undefined ||klass.$is_class !==undefined)){throw _b_.TypeError.$factory("issubclass() arg 1 must be a class")} if(isinstance(classinfo,_b_.tuple)){for(var i=0;i < classinfo.length;i++){if(issubclass(klass,classinfo[i])){return true}} return false} +if(classinfo.__class__===$B.GenericAlias){throw _b_.TypeError.$factory( +'issubclass() arg 2 cannot be a parameterized generic')} if(classinfo.$factory ||classinfo.$is_class){if(klass===classinfo || klass.__mro__.indexOf(classinfo)>-1){return true}} -var sch=$B.$getattr(classinfo,'__subclasscheck__',_b_.None) +var sch=$B.$getattr(classinfo.__class__ ||$B.get_class(classinfo),'__subclasscheck__',_b_.None) if(sch==_b_.None){return false} -if(classinfo===_b_.type || -(classinfo.__bases__ && -classinfo.__bases__.indexOf(_b_.type)>-1)){return sch(classinfo,klass)} -return sch(klass)} +return sch(classinfo,klass)} var iterator_class=$B.make_class("iterator",function(getitem,len){return{ __class__:iterator_class,getitem:getitem,len:len,counter:-1}} ) @@ -6712,13 +7155,12 @@ callable_iterator.__next__=function(self){var res=self.func() if($B.rich_comp("__eq__",res,self.sentinel)){throw _b_.StopIteration.$factory()} return res} $B.$iter=function(obj,sentinel){ -if(sentinel===undefined){try{var _iter=$B.$getattr(obj,'__iter__') -_iter=$B.$call(_iter)}catch(err){var gi=$B.$getattr(obj,'__getitem__',-1),ln=$B.$getattr(obj,'__len__',-1) -if(gi !==-1){if(ln !==-1){var len=$B.$getattr(ln,'__call__')() -return iterator_class.$factory(gi,len)}else{return iterator_class.$factory(gi,null)}} -throw _b_.TypeError.$factory("'"+$B.class_name(obj)+ -"' object is not iterable")} -var res=$B.$call(_iter)() +if(sentinel===undefined){var klass=obj.__class__ ||$B.get_class(obj) +try{var _iter=$B.$call($B.$getattr(klass,'__iter__'))}catch(err){if(err.__class__===_b_.AttributeError){try{var gi_method=$B.$call($B.$getattr(klass,'__getitem__')),gi=function(i){return gi_method(obj,i)},ln=len(obj) +return iterator_class.$factory(gi,len)}catch(err){throw _b_.TypeError.$factory("'"+$B.class_name(obj)+ +"' object is not iterable")}} +throw err} +var res=$B.$call(_iter)(obj) try{$B.$getattr(res,'__next__')} catch(err){if(isinstance(err,_b_.AttributeError)){throw _b_.TypeError.$factory( "iter() returned non-iterator of type '"+ @@ -6730,12 +7172,14 @@ if($.args.length > 0){var sentinel=$.args[0]} return $B.$iter($.obj,sentinel)} function len(obj){check_no_kw('len',obj) check_nb_args('len',1,arguments) -try{return $B.$getattr(obj,'__len__')()} -catch(err){throw _b_.TypeError.$factory("object of type '"+ -$B.class_name(obj)+"' has no len()")}} +var klass=obj.__class__ ||$B.get_class(obj) +try{var 
method=$B.$getattr(klass,'__len__')}catch(err){throw _b_.TypeError.$factory("object of type '"+ +$B.class_name(obj)+"' has no len()")} +return $B.$call(method)(obj)} function locals(){ check_nb_args('locals',0,arguments) var res=$B.obj_dict($B.last($B.frames_stack)[1]) +res.$is_namespace=true delete res.$jsobj.__annotations__ return res} var map=$B.make_class("map",function(){var $=$B.args('map',2,{func:null,it1:null},['func','it1'],arguments,{},'args',null),func=$B.$call($.func) @@ -6795,7 +7239,8 @@ memoryview.__getitem__=function(self,key){if(isinstance(key,_b_.int)){var start= if(self.format=="I"){var res=self.obj.source[start],coef=256 for(var i=1;i < 4;i++){res+=self.obj.source[start+i]*coef coef*=256} -return res}else if("B".indexOf(self.format)>-1){return self.obj.source[key]}else{ +return res}else if("B".indexOf(self.format)>-1){if(key > self.obj.source.length-1){throw _b_.KeyError.$factory(key)} +return self.obj.source[key]}else{ return self.obj.source[key]}} var res=self.obj.__class__.__getitem__(self.obj,key) if(key.__class__===_b_.slice){return memoryview.$factory(res)}} @@ -6823,8 +7268,8 @@ $B.set_func_names(memoryview,"builtins") function min(){return $extreme(arguments,'__lt__')} function next(obj){check_no_kw('next',obj) var missing={},$=$B.args("next",2,{obj:null,def:null},['obj','def'],arguments,{def:missing},null,null) -var ga=$B.$getattr(obj,'__next__') -if(ga !==undefined){try{return $B.$call(ga)()}catch(err){if(err.__class__===_b_.StopIteration && +var klass=obj.__class__ ||$B.get_class(obj),ga=$B.$call($B.$getattr(klass,"__next__")) +if(ga !==undefined){try{return $B.$call(ga)(obj)}catch(err){if(err.__class__===_b_.StopIteration && $.def !==missing){return $.def} throw err}} throw _b_.TypeError.$factory("'"+$B.class_name(obj)+ @@ -6834,7 +7279,9 @@ var NotImplementedType=$B.make_class("NotImplementedType",function(){return NotI NotImplementedType.__repr__=NotImplementedType.__str__=function(self){return "NotImplemented"} var NotImplemented={__class__:NotImplementedType} function $not(obj){return !$B.$bool(obj)} -function oct(x){return $builtin_base_convert_helper(x,8)} +function oct(obj){check_no_kw('oct',obj) +check_nb_args('oct',1,arguments) +return bin_hex_oct(8,obj)} function ord(c){check_no_kw('ord',c) check_nb_args('ord',1,arguments) if(typeof c=='string'){if(c.length==1){return c.charCodeAt(0)} @@ -6844,6 +7291,10 @@ switch($B.get_class(c)){case _b_.str: if(c.length==1){return c.charCodeAt(0)} throw _b_.TypeError.$factory('ord() expected a character, but '+ 'string of length '+c.length+' found') +case _b_.str.$surrogate: +if(c.items.length==1){return c.items[0].codePointAt(0)} +throw _b_.TypeError.$factory('ord() expected a character, but '+ +'string of length '+c.items.length+' found') case _b_.bytes: case _b_.bytearray: if(c.source.length==1){return c.source[0]} @@ -6852,16 +7303,14 @@ throw _b_.TypeError.$factory('ord() expected a character, but '+ default: throw _b_.TypeError.$factory('ord() expected a character, but '+ $B.class_name(c)+' was found')}} -function pow(){var $ns=$B.args('pow',3,{x:null,y:null,z:null},['x','y','z'],arguments,{z:null},null,null) -var x=$ns['x'],y=$ns['y'],z=$ns['z'] -var res=$B.$getattr(x,'__pow__')(y,z) -if(z===null){return res} -else{if(x !=_b_.int.$factory(x)||y !=_b_.int.$factory(y)){throw _b_.TypeError.$factory("pow() 3rd argument not allowed "+ +function pow(){var $=$B.args('pow',3,{x:null,y:null,mod:null},['x','y','mod'],arguments,{mod:None},null,null),x=$.x,y=$.y,z=$.mod +var klass=x.__class__ ||$B.get_class(x) 
+if(z===_b_.None){return $B.$call($B.$getattr(klass,'__pow__'))(x,y)}else{if(x !=_b_.int.$factory(x)||y !=_b_.int.$factory(y)){throw _b_.TypeError.$factory("pow() 3rd argument not allowed "+ "unless all arguments are integers")} -return $B.$getattr(res,'__mod__')(z)}} +return $B.$call($B.$getattr(klass,'__pow__'))(x,y,z)}} function $print(){var $ns=$B.args('print',0,{},[],arguments,{},'args','kw') var ks=$ns['kw'].$string_dict -var end=(ks['end']===undefined ||ks['end']===None)? '\n' :ks['end'],sep=(ks['sep']===undefined ||ks['sep']===None)? ' ' :ks['sep'],file=ks['file']===undefined ? $B.stdout :ks['file'],args=$ns['args'] +var end=(ks['end']===undefined ||ks['end']===None)? '\n' :ks['end'][0],sep=(ks['sep']===undefined ||ks['sep']===None)? ' ' :ks['sep'][0],file=ks['file']===undefined ? $B.stdout :ks['file'][0],args=$ns['args'] var items=[] args.forEach(function(arg){items.push(_b_.str.$factory(arg))}) var res=items.join(sep)+end @@ -6873,7 +7322,10 @@ if(flush !==None){flush()} return None} $print.__name__='print' $print.is_func=true -var property=$B.make_class("property") +var property=$B.make_class("property",function(fget,fset,fdel,doc){var res={__class__:property} +property.__init__(res,fget,fset,fdel,doc) +return res} +) property.__init__=function(self,fget,fset,fdel,doc){self.__doc__=doc ||"" self.$type=fget.$type self.fget=fget @@ -6889,54 +7341,48 @@ self.__delete__=fdel; self.getter=function(fget){return property.$factory(fget,self.fset,self.fdel,self.__doc__)} self.setter=function(fset){return property.$factory(self.fget,fset,self.fdel,self.__doc__)} self.deleter=function(fdel){return property.$factory(self.fget,self.fset,fdel,self.__doc__)}} +property.__repr__=function(self){return _b_.repr(self.fget(self))} +property.__str__=function(self){return _b_.str.$factory(self.fget(self))} $B.set_func_names(property,"builtins") function quit(){throw _b_.SystemExit} quit.__repr__=quit.__str__=function(){return "Use quit() or Ctrl-Z plus Return to exit"} function repr(obj){check_no_kw('repr',obj) check_nb_args('repr',1,arguments) -if(obj.$is_class ||obj.$factory){ -var func=_b_.type.__getattribute__( -obj.__class__ ||$B.get_class(obj),'__repr__') -return func(obj)} -var func=$B.$getattr(obj,'__repr__') -if(func !==undefined){return $B.$call(func)()} -throw _b_.AttributeError.$factory("object has no attribute __repr__")} +var klass=obj.__class__ ||$B.get_class(obj) +return $B.$call($B.$getattr(klass,"__repr__"))(obj)} var reversed=$B.make_class("reversed",function(seq){ check_no_kw('reversed',seq) check_nb_args('reversed',1,arguments) -var rev_method=$B.$getattr(seq,'__reversed__',null) -if(rev_method !==null){try{return $B.$call(rev_method)()}catch(err){throw _b_.TypeError.$factory("'"+$B.class_name(seq)+ -"' object is not reversible")}} -try{var res={__class__:reversed,$counter :$B.$getattr(seq,'__len__')(),getter:$B.$getattr(seq,'__getitem__')} -return res}catch(err){throw _b_.TypeError.$factory("argument to reversed() must be a sequence")}} +var klass=seq.__class__ ||$B.get_class(seq),rev_method=$B.$getattr(klass,'__reversed__',null) +if(rev_method !==null){return $B.$call(rev_method)(seq)} +try{var method=$B.$getattr(klass,'__getitem__')}catch(err){throw _b_.TypeError.$factory("argument to reversed() must be a sequence")} +var res={__class__:reversed,$counter :_b_.len(seq),getter:function(i){return $B.$call(method)(seq,i)}} +return res} ) reversed.__iter__=function(self){return self} reversed.__next__=function(self){self.$counter-- if(self.$counter < 0){throw 
_b_.StopIteration.$factory('')} return self.getter(self.$counter)} $B.set_func_names(reversed,"builtins") -function round(arg,n){var $=$B.args('round',2,{number:null,ndigits:null},['number','ndigits'],arguments,{ndigits:None},null,null),arg=$.number,n=$.ndigits -if(!isinstance(arg,[_b_.int,_b_.float])){if(!hasattr(arg,'__round__')) -throw _b_.TypeError.$factory("type "+arg.__class__+ -" doesn't define __round__ method") -if(n===undefined){return $B.$getattr(arg,'__round__')()} -else{return $B.$getattr(arg,'__round__')(n)}} +function round(){var $=$B.args('round',2,{number:null,ndigits:null},['number','ndigits'],arguments,{ndigits:None},null,null),arg=$.number,n=$.ndigits===None ? 0 :$.ndigits +if(!isinstance(arg,[_b_.int,_b_.float])){var klass=arg.__class__ ||$B.get_class(arg) +try{return $B.$call($B.$getattr(klass,"__round__")).apply(null,arguments)}catch(err){if(err.__class__===_b_.AttributeError){throw _b_.TypeError.$factory("type "+$B.class_name(arg)+ +" doesn't define __round__ method")}else{throw err}}} if(isinstance(arg,_b_.float)&& (arg.value===Infinity ||arg.value===-Infinity)){throw _b_.OverflowError.$factory("cannot convert float infinity to integer")} -if(n===None){var floor=Math.floor(arg) -var diff=Math.abs(arg-floor) -if(diff==0.5){if(floor % 2){return Math.round(arg)}else{return Math.floor(arg)}}else{return _b_.int.$factory(Math.round(arg))}} if(!isinstance(n,_b_.int)){throw _b_.TypeError.$factory( -"'"+n.__class__+"' object cannot be interpreted as an integer")} -var mult=Math.pow(10,n) -if(isinstance(arg,_b_.float)){return _b_.float.$factory(_b_.int.__truediv__( -Number(Math.round(arg.valueOf()*mult)),mult))}else{return _b_.int.$factory(_b_.int.__truediv__( -Number(Math.round(arg.valueOf()*mult)),mult))}} +"'"+$B.class_name(n)+"' object cannot be interpreted as an integer")} +var mult=Math.pow(10,n),x=arg*mult,floor=Math.floor(x),diff=Math.abs(x-floor),res +if(diff==0.5){if(floor % 2){floor+=1} +res=_b_.int.__truediv__(floor,mult)}else{res=_b_.int.__truediv__(Math.round(x),mult)} +if($.ndigits===None){ +return res.valueOf()}else if(arg instanceof Number){return new Number(res)}else{return res.valueOf()}} function setattr(){var $=$B.args('setattr',3,{obj:null,attr:null,value:null},['obj','attr','value'],arguments,{},null,null),obj=$.obj,attr=$.attr,value=$.value if(!(typeof attr=='string')){throw _b_.TypeError.$factory("setattr(): attribute name must be string")} return $B.$setattr(obj,attr,value)} $B.$setattr=function(obj,attr,value){ var $test=false +var unaliased=$B.from_alias(attr) if($B.aliased_names[attr]){attr='$$'+attr}else if(attr=='__dict__'){ if(! 
isinstance(value,_b_.dict)){throw _b_.TypeError.$factory("__dict__ must be set to a dictionary, "+ "not a '"+$B.class_name(value)+"'")} @@ -6950,7 +7396,7 @@ if(value.__class__){if(value.__module__=="builtins"){error("__class__ assignemen "' object layout differs from '"+ $B.class_name(value)+"'")}}}} obj.__class__=value -return None} +return None}else if(attr=="__doc__" && obj.__class__===_b_.property){obj[attr]=value} if($test){console.log("set attr",attr,"to",obj)} if(obj.$factory ||obj.$is_class){var metaclass=obj.__class__ if($test){console.log("obj is class",metaclass,metaclass[attr])} @@ -6961,15 +7407,16 @@ obj[attr]=value if(attr=="__init__" ||attr=="__new__"){ obj.$factory=$B.$instance_creator(obj)}else if(attr=="__bases__"){ obj.__mro__=_b_.type.mro(obj)} +if($test){console.log("after setattr",obj)} return None} var res=obj[attr],klass=obj.__class__ ||$B.get_class(obj) -if($test){console.log('set attr',attr,'to',obj,obj[attr],'class',klass)} +if($test){console.log('set attr',attr,'of obj',obj,'class',klass,"obj[attr]",obj[attr])} if(res===undefined && klass){res=klass[attr] if(res===undefined){var mro=klass.__mro__,_len=mro.length for(var i=0;i < _len;i++){res=mro[i][attr] if(res !==undefined){break}}}} -if($test){console.log('set attr',attr,'found in class',res)} -if(res !==undefined){ +if($test){console.log('set attr',attr,'klass',klass,'found in class',res)} +if(res !==undefined && res !==null){ if(res.__set__ !==undefined){res.__set__(res,obj,value);return None} var rcls=res.__class__,__set1__ if(rcls !==undefined){var __set1__=rcls.__set__ @@ -6992,13 +7439,14 @@ if(klass && klass.__slots__ && special_attrs.indexOf(attr)==-1 && ! _setattr){function mangled_slots(klass){if(klass.__slots__){if(Array.isArray(klass.__slots__)){return klass.__slots__.map(function(item){if(item.startsWith("__")&& ! item.endsWith("_")){return "_"+klass.$infos.__name__+item}else{return item}})}else{return klass.__slots__}} return[]} var has_slot=false -if(mangled_slots(klass).indexOf(attr)>-1){has_slot=true}else{for(var i=0;i < klass.__mro__.length;i++){var kl=klass.__mro__[i] -if(mangled_slots(kl).indexOf(attr)>-1){has_slot=true +if(mangled_slots(klass).indexOf(unaliased)>-1){has_slot=true}else{for(var i=0;i < klass.__mro__.length;i++){var kl=klass.__mro__[i] +if(mangled_slots(kl).indexOf(unaliased)>-1){has_slot=true break}}} if(! 
has_slot){throw _b_.AttributeError.$factory("'"+klass.$infos.__name__+ -"' object has no attribute '"+attr+"'")}} +"' object has no attribute '"+unaliased+"'")}} if($test){console.log("attr",attr,"use _setattr",_setattr)} -if(!_setattr){if(obj.__dict__===undefined){obj[attr]=value}else{obj.__dict__.$string_dict[attr]=value}}else{_setattr(obj,attr,value)} +if(!_setattr){if(obj.__dict__===undefined){obj[attr]=value}else{_b_.dict.$setitem(obj.__dict__,unaliased,value)} +if($test){console.log("no setattr, obj",obj)}}else{_setattr(obj,attr,value)} return None} function sorted(){var $=$B.args('sorted',1,{iterable:null},['iterable'],arguments,{},null,'kw') var _list=_b_.list.$factory(iter($.iterable)),args=[_list] @@ -7012,34 +7460,43 @@ return f} ) $B.set_func_names(staticmethod,"builtins") function sum(iterable,start){var $=$B.args('sum',2,{iterable:null,start:null},['iterable','start'],arguments,{start:0},null,null),iterable=$.iterable,start=$.start -if(start===undefined){start=0}else{if(typeof start==='str'){throw _b_.TypeError.$factory("TypeError: sum() can't sum strings"+ -" [use ''.join(seq) instead]")} -if(_b_.isinstance(start,_b_.bytes)){throw _b_.TypeError.$factory("TypeError: sum() can't sum bytes"+ -" [use b''.join(seq) instead]")}} +if(_b_.isinstance(start,[_b_.str,_b_.bytes])){throw _b_.TypeError.$factory("TypeError: sum() can't sum bytes"+ +" [use b''.join(seq) instead]")} var res=start,iterable=iter(iterable) while(1){try{var _item=next(iterable) res=$B.$getattr(res,'__add__')(_item)}catch(err){if(err.__class__===_b_.StopIteration){break}else{throw err}}} return res} $B.missing_super2=function(obj){obj.$missing=true return obj} -var $$super=$B.make_class("super",function(_type1,_type2){var missing2=false -if(Array.isArray(_type2)){_type2=_type2[0] -missing2=true} -return{__class__:$$super,__thisclass__:_type1,__self_class__:_type2,$missing2:missing2}} +var $$super=$B.make_class("super",function(_type,object_or_type){if(_type===undefined && object_or_type===undefined){var frame=$B.last($B.frames_stack),pyframe=$B.imported["_sys"].Getframe() +if(pyframe.f_code && pyframe.f_code.co_varnames){_type=frame[1].__class__ +if(_type===undefined){throw _b_.RuntimeError.$factory("super(): no arguments")} +object_or_type=frame[1][pyframe.f_code.co_varnames[0]]}else{throw _b_.RuntimeError.$factory("super(): no arguments")}} +if(Array.isArray(object_or_type)){object_or_type=object_or_type[0]} +return{ +__class__:$$super,__thisclass__:_type,__self_class__:object_or_type}} ) +$$super.__get__=function(self,instance,klass){ +return $$super.$factory(self.__thisclass__,instance)} $$super.__getattribute__=function(self,attr){var mro=self.__thisclass__.__mro__,res +if(self.__thisclass__.$is_js_class){if(attr=="__init__"){ +return function(){mro[0].$js_func.call(self.__self_class__,...arguments)}}} var sc=self.__self_class__ -if(sc !==undefined){if(!sc.$is_class){sc=sc.__class__} +if(sc !==undefined){if(!sc.$is_class){sc=sc.__class__ ||$B.get_class(sc)} var sc_mro=[sc].concat(sc.__mro__) for(var i=0;i < sc_mro.length;i++){if(sc_mro[i]===self.__thisclass__){mro=sc_mro.slice(i+1) break}}} -if(attr=="__repr__" ||attr=="__str__"){ -return function(){return $$super.__repr__(self)}} -var f=_b_.type.__getattribute__(mro[0],attr) var $test=false -if($test){console.log("super",attr,self,f,f+'')} -if(f.$type=="staticmethod"){return f} -else{if(f.__class__===$B.method){ +var f +for(var i=0,len=mro.length;i < len;i++){if(mro[i][attr]!==undefined){f=mro[i][attr] +break}} 
+if(f===undefined){if($$super[attr]!==undefined){return(function(x){return function(){var args=[x] +for(var i=0,len=arguments.length;i < len;i++){args.push(arguments[i])} +return $$super[attr].apply(null,args)}})(self)} +if($test){console.log("no attr",attr,self,"mro",mro)} +throw _b_.AttributeError.$factory(attr)} +if($test){console.log("super",attr,self,"mro",mro,"found in mro[0]",mro[0],f,f+'')} +if(f.$type=="staticmethod" ||attr=="__new__"){return f}else if(typeof f !="function"){return f}else{if(f.__class__===$B.method){ f=f.$infos.__func__} var callable=$B.$call(f) var method=function(){var res=callable(self.__self_class__,...arguments) @@ -7070,27 +7527,41 @@ $Reader.close=function(self){self.closed=true} $Reader.flush=function(self){return None} $Reader.read=function(){var $=$B.args("read",2,{self:null,size:null},["self","size"],arguments,{size:-1},null,null),self=$.self,size=$B.$GetInt($.size) if(self.closed===true){throw _b_.ValueError.$factory('I/O operation on closed file')} -self.$counter=self.$counter ||0 -if(size < 0){var res=self.$content.substr(self.$counter) -self.$counter=self.$content.length-1 -return res} -if(self.$content.__class__===_b_.bytes){res=_b_.bytes.$factory(self.$content.source.slice(self.$counter,self.$counter+size))}else{res=self.$content.substr(self.$counter-size,size)} +make_content(self) +var len=self.$binary ? self.$bytes.source.length :self.$string.length +if(size < 0){size=len-self.$counter} +if(self.$binary){res=_b_.bytes.$factory(self.$bytes.source.slice(self.$counter,self.$counter+size))}else{res=self.$string.substr(self.$counter,size)} self.$counter+=size return res} $Reader.readable=function(self){return true} +function make_content(self){ +if(self.$binary && self.$bytes===undefined){self.$bytes=_b_.str.encode(self.$string,self.encoding)}else if((! self.$binary)&& self.$string===undefined){self.$string=_b_.bytes.decode(self.$bytes,self.encoding)}} +function make_lines(self){ +if(self.$lines===undefined){make_content(self) +if(! 
self.$binary){self.$lines=self.$string.split("\n")}else{console.log("make lines, binary") +var lines=[],pos=0,source=self.$bytes.source +while(true){var ix=source.indexOf(10) +if(ix==-1){lines.push({__class__:_b_.bytes,source:source}) +break}else{lines.push({__class__:_b_.bytes,source:source.slice(0,ix+1)}) +source=source.slice(ix+1)}} +self.$lines=lines}}} $Reader.readline=function(self,size){var $=$B.args("readline",2,{self:null,size:null},["self","size"],arguments,{size:-1},null,null),self=$.self,size=$B.$GetInt($.size) self.$lc=self.$lc===undefined ?-1 :self.$lc if(self.closed===true){throw _b_.ValueError.$factory('I/O operation on closed file')} -if(self.$lc==self.$lines.length-1){return ''} -self.$lc++ -var line=self.$lines[self.$lc] -if(size > 0){line=line.substr(0,size)} -self.$counter+=line.length -return line} +make_content(self) +if(self.$binary){var ix=self.$bytes.source.indexOf(10,self.$counter) +if(ix==-1){return _b_.bytes.$factory()}else{var res={__class__:_b_.bytes,source :self.$bytes.source.slice(self.$counter,ix+1)} +self.$counter=ix+1 +return res}}else{var ix=self.$string.indexOf("\n",self.$counter) +if(ix==-1){return ''}else{var res=self.$string.substring(self.$counter,ix+1) +self.$counter=ix+1 +self.$lc+=1 +return res}}} $Reader.readlines=function(){var $=$B.args("readlines",2,{self:null,hint:null},["self","hint"],arguments,{hint:-1},null,null),self=$.self,hint=$B.$GetInt($.hint) var nb_read=0 if(self.closed===true){throw _b_.ValueError.$factory('I/O operation on closed file')} self.$lc=self.$lc===undefined ?-1 :self.$lc +make_lines(self) if(hint < 0){var lines=self.$lines.slice(self.$lc+1)}else{var lines=[] while(self.$lc < self.$lines.length && nb_read < hint){self.$lc++ @@ -7107,38 +7578,50 @@ $Reader.tell=function(self){return self.$counter} $Reader.writable=function(self){return false} $B.set_func_names($Reader,"builtins") var $BufferedReader=$B.make_class('_io.BufferedReader') -$BufferedReader.__mro__=[$Reader,object] -var $TextIOWrapper=$B.make_class('_io.TextIOWrapper') -$TextIOWrapper.__mro__=[$Reader,object] +$BufferedReader.__mro__=[$Reader,_b_.object] +var $TextIOWrapper=$B.make_class('_io.TextIOWrapper',function(){var $=$B.args("TextIOWrapper",6,{buffer:null,encoding:null,errors:null,newline:null,line_buffering:null,write_through:null},["buffer","encoding","errors","newline","line_buffering","write_through"],arguments,{encoding:"utf-8",errors:_b_.None,newline:_b_.None,line_buffering:_b_.False,write_through:_b_.False},null,null) +return{ +__class__:$TextIOWrapper,$bytes:$.buffer.$bytes,encoding:$.encoding,errors:$.errors,newline:$.newline}} +) +$TextIOWrapper.__mro__=[$Reader,_b_.object] $B.set_func_names($TextIOWrapper,"builtins") +$B.Reader=$Reader $B.TextIOWrapper=$TextIOWrapper +$B.BufferedReader=$BufferedReader function $url_open(){ -var $ns=$B.args('open',3,{file:null,mode:null,encoding:null},['file','mode','encoding'],arguments,{mode:'r',encoding:'utf-8'},'args','kw'),$res +var $ns=$B.args('open',3,{file:null,mode:null,encoding:null},['file','mode','encoding'],arguments,{mode:'r',encoding:'utf-8'},'args','kw'),$bytes,$string,$res for(var attr in $ns){eval('var '+attr+'=$ns["'+attr+'"]')} if(args.length > 0){var mode=args[0]} if(args.length > 1){var encoding=args[1]} -if(isinstance(file,$B.JSObject)){return $B.OpenFile.$factory(file.js,mode,encoding)} +if(mode.search('w')>-1){throw _b_.IOError.$factory("Browsers cannot write on disk")}else if(['r','rb'].indexOf(mode)==-1){throw _b_.ValueError.$factory("Invalid mode '"+mode+"'")} 
if(isinstance(file,_b_.str)){ var is_binary=mode.search('b')>-1 -if($ns.file==""){console.log($ns.file,$B.file_cache[$ns.file])} -if($B.file_cache.hasOwnProperty($ns.file)){var str_content=$B.file_cache[$ns.file] -if(is_binary){$res=_b_.str.encode(str_content,"utf-8")}else{$res=str_content}}else{if(is_binary){throw _b_.IOError.$factory( +if($B.file_cache.hasOwnProperty($ns.file)){$string=$B.file_cache[$ns.file]}else if($B.files && $B.files.hasOwnProperty($ns.file)){ +$res=atob($B.files[$ns.file].content) +var source=[] +for(const char of $res){source.push(char.charCodeAt(0))} +$bytes=_b_.bytes.$factory() +$bytes.source=source}else if($B.protocol !="file"){ +if(is_binary){throw _b_.IOError.$factory( "open() in binary mode is not supported")} var req=new XMLHttpRequest(); req.onreadystatechange=function(){try{var status=this.status -if(status==404){$res=_b_.IOError.$factory('File '+file+' not found')}else if(status !=200){$res=_b_.IOError.$factory('Could not open file '+ -file+' : status '+status)}else{$res=this.responseText}}catch(err){$res=_b_.IOError.$factory('Could not open file '+file+ -' : error '+err)}} -var fake_qs='?foo='+(new Date().getTime()) +if(status==404){$res=_b_.FileNotFoundError.$factory(file)}else if(status !=200){$res=_b_.IOError.$factory('Could not open file '+ +file+' : status '+status)}else{$res=this.responseText}}catch(err){$res=_b_.IOError.$factory('Could not open file '+ +file+' : error '+err)}} +var fake_qs=$B.$options.cache ? '' : +'?foo='+(new Date().getTime()) req.open('GET',file+fake_qs,false) req.overrideMimeType('text/plain; charset=utf-8') req.send() -if($res.constructor===Error){throw $res}} -if(typeof $res=="string"){var lines=$res.split('\n') -for(var i=0;i < lines.length-1;i++){lines[i]+='\n'}}else{var lines=_b_.bytes.split($res,_b_.bytes.$factory([10]))} -var res={$content:$res,$counter:0,$lines:lines,closed:False,encoding:encoding,mode:mode,name:file} +if($res.constructor===Error){throw $res} +$string=$res}else{throw _b_.FileNotFoundError.$factory( +"cannot use 'open()' with protocol 'file'")} +if($string===undefined && $bytes===undefined){throw _b_.FileNotFoundError.$factory($ns.file)} +var res={$binary:is_binary,$string:$string,$bytes:$bytes,$counter:0,closed:False,encoding:encoding,mode:mode,name:file} res.__class__=is_binary ? 
$BufferedReader :$TextIOWrapper -return res}} +return res}else{throw _b_.TypeError.$factory("invalid argument for open(): "+ +_b_.str.$factory(file))}} var zip=$B.make_class("zip",function(){var res={__class__:zip,items:[]} if(arguments.length==0)return res var $ns=$B.args('zip',0,{},[],arguments,{},'args','kw') @@ -7177,7 +7660,7 @@ for(var $func in Ellipsis){if(typeof Ellipsis[$func]=='function'){Ellipsis[$func $B.set_func_names(ellipsis) var FunctionCode=$B.make_class("function code") var FunctionGlobals=$B.make_class("function globals") -$B.Function={__class__:_b_.type,__code__:{__class__:FunctionCode,__name__:'function code'},__globals__:{__class__:FunctionGlobals,__name__:'function globals'},__mro__:[object],$infos:{__name__:'function',__module__:"builtins"},$is_class:true} +$B.Function={__class__:_b_.type,__code__:{__class__:FunctionCode,__name__:'function code'},__globals__:{__class__:FunctionGlobals,__name__:'function globals'},__mro__:[_b_.object],$infos:{__name__:'function',__module__:"builtins"},$is_class:true} $B.Function.__delattr__=function(self,attr){if(attr=="__dict__"){throw _b_.TypeError.$factory("can't deleted function __dict__")}} $B.Function.__dir__=function(self){var infos=self.$infos ||{},attrs=self.$attrs ||{} return Object.keys(infos).concat(Object.keys(attrs))} @@ -7190,22 +7673,21 @@ console.log($B.last($B.frames_stack))} method.$infos={__name__:self.$infos.__name__,__qualname__:$B.class_name(obj)+"."+self.$infos.__name__,__self__:obj,__func__:self} return method} $B.Function.__getattribute__=function(self,attr){ -if(!self.$infos){console.log("get attr",attr,"from function",self,"no $infos")} if(self.$infos && self.$infos[attr]!==undefined){if(attr=='__code__'){var res={__class__:code} for(var attr in self.$infos.__code__){res[attr]=self.$infos.__code__[attr]} res.name=self.$infos.__name__ res.filename=self.$infos.__code__.co_filename res.co_code=self+"" return res}else if(attr=='__annotations__'){ -return $B.obj_dict(self.$infos[attr])}else if(self.$infos.hasOwnProperty(attr)){return self.$infos[attr]}}else if(self.$infos.__dict__ && -self.$infos.__dict__.$string_dict[attr]!==undefined){return self.$infos.__dict__.$string_dict[attr]}else if(attr=="__closure__"){var free_vars=self.$infos.__code__.co_freevars +return $B.obj_dict(self.$infos[attr])}else if(self.$infos.hasOwnProperty(attr)){return self.$infos[attr]}}else if(self.$infos && self.$infos.__dict__ && +self.$infos.__dict__.$string_dict[attr]!==undefined){return self.$infos.__dict__.$string_dict[attr][0]}else if(attr=="__closure__"){var free_vars=self.$infos.__code__.co_freevars if(free_vars.length==0){return None} var cells=[] for(var i=0;i < free_vars.length;i++){try{cells.push($B.cell.$factory($B.$check_def_free(free_vars[i])))}catch(err){ -cells.push($B.cell.$factory(null))}} -return _b_.tuple.$factory(cells)}else if(self.$attrs && self.$attrs[attr]!==undefined){return self.$attrs[attr]}else{return _b_.object.__getattribute__(self,attr)}} +cells.push($B.cell.$factory(None))}} +return _b_.tuple.$factory(cells)}else if(attr=="__globals__"){return $B.obj_dict($B.imported[self.$infos.__module__])}else if(self.$attrs && self.$attrs[attr]!==undefined){return self.$attrs[attr]}else{return _b_.object.__getattribute__(self,attr)}} $B.Function.__repr__=$B.Function.__str__=function(self){if(self.$infos===undefined){return ''}else{return ''}} -$B.Function.__mro__=[object] +$B.Function.__mro__=[_b_.object] $B.Function.__setattr__=function(self,attr,value){if(attr=="__closure__"){throw 
_b_.AttributeError.$factory("readonly attribute")}else if(attr=="__defaults__"){ if(value===_b_.None){value=[]}else if(! isinstance(value,_b_.tuple)){throw _b_.TypeError.$factory( "__defaults__ must be set to a tuple object")} @@ -7232,9 +7714,11 @@ else{self.$attrs=self.$attrs ||{};self.$attrs[attr]=value}} $B.Function.$factory=function(){} $B.set_func_names($B.Function,"builtins") _b_.__BRYTHON__=__BRYTHON__ -$B.builtin_funcs=["abs","all","any","ascii","bin","callable","chr","compile","delattr","dir","divmod","eval","exec","exit","format","getattr","globals","hasattr","hash","help","hex","id","input","isinstance","issubclass","iter","len","locals","max","min","next","oct","open","ord","pow","print","quit","repr","round","setattr","sorted","sum","vars" +$B.builtin_funcs=["abs","all","any","ascii","bin","breakpoint","callable","chr","compile","delattr","dir","divmod","eval","exec","exit","format","getattr","globals","hasattr","hash","help","hex","id","input","isinstance","issubclass","iter","len","locals","max","min","next","oct","open","ord","pow","print","quit","repr","round","setattr","sorted","sum","vars" ] -var builtin_function=$B.builtin_function=$B.make_class("builtin_function_or_method") +var builtin_function=$B.builtin_function=$B.make_class( +"builtin_function_or_method",function(f){f.__class__=builtin_function +return f}) builtin_function.__getattribute__=$B.Function.__getattribute__ builtin_function.__reduce_ex__=builtin_function.__reduce__=function(self){return self.$infos.__name__} builtin_function.__repr__=builtin_function.__str__=function(self){return ''} @@ -7275,13 +7759,19 @@ eval(bltns) $B.del_exc=function(){var frame=$B.last($B.frames_stack) frame[1].$current_exception=undefined} $B.set_exc=function(exc){var frame=$B.last($B.frames_stack) +if(frame===undefined){console.log("no frame",exc)} frame[1].$current_exception=$B.exception(exc)} $B.get_exc=function(){var frame=$B.last($B.frames_stack) return frame[1].$current_exception} $B.$raise=function(arg){ if(arg===undefined){var es=$B.get_exc() if(es !==undefined){throw es} -throw _b_.RuntimeError.$factory("No active exception to reraise")}else if(isinstance(arg,BaseException)){throw arg}else if(arg.$is_class && issubclass(arg,BaseException)){throw $B.$call(arg)()}else{throw _b_.TypeError.$factory("exceptions must derive from BaseException")}} +throw _b_.RuntimeError.$factory("No active exception to reraise")}else if(isinstance(arg,BaseException)){if(arg.__class__===_b_.StopIteration && +$B.last($B.frames_stack)[1].$is_generator){ +arg=_b_.RuntimeError.$factory("generator raised StopIteration")} +throw arg}else if(arg.$is_class && issubclass(arg,BaseException)){if(arg===_b_.StopIteration){if($B.last($B.frames_stack)[1].$is_generator){ +throw _b_.RuntimeError.$factory("generator raised StopIteration")}} +throw $B.$call(arg)()}else{throw _b_.TypeError.$factory("exceptions must derive from BaseException")}} $B.$syntax_err_line=function(exc,module,src,pos,line_num){ var pos2line={},lnum=1,module=module.charAt(0)=="$" ? 
"" :module if(src===undefined){exc.$line_info=line_num+','+module @@ -7302,8 +7792,7 @@ exc.args=_b_.tuple.$factory([$B.$getitem(exc.args,0),module,line_num,lpos,line]) exc.lineno=line_num exc.msg=exc.args[0] exc.filename=module} -$B.$SyntaxError=function(module,msg,src,pos,line_num,root){ -if(root !==undefined && root.line_info !==undefined){ +$B.$SyntaxError=function(module,msg,src,pos,line_num,root){if(root !==undefined && root.line_info !==undefined){ line_num=root.line_info} var exc=_b_.SyntaxError.$factory(msg) $B.$syntax_err_line(exc,module,src,pos,line_num) @@ -7325,8 +7814,8 @@ if(src){var lines=src.split("\n"),line=lines[parseInt(info[0])-1] trace.push(" "+line.trim())}}}) console.log("print stack ok",trace) return trace.join("\n")} -var traceback=$B.make_class("traceback",function(exc,stack){if(stack===undefined) -stack=exc.$stack +var traceback=$B.traceback=$B.make_class("traceback",function(exc,stack){var frame=$B.last($B.frames_stack) +if(stack===undefined){stack=exc.$stack} return{ __class__ :traceback,$stack:stack,exc:exc}} ) @@ -7336,23 +7825,29 @@ attr==='tb_lineno' || attr==='tb_lasti' || attr==='tb_next'){if(self.$stack.length==0){console.log("no stack",attr)} var first_frame=self.$stack[0] -if(first_frame===undefined){console.log("last frame undef",self.$stack,Object.keys(self.$stack))} -var line_info=first_frame[1].$line_info} +line_info=self.exc.$line_infos[self.exc.$line_infos.length- +self.$stack.length]} switch(attr){case "tb_frame": return frame.$factory(self.$stack) case "tb_lineno": +var lineno if(line_info===undefined || -first_frame[0].search($B.lambda_magic)>-1){if(first_frame[4]&& first_frame[4].$infos && -first_frame[4].$infos.__code__){return first_frame[4].$infos.__code__.co_firstlineno} -return-1} -else{return parseInt(line_info.split(",")[0])} +first_frame[0].startsWith($B.lambda_magic)){if(first_frame[4]&& first_frame[4].$infos && +first_frame[4].$infos.__code__){lineno=first_frame[4].$infos.__code__.co_firstlineno}else{lineno=-1}}else{lineno=parseInt(line_info.split(",")[0])} +return lineno case "tb_lasti": -if(line_info===undefined){return ""}else{var info=line_info.split(","),src +if(line_info===undefined){console.log("no line info",self.$stack) +return ""}else{var info=line_info.split(","),src,file for(var i=self.$stack.length-1;i >=0;i--){var fr=self.$stack[i] -if(fr[2]==info[1]){src=fr[3].$src +if(fr[2]==info[1].replace(/\./g,'_')){file=fr[3].__file__ break}} -if(src===undefined && $B.file_cache.hasOwnProperty(info[1])){src=$B.file_cache[info[1]]} -if(src !==undefined){return src.split("\n")[parseInt(info[0]-1)].trim()}else{return ""}} +if(src===undefined){if($B.file_cache.hasOwnProperty(file)){src=$B.file_cache[file]}else if($B.imported[info[1]]&& $B.imported[info[1]].__file__ ){src=$B.file_cache[$B.imported[info[1]].__file__] +console.log("from filecache",line_info,$B.imported[info[1]].__file__)}} +if(src !==undefined){try{return src.split("\n")[parseInt(info[0]-1)].trim()}catch(err){console.log("error in attr tb_lasti of",self) +console.log(src,info) +throw err}}else{console.log(file) +console.log("no src for",info) +return ""}} case "tb_next": if(self.$stack.length <=1){return None} else{return traceback.$factory(self.exc,self.$stack.slice(1))} @@ -7361,43 +7856,58 @@ return _b_.object.__getattribute__(self,attr)}} $B.set_func_names(traceback,"builtins") var frame=$B.make_class("frame",function(stack,pos){var fs=stack var res={__class__:frame,f_builtins :{}, -$stack:deep_copy(stack)} +$stack:stack.slice()} if(pos===undefined){pos=0} 
res.$pos=pos if(fs.length){var _frame=fs[pos],locals_id=_frame[0],filename try{res.f_locals=$B.obj_dict(_frame[1])}catch(err){console.log("err "+err) throw err} res.f_globals=$B.obj_dict(_frame[3]) -if(_frame[3].__file__ !==undefined){filename=_frame[3].__file__}else if(locals_id.startsWith("$exec")){filename=""} +if(_frame[3].__file__ !==undefined){filename=_frame[3].__file__} +if(locals_id.startsWith("$exec")){filename=""} if(_frame[1].$line_info===undefined){res.f_lineno=-1}else{var line_info=_frame[1].$line_info.split(",") res.f_lineno=parseInt(line_info[0]) var module_name=line_info[1] if($B.imported.hasOwnProperty(module_name)){filename=$B.imported[module_name].__file__} res.f_lineno=parseInt(_frame[1].$line_info.split(',')[0])} -var co_name=locals_id.startsWith("$exec")? "" : +var co_name=locals_id.startsWith("$exec")? "" : locals_id -if(locals_id==_frame[2]){co_name=""}else{if(_frame[0].$name){co_name=_frame[0].$name}else if(_frame.length > 4){if(_frame[4].$infos){co_name=_frame[4].$infos.__name__}else{co_name=_frame[4].name} -if(filename===undefined && _frame[4].$infos.__code__){filename=_frame[4].$infos.__code__.co_filename +if(locals_id==_frame[2]){co_name=""}else if(locals_id.startsWith("lc"+$B.lambda_magic)){co_name=""}else{if(_frame[1].$name){co_name=_frame[1].$name}else if(_frame[1].$dict_comp){co_name=''}else if(_frame[1].$list_comp){co_name=''}else if(_frame.length > 4){if(_frame[4].$infos){co_name=_frame[4].$infos.__name__}else{co_name=_frame[4].name} +if(_frame[4].$infos===undefined){ +if(_frame[4].name.startsWith("__ge")){co_name=""}else if(_frame[4].name.startsWith("set_comp"+ +$B.lambda_magic)){co_name=""}else if(_frame[4].name.startsWith("lambda"+ +$B.lambda_magic)){co_name=""}}else if(filename===undefined && _frame[4].$infos.__code__){filename=_frame[4].$infos.__code__.co_filename +if(filename===undefined){filename=_frame[4].$infos.__module__} res.f_lineno=_frame[4].$infos.__code__.co_firstlineno}}} -res.f_code={__class__:$B.code,co_code:None, -co_name:co_name, -co_filename:filename } -if(res.f_code.co_filename===undefined){res.f_code.co_filename=""}} +if(_frame.length > 4 && _frame[4].$infos !==undefined){res.f_code=_frame[4].$infos.__code__}else{res.f_code={co_name:co_name,co_filename:filename}} +res.f_code.__class__=$B.code +res.f_code.co_code=_b_.None +if(filename===undefined){res.f_code.co_filename=""}} return res} ) +frame.__delattr__=function(self,attr){if(attr=="f_trace"){$B.last(self.$stack)[1].$f_trace=_b_.None}} frame.__getattr__=function(self,attr){ -if(attr=="f_back"){if(self.$pos > 0){return frame.$factory(self.$stack.slice(0,self.$stack.length-1))}else{return _b_.None}}else if(attr=="clear"){return function(){}}} +if(attr=="f_back"){if(self.$pos > 0){return frame.$factory(self.$stack.slice(0,self.$stack.length-1),self.$pos-1)}else{return _b_.None}}else if(attr=="clear"){return function(){}}else if(attr=="f_trace"){var locals=$B.last(self.$stack)[1] +if(locals.$f_trace===undefined){return _b_.None} +return locals.$f_trace}} +frame.__setattr__=function(self,attr,value){if(attr=="f_trace"){ +$B.last(self.$stack)[1].$f_trace=value}} +frame.__str__=frame.__repr__=function(self){return ''} $B.set_func_names(frame,"builtins") $B._frame=frame var BaseException=_b_.BaseException={__class__:_b_.type,__bases__ :[_b_.object],__mro__:[_b_.object],args:[],$infos:{__name__:"BaseException",__module__:"builtins"},$is_class:true} BaseException.__init__=function(self){var args=arguments[1]===undefined ?[]:[arguments[1]] self.args=_b_.tuple.$factory(args)} 
-BaseException.__repr__=function(self){return self.__class__.$infos.__name__+repr(self.args)} +BaseException.__repr__=function(self){var res=self.__class__.$infos.__name__ +if(self.args[0]){res+='('+repr(self.args[0])} +if(self.args.length > 1){res+=', '+repr($B.fast_tuple(self.args.slice(1)))} +return res+')'} BaseException.__str__=function(self){if(self.args.length > 0){return _b_.str.$factory(self.args[0])} return self.__class__.$infos.__name__} BaseException.__new__=function(cls){var err=_b_.BaseException.$factory() err.__class__=cls -err.__dict__=_b_.dict.$factory() +err.__dict__=$B.empty_dict() return err} var getExceptionTrace=function(exc,includeInternal){if(exc.__class__===undefined){if($B.debug > 1){console.log("no class",exc)} return exc+''} @@ -7412,34 +7922,44 @@ var line_info=$line_info.split(','),src if(exc.module==line_info[1]){src=exc.src} if(!includeInternal){var src=frame[3].$src if(src===undefined){if($B.VFS && $B.VFS.hasOwnProperty(frame[2])){src=$B.VFS[frame[2]][1]}else if(src=$B.file_cache[frame[3].__file__]){}else{continue}}} -var module=line_info[1] -if(module.charAt(0)=="$"){module=""} -info+="\n module "+module+" line "+line_info[0] -if(frame.length > 4 && frame[4].$infos){info+=', in '+frame[4].$infos.__name__} -if(src !==undefined){var lines=src.split("\n"); -var line=lines[parseInt(line_info[0])-1] +var file=frame[3].__file__ ||"",module=line_info[1],is_exec=module.charAt(0)=="$" +if(is_exec){module=""} +info+="\n File "+file+" line "+line_info[0] +if(frame.length > 4){if(frame[4].$infos){var name=frame[4].$infos.__name__ +if(name.startsWith("lc"+$B.lambda_magic)){info+=',in '}else{info+=', in '+name}}else if(frame[4].name.startsWith("__ge")){info+=', in '}else if(frame[4].name.startsWith("set_comp"+$B.lambda_magic)){info+=', in '}else if(frame[4].name.startsWith("lc"+$B.lambda_magic)){info+=', in '}else{console.log("frame[4]",frame[4])}}else if(frame[1].$list_comp){info+=', in '}else if(frame[1].$dict_comp){info+=', in '}else{info+=', in '} +if(src !==undefined && ! 
is_exec){var lines=src.split("\n"),line=lines[parseInt(line_info[0])-1] if(line){line=line.replace(/^[ ]+/g,"")} -info+="\n "+line}else{console.log("src undef",line_info)}} +info+="\n "+line}} if(exc.__class__===_b_.SyntaxError){info+="\n File "+exc.args[1]+", line "+exc.args[2]+ "\n "+exc.args[4]} return info} -BaseException.__getattr__=function(self,attr){if(attr=="info"){return getExceptionTrace(self,false);}else if(attr=="infoWithInternal"){return getExceptionTrace(self,true);}else if(attr=="traceback"){ +BaseException.__getattr__=function(self,attr){if(attr=="info"){return getExceptionTrace(self,false);}else if(attr=="infoWithInternal"){return getExceptionTrace(self,true);}else if(attr=="__traceback__"){ if(self.$traceback !==undefined){return self.$traceback} return traceback.$factory(self)}else{throw _b_.AttributeError.$factory(self.__class__.$infos.__name__+ " has no attribute '"+attr+"'")}} -BaseException.with_traceback=function(self,tb){self.traceback=tb +BaseException.with_traceback=function(self,tb){self.$traceback=tb return self} -function deep_copy(stack){var result=stack.slice(); -for(var i=0;i < result.length;i++){ -result[i]=result[i].slice() -result[i][1]={$line_info:result[i][1].$line_info}} -return result;} +$B.deep_copy=function(stack){var res=[] +for(const s of stack){var item=[s[0],{},s[2],{}] +if(s[4]!==undefined){item.push(s[4])} +for(const i of[1,3]){for(var key in s[i]){item[i][key]=s[i][key]}} +res.push(item)} +return res} +$B.save_stack=function(){return $B.deep_copy($B.frames_stack)} +$B.restore_stack=function(stack,locals){$B.frames_stack=stack +$B.frames_stack[$B.frames_stack.length-1][1]=locals} +$B.freeze=function(err){ +if(err.$stack===undefined){err.$line_infos=[] +for(var i=0,len=$B.frames_stack.length;i < len;i++){err.$line_infos.push($B.frames_stack[i][1].$line_info)} +err.$stack=$B.frames_stack.slice() +if($B.frames_stack.length){err.$line_info=$B.last($B.frames_stack)[1].$line_info}}} +var show_stack=$B.show_stack=function(stack){stack=stack ||$B.frames_stack +for(const frame of stack){console.log(frame[0],frame[1].$line_info)}} BaseException.$factory=function(){var err=Error() -err.args=_b_.tuple.$factory(Array.prototype.slice.call(arguments)) +err.args=$B.fast_tuple(Array.prototype.slice.call(arguments)) err.__class__=_b_.BaseException err.$py_error=true -if(err.$stack===undefined){err.$stack=deep_copy($B.frames_stack);} -if($B.frames_stack.length){err.$line_info=$B.last($B.frames_stack)[1].$line_info} +$B.freeze(err) eval("//placeholder//") err.__cause__=_b_.None err.__context__=_b_.None @@ -7448,8 +7968,8 @@ return err} BaseException.$factory.$infos={__name__:"BaseException",__qualname__:"BaseException"} $B.set_func_names(BaseException) _b_.BaseException=BaseException -$B.exception=function(js_exc){ -if(! js_exc.$py_error){console.log("Javascript exception:",js_exc) +$B.exception=function(js_exc,in_ctx_manager){ +if(! 
js_exc.__class__){console.log("Javascript exception:",js_exc) console.log($B.last($B.frames_stack)) console.log("recursion error ?",$B.is_recursion_error(js_exc)) var exc=Error() @@ -7468,16 +7988,23 @@ var $message=": "+ (js_exc.message ||"<"+js_exc+">") exc.args=_b_.tuple.$factory([$message]) exc.$py_error=true -exc.$stack=deep_copy($B.frames_stack);}else{var exc=js_exc} +$B.freeze(exc)}else{var exc=js_exc +$B.freeze(exc) +if(in_ctx_manager){ +var current_locals=$B.last($B.frames_stack)[0] +for(var i=0,len=exc.$stack.length;i < len;i++){if(exc.$stack[i][0]==current_locals){exc.$stack=exc.$stack.slice(i) +exc.$traceback=traceback.$factory(exc) +break}}}} return exc} $B.is_exc=function(exc,exc_list){ if(exc.__class__===undefined){exc=$B.exception(exc)} -var this_exc_class=exc.__class__ +var this_exc_class=exc.$is_class ? exc :exc.__class__ for(var i=0;i < exc_list.length;i++){var exc_class=exc_list[i] if(this_exc_class===undefined){console.log("exc class undefined",exc)} if(issubclass(this_exc_class,exc_class)){return true}} return false} $B.is_recursion_error=function(js_exc){ +console.log("test is js exc is recursion error",js_exc,js_exc+"") var msg=js_exc+"",parts=msg.split(":"),err_type=parts[0].trim(),err_msg=parts[1].trim() return(err_type=='InternalError' && err_msg=='too much recursion')|| (err_type=='Error' && err_msg=='Out of stack space')|| @@ -7501,8 +8028,9 @@ _str[pos++]="$B.set_func_names(_b_."+name+", 'builtins')"} try{eval(_str.join(";"))}catch(err){console.log("--err"+err) throw err}} $make_exc(["SystemExit","KeyboardInterrupt","GeneratorExit","Exception"],BaseException) -$make_exc([["StopIteration","err.value = arguments[0]"],["StopAsyncIteration","err.value = arguments[0]"],"ArithmeticError","AssertionError","AttributeError","BufferError","EOFError","ImportError","LookupError","MemoryError","NameError","OSError","ReferenceError","RuntimeError","SyntaxError","SystemError","TypeError","ValueError","Warning"],_b_.Exception) +$make_exc([["StopIteration","err.value = arguments[0]"],["StopAsyncIteration","err.value = arguments[0]"],"ArithmeticError","AssertionError","AttributeError","BufferError","EOFError",["ImportError","err.name = arguments[0]"],"LookupError","MemoryError","NameError","OSError","ReferenceError","RuntimeError",["SyntaxError","err.msg = arguments[0]"],"SystemError","TypeError","ValueError","Warning"],_b_.Exception) $make_exc(["FloatingPointError","OverflowError","ZeroDivisionError"],_b_.ArithmeticError) +$make_exc([["ModuleNotFoundError","err.name = arguments[0]"]],_b_.ImportError) $make_exc(["IndexError","KeyError"],_b_.LookupError) $make_exc(["UnboundLocalError"],_b_.NameError) $make_exc(["BlockingIOError","ChildProcessError","ConnectionError","FileExistsError","FileNotFoundError","InterruptedError","IsADirectoryError","NotADirectoryError","PermissionError","ProcessLookupError","TimeoutError"],_b_.OSError) @@ -7514,7 +8042,20 @@ $make_exc(["UnicodeError"],_b_.ValueError) $make_exc(["UnicodeDecodeError","UnicodeEncodeError","UnicodeTranslateError"],_b_.UnicodeError) $make_exc(["DeprecationWarning","PendingDeprecationWarning","RuntimeWarning","SyntaxWarning","UserWarning","FutureWarning","ImportWarning","UnicodeWarning","BytesWarning","ResourceWarning"],_b_.Warning) $make_exc(["EnvironmentError","IOError","VMSError","WindowsError"],_b_.OSError) -$B.$TypeError=function(msg){throw _b_.TypeError.$factory(msg)}})(__BRYTHON__) +$B.$TypeError=function(msg){throw _b_.TypeError.$factory(msg)} +var se=_b_.SyntaxError.$factory +_b_.SyntaxError.$factory=function(){var 
arg=arguments[0] +if(arg.__class__===_b_.SyntaxError){return arg} +var exc=se.apply(null,arguments),frame=$B.last($B.frames_stack) +if(frame){line_info=frame[1].$line_info +exc.filename=frame[3].__file__ +exc.lineno=parseInt(line_info.split(",")[0]) +var src=$B.file_cache[frame[3].__file__] +if(src){lines=src.split("\n") +exc.text=lines[exc.lineno-1]} +exc.offset=arg.offset} +return exc} +_b_.SyntaxError})(__BRYTHON__) ; ;(function($B){var _b_=$B.builtins,None=_b_.None,range={__class__:_b_.type,__mro__:[_b_.object],$infos:{__module__:"builtins",__name__:"range"},$is_class:true,$native:true,$descriptors:{start:true,step:true,stop:true}} @@ -7551,7 +8092,7 @@ range.__hash__=function(self){var len=range.__len__(self) if(len==0){return _b_.hash(_b_.tuple.$factory([0,None,None]))} if(len==1){return _b_.hash(_b_.tuple.$factory([1,self.start,None]))} return _b_.hash(_b_.tuple.$factory([len,self.start,self.step]))} -var RangeIterator={__class__:_b_.type,__mro__:[_b_.object],__iter__:function(self){return self},__next__:function(self){return _b_.next(self.obj)},$infos:{__name__:"range_iterator",__module__:"builtins"}} +var RangeIterator={__class__:_b_.type,__mro__:[_b_.object],__iter__:function(self){return self},__next__:function(self){return _b_.next(self.obj)},$infos:{__name__:"range_iterator",__module__:"builtins"},$is_class:true} RangeIterator.$factory=function(obj){return{__class__:RangeIterator,obj:obj}} $B.set_func_names(RangeIterator,"builtins") range.__iter__=function(self){var res={__class__ :range,start:self.start,stop:self.stop,step:self.step} @@ -7630,12 +8171,11 @@ var step=self.step===None ? 1 :$B.PyNumber_Index(self.step),step_is_neg=$B.gt(0, if(step==0){throw _b_.ValueError.$factory('slice step cannot be zero')} var start if(self.start===None){start=step_is_neg ? len_1 :0}else{start=$B.PyNumber_Index(self.start) -if($B.gt(0,start)){start=$B.add(start,len)} -if($B.gt(0,start)){start=step < 0 ?-1 :0} +if($B.gt(0,start)){start=$B.add(start,len) +if($B.gt(0,start)){start=0}} if($B.ge(start,len)){start=step < 0 ? len_1 :len}} if(self.stop===None){stop=step_is_neg ?-1 :len}else{stop=$B.PyNumber_Index(self.stop) -if($B.gt(0,stop)){stop+=len} -if($B.gt(0,stop)){stop=step < 0 ?-1 :0} +if($B.gt(0,stop)){stop=$B.add(stop,len)} if($B.ge(stop,len)){stop=step_is_neg ? 
len_1 :len}} return{start:start,stop:stop,step:step}} slice.start=function(self){return self.start} @@ -7668,7 +8208,7 @@ $B.set_func_names(slice,"builtins") _b_.range=range _b_.slice=slice})(__BRYTHON__) ; -;(function($B){var _b_=$B.builtins,object=_b_.object,isinstance=_b_.isinstance,getattr=_b_.getattr,None=_b_.None +;(function($B){var _b_=$B.builtins var from_unicode={},to_unicode={} $B.to_bytes=function(obj){var res if(_b_.isinstance(obj,[bytes,bytearray])){res=obj.source}else{var ga=$B.$getattr(obj,"tobytes",null) @@ -7677,55 +8217,63 @@ else{throw _b_.TypeError.$factory("object doesn't support the buffer protocol")} return res} function _strip(self,cars,lr){if(cars===undefined){cars=[] var ws='\r\n \t' -for(var i=0,len=ws.length;i < len;i++){cars.push(ws.charCodeAt(i))}}else if(isinstance(cars,bytes)){cars=cars.source}else{throw _b_.TypeError.$factory("Type str doesn't support the buffer API")} +for(var i=0,len=ws.length;i < len;i++){cars.push(ws.charCodeAt(i))}}else if(_b_.isinstance(cars,bytes)){cars=cars.source}else{throw _b_.TypeError.$factory("Type str doesn't support the buffer API")} if(lr=='l'){for(var i=0,len=self.source.length;i < len;i++){if(cars.indexOf(self.source[i])==-1){break}} return bytes.$factory(self.source.slice(i))} for(var i=self.source.length-1;i >=0;i--){if(cars.indexOf(self.source[i])==-1){break}} return bytes.$factory(self.source.slice(0,i+1))} function invalid(other){return ! _b_.isinstance(other,[bytes,bytearray])} -var bytearray={__class__:_b_.type,__mro__:[object],$buffer_protocol:true,$infos:{__module__:"builtins",__name__:"bytearray"},$is_class:true} +var bytearray={__class__:_b_.type,__mro__:[_b_.object],$buffer_protocol:true,$infos:{__module__:"builtins",__name__:"bytearray"},$is_class:true} var mutable_methods=["__delitem__","clear","copy","count","index","pop","remove","reverse","sort"] mutable_methods.forEach(function(method){bytearray[method]=(function(m){return function(self){var args=[self.source],pos=1 for(var i=1,len=arguments.length;i < len;i++){args[pos++]=arguments[i]} return _b_.list[m].apply(null,args)}})(method)}) var bytearray_iterator=$B.make_iterator_class('bytearray_iterator') bytearray.__iter__=function(self){return bytearray_iterator.$factory(self.source)} -bytearray.__mro__=[object] +bytearray.__mro__=[_b_.object] bytearray.__repr__=bytearray.__str__=function(self){return 'bytearray('+bytes.__repr__(self)+")"} -bytearray.__setitem__=function(self,arg,value){if(isinstance(arg,_b_.int)){if(! isinstance(value,_b_.int)){throw _b_.TypeError.$factory('an integer is required')}else if(value > 255){throw _b_.ValueError.$factory("byte must be in range(0, 256)")} +bytearray.__setitem__=function(self,arg,value){if(_b_.isinstance(arg,_b_.int)){if(! _b_.isinstance(value,_b_.int)){throw _b_.TypeError.$factory('an integer is required')}else if(value > 255){throw _b_.ValueError.$factory("byte must be in range(0, 256)")} var pos=arg if(arg < 0){pos=self.source.length+pos} if(pos >=0 && pos < self.source.length){self.source[pos]=value} -else{throw _b_.IndexError.$factory('list index out of range')}}else if(isinstance(arg,_b_.slice)){var start=arg.start===None ? 0 :arg.start -var stop=arg.stop===None ? self.source.length :arg.stop +else{throw _b_.IndexError.$factory('list index out of range')}}else if(_b_.isinstance(arg,_b_.slice)){var start=arg.start===_b_.None ? 0 :arg.start +var stop=arg.stop===_b_.None ? 
self.source.length :arg.stop if(start < 0){start=self.source.length+start} if(stop < 0){stop=self.source.length+stop} self.source.splice(start,stop-start) try{var $temp=_b_.list.$factory(value) -for(var i=$temp.length-1;i >=0;i--){if(! isinstance($temp[i],_b_.int)){throw _b_.TypeError.$factory('an integer is required')}else if($temp[i]> 255){throw ValueError.$factory("byte must be in range(0, 256)")} +for(var i=$temp.length-1;i >=0;i--){if(! _b_.isinstance($temp[i],_b_.int)){throw _b_.TypeError.$factory('an integer is required')}else if($temp[i]> 255){throw ValueError.$factory("byte must be in range(0, 256)")} self.source.splice(start,0,$temp[i])}}catch(err){throw _b_.TypeError.$factory("can only assign an iterable")}}else{throw _b_.TypeError.$factory('list indices must be integer, not '+ $B.class_name(arg))}} bytearray.append=function(self,b){if(arguments.length !=2){throw _b_.TypeError.$factory( "append takes exactly one argument ("+(arguments.length-1)+ " given)")} -if(! isinstance(b,_b_.int)){throw _b_.TypeError.$factory("an integer is required")} +if(! _b_.isinstance(b,_b_.int)){throw _b_.TypeError.$factory("an integer is required")} if(b > 255){throw ValueError.$factory("byte must be in range(0, 256)")} self.source[self.source.length]=b} +bytearray.extend=function(self,b){if(b.__class__===bytearray ||b.__class__===bytes){b.source.forEach(function(item){self.source.push(item)}) +return _b_.None} +var it=_b_.iter(b) +while(true){try{bytearray.__add__(self,_b_.next(it))}catch(err){if(err===_b_.StopIteration){break} +throw err}} +return _b_.None} bytearray.insert=function(self,pos,b){if(arguments.length !=3){throw _b_.TypeError.$factory( "insert takes exactly 2 arguments ("+(arguments.length-1)+ " given)")} -if(!isinstance(b,_b_.int)){throw _b_.TypeError.$factory("an integer is required")} +if(! 
_b_.isinstance(b,_b_.int)){throw _b_.TypeError.$factory("an integer is required")} if(b > 255){throw ValueError.$factory("byte must be in range(0, 256)")} _b_.list.insert(self.source,pos,b)} -bytearray.$factory=function(source,encoding,errors){return bytearray.__new__(bytearray,source,encoding,errors)} -var bytes={__class__ :_b_.type,__mro__:[object],$buffer_protocol:true,$infos:{__module__:"builtins",__name__:"bytes"},$is_class:true} -bytes.__add__=function(self,other){if(isinstance(other,bytes)){return self.__class__.$factory(self.source.concat(other.source))}else if(isinstance(other,bytearray)){return self.__class__.$factory(bytes.__add__(self,bytes.$factory(other)))}else if(isinstance(other,_b_.memoryview)){return self.__class__.$factory(bytes.__add__(self,_b_.memoryview.tobytes(other)))} +bytearray.$factory=function(){var args=[bytearray] +for(var i=0,len=arguments.length;i < len;i++){args.push(arguments[i])} +return bytearray.__new__.apply(null,args)} +var bytes={__class__ :_b_.type,__mro__:[_b_.object],$buffer_protocol:true,$infos:{__module__:"builtins",__name__:"bytes"},$is_class:true} +bytes.__add__=function(self,other){if(_b_.isinstance(other,bytes)){return self.__class__.$factory(self.source.concat(other.source))}else if(_b_.isinstance(other,bytearray)){return self.__class__.$factory(bytes.__add__(self,bytes.$factory(other)))}else if(_b_.isinstance(other,_b_.memoryview)){return self.__class__.$factory(bytes.__add__(self,_b_.memoryview.tobytes(other)))} throw _b_.TypeError.$factory("can't concat bytes to "+ _b_.str.$factory(other))} bytes.__contains__=function(self,other){if(typeof other=="number"){return self.source.indexOf(other)>-1} -if(self.source.length > other.source.length){return false} -var len=self.source.length -for(var i=0;i < other.source.length-self.source.length+1;i++){var flag=true +if(self.source.length < other.source.length){return false} +var len=other.source.length +for(var i=0;i < self.source.length-other.source.length+1;i++){var flag=true for(var j=0;j < len;j++){if(other.source[i+j]!=self.source[j]){flag=false break}} if(flag){return true}} @@ -7733,28 +8281,21 @@ return false} var bytes_iterator=$B.make_iterator_class("bytes_iterator") bytes.__iter__=function(self){return bytes_iterator.$factory(self.source)} bytes.__eq__=function(self,other){if(invalid(other)){return false} -return getattr(self.source,'__eq__')(other.source)} +return $B.$getattr(self.source,'__eq__')(other.source)} bytes.__ge__=function(self,other){if(invalid(other)){return _b_.NotImplemented} return _b_.list.__ge__(self.source,other.source)} bytes.__getitem__=function(self,arg){var i -if(isinstance(arg,_b_.int)){var pos=arg +if(_b_.isinstance(arg,_b_.int)){var pos=arg if(arg < 0){pos=self.source.length+pos} if(pos >=0 && pos < self.source.length){return self.source[pos]} -throw _b_.IndexError.$factory("index out of range")}else if(isinstance(arg,_b_.slice)){var step=arg.step===None ? 1 :arg.step -if(step > 0){var start=arg.start===None ? 0 :arg.start -var stop=arg.stop===None ? -getattr(self.source,'__len__')():arg.stop}else{var start=arg.start===None ? -getattr(self.source,'__len__')()-1 :arg.start -var stop=arg.stop===None ? 
0 :arg.stop} -if(start < 0){start=self.source.length+start} -if(stop < 0){stop=self.source.length+stop} +throw _b_.IndexError.$factory("index out of range")}else if(_b_.isinstance(arg,_b_.slice)){var s=_b_.slice.$conv_for_seq(arg,self.source.length),start=s.start,stop=s.stop,step=s.step var res=[],i=null,pos=0 if(step > 0){stop=Math.min(stop,self.source.length) if(stop <=start){return bytes.$factory([])} for(var i=start;i < stop;i+=step){res[pos++]=self.source[i]}}else{if(stop >=start){return bytes.$factory([])} stop=Math.max(0,stop) for(var i=start;i >=stop;i+=step){res[pos++]=self.source[i]}} -return bytes.$factory(res)}else if(isinstance(arg,_b_.bool)){return self.source.__getitem__(_b_.int.$factory(arg))}} +return bytes.$factory(res)}else if(_b_.isinstance(arg,_b_.bool)){return self.source.__getitem__(_b_.int.$factory(arg))}} bytes.__gt__=function(self,other){if(invalid(other)){return _b_.NotImplemented} return _b_.list.__gt__(self.source,other.source)} bytes.__hash__=function(self){if(self===undefined){return bytes.__hashvalue__ ||$B.$py_next_hash--} @@ -7767,6 +8308,9 @@ return _b_.list.__le__(self.source,other.source)} bytes.__len__=function(self){return self.source.length} bytes.__lt__=function(self,other){if(invalid(other)){return _b_.NotImplemented} return _b_.list.__lt__(self.source,other.source)} +bytes.__mod__=function(self,args){ +var s=decode(self,"ascii","strict"),res=_b_.str.__mod__(s,args) +return _b_.str.encode(res,"ascii")} bytes.__mul__=function(){var $=$B.args('__mul__',2,{self:null,other:null},['self','other'],arguments,{},null,null),other=$B.PyNumber_Index($.other) var t=[],source=$.self.source,slen=source.length for(var i=0;i < other;i++){for(var j=0;j < slen;j++){t.push(source[j])}} @@ -7774,13 +8318,13 @@ var res=bytes.$factory() res.source=t return res} bytes.__ne__=function(self,other){return ! 
bytes.__eq__(self,other)} -bytes.__new__=function(cls,source,encoding,errors){var $=$B.args("__new__",4,{cls:null,source:null,encoding:null,errors:null},["cls","source","encoding","errors"],arguments,{encoding:"utf-8",errors:"strict"},null,null) +bytes.__new__=function(cls,source,encoding,errors){var $=$B.args("__new__",4,{cls:null,source:null,encoding:null,errors:null},["cls","source","encoding","errors"],arguments,{source:[],encoding:"utf-8",errors:"strict"},null,null) return bytes.$new($.cls,$.source,$.encoding,$.errors)} bytes.$new=function(cls,source,encoding,errors){ var self={__class__:cls},int_list=[],pos=0 -if(source===undefined){}else if(isinstance(source,_b_.int)){var i=source -while(i--){int_list[pos++]=0}}else{if(isinstance(source,_b_.str)){if(encoding===undefined){throw _b_.TypeError.$factory("string argument without an encoding")} -int_list=encode(source,encoding,errors)}else{ +if(source===undefined){}else if(typeof source=="number" ||_b_.isinstance(source,_b_.int)){var i=source +while(i--){int_list[pos++]=0}}else{if(typeof source=="string" ||_b_.isinstance(source,_b_.str)){if(encoding===undefined){throw _b_.TypeError.$factory("string argument without an encoding")} +int_list=encode(source,encoding ||"utf-8",errors ||"strict")}else{ int_list=_b_.list.$factory(source) for(var i=0;i < int_list.length;i++){try{var item=_b_.int.$factory(int_list[i])}catch(err){throw _b_.TypeError.$factory("'"+ $B.class_name(int_list[i])+"' object "+ @@ -7791,12 +8335,12 @@ self.source=int_list self.encoding=encoding self.errors=errors return self} -bytes.__repr__=bytes.__str__=function(self){var res="b'" +bytes.__repr__=bytes.__str__=function(self){var res="" for(var i=0,len=self.source.length;i < len;i++){var s=self.source[i] if(s==10){res+='\\n'}else if(s < 32 ||s >=128){var hx=s.toString(16) hx=(hx.length==1 ? '0' :'')+hx res+='\\x'+hx}else if(s=="\\".charCodeAt(0)){res+="\\\\"}else{res+=String.fromCharCode(s)}} -return res+"'"} +if(res.indexOf("'")>-1 && res.indexOf('"')==-1){return 'b"'+res+'"'}else{return "b'"+res.replace(new RegExp("'","g"),"\\'")+"'"}} bytes.__reduce_ex__=function(self){return bytes.__repr__(self)} bytes.capitalize=function(self){var src=self.source,len=src.length,buffer=src.slice() if(buffer[0]> 96 && buffer[0]< 123){buffer[0]-=32} @@ -7825,7 +8369,7 @@ case 'surrogateescape': case 'surrogatepass': case 'xmlcharrefreplace': case 'backslashreplace': -return decode($.self.source,$.encoding,$.errors) +return decode($.self,$.encoding,$.errors) default:}} bytes.endswith=function(){var $=$B.args('endswith',4,{self:null,suffix:null,start:null,end:null},['self','suffix','start','end'],arguments,{start:-1,end:-1},null,null) if(_b_.isinstance($.suffix,bytes)){var start=$.start==-1 ? 
@@ -7917,11 +8461,11 @@ if(current_char_is_letter && prev_char_was_letter=current_char_is_letter} return true} bytes.join=function(){var $ns=$B.args('join',2,{self:null,iterable:null},['self','iterable'],arguments,{}),self=$ns['self'],iterable=$ns['iterable'] -var next_func=_b_.getattr(_b_.iter(iterable),'__next__'),res=self.__class__.$factory(),empty=true +var next_func=$B.$getattr(_b_.iter(iterable),'__next__'),res=self.__class__.$factory(),empty=true while(true){try{var item=next_func() if(empty){empty=false} else{res=bytes.__add__(res,self)} -res=bytes.__add__(res,item)}catch(err){if(isinstance(err,_b_.StopIteration)){break} +res=bytes.__add__(res,item)}catch(err){if(_b_.isinstance(err,_b_.StopIteration)){break} throw err}} return res} var _lower=function(char_code){if(char_code >=65 && char_code <=90){return char_code+32}else{return char_code}} @@ -7948,6 +8492,16 @@ if(! $.sep.__class__){throw _b_.TypeError.$factory("a bytes-like object is requi var len=$.sep.source.length,src=$.self.source,i=bytes.find($.self,$.sep) return _b_.tuple.$factory([bytes.$factory(src.slice(0,i)),bytes.$factory(src.slice(i,i+len)),bytes.$factory(src.slice(i+len)) ])} +bytes.removeprefix=function(){var $=$B.args("removeprefix",2,{self:null,prefix:null},["self","prefix"],arguments,{},null,null) +if(!_b_.isinstance($.prefix,[bytes,bytearray])){throw _b_.ValueError.$factory("prefix should be bytes, not "+ +`'${$B.class_name($.prefix)}'`)} +if(bytes.startswith($.self,$.prefix)){return bytes.__getitem__($.self,_b_.slice.$factory($.prefix.source.length,_b_.None))} +return bytes.__getitem__($.self,_b_.slice.$factory(0,_b_.None))} +bytes.removesuffix=function(){var $=$B.args("removesuffix",2,{self:null,prefix:null},["self","suffix"],arguments,{},null,null) +if(!_b_.isinstance($.suffix,[bytes,bytearray])){throw _b_.ValueError.$factory("suffix should be bytes, not "+ +`'${$B.class_name($.suffix)}'`)} +if(bytes.endswith($.self,$.suffix)){return bytes.__getitem__($.self,_b_.slice.$factory(0,$.suffix.source.length+1))} +return bytes.__getitem__($.self,_b_.slice.$factory(0,_b_.None))} bytes.replace=function(){var $=$B.args('replace',4,{self:null,old:null,new:null,count:null},['self','old','new','count'],arguments,{count:-1},null,null),res=[] var self=$.self,src=self.source,len=src.length,old=$.old,$new=$.new var count=$.count >=0 ? $.count :src.length @@ -8005,17 +8559,17 @@ start=stop+len stop=start}else{stop++}} if(match ||(stop > start)){res.push(bytes.$factory(src.slice(start,stop)))} return res} -bytes.splitlines=function(){var $=$B.args('splitlines',2,{self:null,keepends:null},['self','keepends'],arguments,{keepends:false},null,null),lines=[],src=$.self.source,start=0,end=-1,newline_end=-1 -for(var i=0;i < src.length;++i){var newline_end=-1 -if(src[i]===13){end=i -newline_end=++i} -if(src[i]===10){end=newline_end==-1 ? i :i-1 -newline_end=++i} -if(newline_end !=-1){lines.push(bytes.$factory(src.slice(start,$.keepends ? -newline_end :end))) -start=i}} -if(src.length > 0){lines.push(bytes.$factory(src.slice(start)))} -return lines} +bytes.splitlines=function(self){var $=$B.args('splitlines',2,{self:null,keepends:null},['self','keepends'],arguments,{keepends:false},null,null) +if(!_b_.isinstance($.keepends,[_b_.bool,_b_.int])){throw _b_.TypeError('integer argument expected, got '+ +$B.get_class($.keepends).__name)} +var keepends=_b_.int.$factory($.keepends),res=[],source=$.self.source,start=0,pos=0 +if(! 
source.length){return res} +while(pos < source.length){if(pos < source.length-1 && source[pos]==0x0d && +source[pos+1]==0x0a){res.push(bytes.$factory(source.slice(start,keepends ? pos+2 :pos))) +start=pos=pos+2}else if(source[pos]==0x0d ||source[pos]==0x0a){res.push(bytes.$factory(source.slice(start,keepends ? pos+1 :pos))) +start=pos=pos+1}else{pos++}} +if(start < source.length){res.push(bytes.$factory(source.slice(start)))} +return res} bytes.startswith=function(){var $=$B.args('startswith',3,{self:null,prefix:null,start:null},['self','prefix','start'],arguments,{start:0},null,null),start=$.start if(_b_.isinstance($.prefix,bytes)){var res=true for(var i=0;i < $.prefix.source.length && res;i++){res=$.self.source[start+i]==$.prefix.source[i]} @@ -8039,10 +8593,10 @@ current_char_is_letter=is_lowercase ||is_uppercase if(current_char_is_letter){if(prev_char_was_letter && is_uppercase){buffer[i]+=32}else if(! prev_char_was_letter && is_lowercase){buffer[i]-=32}} prev_char_was_letter=current_char_is_letter} return bytes.$factory(buffer)} -bytes.translate=function(self,table,_delete){if(_delete===undefined){_delete=[]}else if(isinstance(_delete,bytes)){_delete=_delete.source}else{throw _b_.TypeError.$factory("Type "+ +bytes.translate=function(self,table,_delete){if(_delete===undefined){_delete=[]}else if(_b_.isinstance(_delete,bytes)){_delete=_delete.source}else{throw _b_.TypeError.$factory("Type "+ $B.get_class(_delete).__name+" doesn't support the buffer API")} var res=[],pos=0 -if(isinstance(table,bytes)&& table.source.length==256){for(var i=0,len=self.source.length;i < len;i++){if(_delete.indexOf(self.source[i])>-1){continue} +if(_b_.isinstance(table,bytes)&& table.source.length==256){for(var i=0,len=self.source.length;i < len;i++){if(_delete.indexOf(self.source[i])>-1){continue} res[pos++]=table.source[self.source[i]]}} return bytes.$factory(res)} var _upper=function(char_code){if(char_code >=97 && char_code <=122){return char_code-32}else{return char_code}} @@ -8068,15 +8622,12 @@ if(enc.startsWith("cp")||enc.startsWith("iso")){enc=enc.replace("-","")} enc=enc.replace(/-/g,"_") return enc} function load_decoder(enc){ -if(to_unicode[enc]===undefined){load_encoder(enc) -to_unicode[enc]={} -for(var attr in from_unicode[enc]){to_unicode[enc][from_unicode[enc][attr]]=attr}}} +if(to_unicode[enc]===undefined){var mod=_b_.__import__("encodings."+enc) +if(mod[enc].getregentry){to_unicode[enc]=$B.$getattr(mod[enc].getregentry(),"decode")}}} function load_encoder(enc){ if(from_unicode[enc]===undefined){var mod=_b_.__import__("encodings."+enc) -table=mod[enc].decoding_table -from_unicode[enc]={} -for(var i=0;i < table.length;i++){from_unicode[enc][table.charCodeAt(i)]=i}}} -var decode=$B.decode=function(b,encoding,errors){var s="",enc=normalise(encoding) +if(mod[enc].getregentry){from_unicode[enc]=$B.$getattr(mod[enc].getregentry(),"encode")}}} +var decode=$B.decode=function(obj,encoding,errors){var s="",b=obj.source,enc=normalise(encoding),has_surrogate=false switch(enc){case "utf_8": case "utf-8": case "utf8": @@ -8111,11 +8662,27 @@ cp=cp << 12 cp+=(b[pos+1]& 0x3f)<< 6 cp+=b[pos+2]& 0x3f s+=String.fromCodePoint(cp) -pos+=3}}else{if(errors=="ignore"){pos++}else if(errors=="surrogateescape"){s+=String.fromCodePoint(0xdc80+b[pos]-0x80) +pos+=3}}else if((byte >> 3)==30){ +has_surrogate=true +if(b[pos+1]===undefined){err_info=[byte,pos,"end",pos+1]}else if((b[pos+1]& 0xc0)!=0x80){err_info=[byte,pos,"continuation",pos+2]}else if(b[pos+2]===undefined){err_info=[byte,pos+'-'+(pos+1),"end",pos+2]}else 
if((b[pos+2]& 0xc0)!=0x80){err_info=[byte,pos,"continuation",pos+3]}else if(b[pos+3]===undefined){err_info=[byte,pos+'-'+(pos+1)+'-'+(pos+2),"end",pos+3]}else if((b[pos+2]& 0xc0)!=0x80){err_info=[byte,pos,"continuation",pos+3]} +if(err_info !==null){if(errors=="ignore"){pos=err_info[3]}else if(errors=="surrogateescape"){for(var i=pos;i < err_info[3];i++){s+=String.fromCodePoint(0xdc80+b[i]-0x80)} +pos=err_info[3]}else{throw _b_.UnicodeDecodeError.$factory( +"'utf-8' codec can't decode byte 0x"+ +err_info[0].toString(16)+" in position "+ +err_info[1]+ +(err_info[2]=="end" ? ": unexpected end of data" : +": invalid continuation byte"))}}else{var cp=byte & 0xf +cp=cp << 18 +cp+=(b[pos+1]& 0x3f)<< 12 +cp+=(b[pos+2]& 0x3f)<< 6 +cp+=(b[pos+3]& 0x3f) +s+=String.fromCodePoint(cp) +pos+=4}}else{if(errors=="ignore"){pos++}else if(errors=="surrogateescape"){s+=String.fromCodePoint(0xdc80+b[pos]-0x80) pos++}else{throw _b_.UnicodeDecodeError.$factory( "'utf-8' codec can't decode byte 0x"+ -byte.toString(16)+"in position "+pos+ +byte.toString(16)+" in position "+pos+ ": invalid start byte")}}} +if(has_surrogate){return _b_.str.$surrogate.$factory(s)} return s case "latin_1": case "windows1252": @@ -8129,8 +8696,8 @@ case "L1": b.forEach(function(item){s+=String.fromCharCode(item)}) break case "unicode_escape": -if(Array.isArray(b)){b=decode(b,"latin-1","strict")} -return b.replace(/\\n/g,"\n"). +if(obj.__class__===bytes ||obj.__class__===bytearray){obj=decode(obj,"latin-1","strict")} +return obj.replace(/\\n/g,"\n"). replace(/\\a/g,"\u0007"). replace(/\\b/g,"\b"). replace(/\\f/g,"\f"). @@ -8138,10 +8705,9 @@ replace(/\\t/g,"\t"). replace(/\\'/g,"'"). replace(/\\"/g,'"') case "raw_unicode_escape": -if(Array.isArray(b)){b=decode(b,"latin-1","strict")} -b=b.replace(/\\u([a-fA-F0-9]{4})/g,function(mo){var cp=parseInt(mo.substr(2),16) +if(obj.__class__===bytes ||obj.__class__===bytearray){obj=decode(obj,"latin-1","strict")} +return obj.replace(/\\u([a-fA-F0-9]{4})/g,function(mo){var cp=parseInt(mo.substr(2),16) return String.fromCharCode(cp)}) -return b case "ascii": for(var i=0,len=b.length;i < len;i++){var cp=b[i] if(cp <=127){s+=String.fromCharCode(cp)}else{if(errors=="ignore"){}else{var msg="'ascii' codec can't decode byte 0x"+ @@ -8152,10 +8718,7 @@ break default: try{load_decoder(enc)}catch(err){console.log(b,encoding,"error load_decoder",err) throw _b_.LookupError.$factory("unknown encoding: "+enc)} -b.forEach(function(item){var u=to_unicode[enc][item] -if(u !==undefined){s+=String.fromCharCode(u)} -else{s+=String.fromCharCode(item)}}) -break} +return to_unicode[enc](obj)[0]} return s} var encode=$B.encode=function(){var $=$B.args("encode",3,{s:null,encoding:null,errors:null},["s","encoding","errors"],arguments,{encoding:"utf-8",errors:"strict"},null,null),s=$.s,encoding=$.encoding,errors=$.errors var t=[],pos=0,enc=normalise(encoding) @@ -8166,9 +8729,15 @@ var res=[] for(var i=0,len=s.length;i < len;i++){var cp=s.charCodeAt(i) if(cp < 0x7f){res.push(cp)}else if(cp < 0x7ff){res.push(0xc0+(cp >> 6),0x80+(cp & 0x3f))}else if(cp < 0xffff){res.push(0xe0+(cp >> 12),0x80+((cp & 0xfff)>> 6),0x80+(cp & 0x3f))}else{console.log("4 bytes")}} return res +case "latin": case "latin1": +case "latin-1": +case "latin_1": +case "L1": case "iso8859_1": case "iso_8859_1": +case "8859": +case "cp819": case "windows1252": for(var i=0,len=s.length;i < len;i++){var cp=s.charCodeAt(i) if(cp <=255){t[pos++]=cp} @@ -8188,10 +8757,7 @@ for(var j=0;j < us.length;j++){t[pos++]=us.charCodeAt(j)}}} break default: 
try{load_encoder(enc)}catch(err){throw _b_.LookupError.$factory("unknown encoding: "+encoding)} -for(var i=0,len=s.length;i < len;i++){var cp=s.charCodeAt(i) -if(from_unicode[enc][cp]===undefined){$UnicodeEncodeError(encoding,cp,i)} -t[pos++]=from_unicode[enc][cp]} -break} +t=from_unicode[enc](s)[0].source} return t} bytes.$factory=function(source,encoding,errors){var $=$B.args("bytes",3,{source:null,encoding:null,errors:null},["source","encoding","errors"],arguments,{source:[],encoding:"utf-8",errors:"strict"},null,null) return bytes.$new(bytes,$.source,$.encoding,$.errors)} @@ -8209,6 +8775,7 @@ _b_.bytearray=bytearray})(__BRYTHON__) function create_type(obj){return $B.get_class(obj).$factory()} function clone(obj){var res=create_type(obj) res.$items=obj.$items.slice() +for(key in obj.$hashes){res.$hashes[key]=obj.$hashes[key]} return res} var set={__class__:_b_.type,$infos:{__module__:"builtins",__name__:"set"},$is_class:true,$native:true} set.__add__=function(self,other){throw _b_.TypeError.$factory( @@ -8218,11 +8785,15 @@ catch(err){return _b_.NotImplemented} var res=create_type(self) for(var i=0,len=self.$items.length;i < len;i++){if(_b_.getattr(other,"__contains__")(self.$items[i])){set.add(res,self.$items[i])}} return res} +set.__class_getitem__=function(cls,item){ +if(! Array.isArray(item)){item=[item]} +return $B.GenericAlias.$factory(cls,item)} set.__contains__=function(self,item){if(self.$simple){if(typeof item=="number" ||item instanceof Number){if(isNaN(item)){ for(var i=self.$items.length-1;i >=0;i--){if(isNaN(self.$items[i])){return true}} -return false}else{return self.$items.indexOf(item)>-1}}else if(typeof item=="string"){return self.$items.indexOf(item)>-1}} +return false}else if(item instanceof Number){return self.$numbers.indexOf(item.valueOf())>-1}else{return self.$items.indexOf(item)>-1}}else if(typeof item=="string"){return self.$items.indexOf(item)>-1}} if(! 
_b_.isinstance(item,set)){$B.$getattr(item,"__hash__")} -for(var i=0,len=self.$items.length;i < len;i++){if($B.rich_comp("__eq__",self.$items[i],item)){return true}} +var hash=_b_.hash(item) +if(self.$hashes[hash]){for(var i=0,len=self.$hashes[hash].length;i < len;i++){if($B.rich_comp("__eq__",self.$hashes[hash][i],item)){return true}}} return false} set.__eq__=function(self,other){ if(other===undefined){return self===set} @@ -8235,14 +8806,17 @@ set.__ge__=function(self,other){if(_b_.isinstance(other,[set,frozenset])){return return _b_.NotImplemented} set.__gt__=function(self,other){if(_b_.isinstance(other,[set,frozenset])){return set.__lt__(other,self)} return _b_.NotImplemented} +set.__hash__=_b_.None set.__init__=function(self,iterable,second){if(second===undefined){if(Array.isArray(iterable)){for(var i=0,len=iterable.length;i < len;i++){$add(self,iterable[i])} return $N}} var $=$B.args("__init__",2,{self:null,iterable:null},["self","iterable"],arguments,{iterable:[]},null,null),self=$.self,iterable=$.iterable if(_b_.isinstance(iterable,[set,frozenset])){self.$items=iterable.$items.slice() +self.$hashes={} +for(var key in iterable.$hashes){self.$hashes[key]=iterable.$hashes[key]} return $N} var it=$B.$iter(iterable) while(1){try{var item=_b_.next(it) -set.add(self,item)}catch(err){if(_b_.isinstance(err,_b_.StopIteration)){break} +$add(self,item)}catch(err){if(_b_.isinstance(err,_b_.StopIteration)){break} throw err}} return $N} var set_iterator=$B.make_iterator_class("set iterator") @@ -8258,7 +8832,8 @@ set.__len__(self)< _b_.getattr(other,"__len__")()}else{return _b_.NotImplemented set.__mro__=[_b_.object] set.__new__=function(cls){if(cls===undefined){throw _b_.TypeError.$factory("set.__new__(): not enough arguments")} return{ -__class__:cls,$simple:true,$items:[],$numbers:[]}} +__class__:cls,$simple:true,$items:[],$numbers:[], +$hashes:{}}} set.__or__=function(self,other,accept_iter){ var res=clone(self),func=_b_.getattr($B.$iter(other),"__next__") while(1){try{set.add(res,func())} @@ -8275,25 +8850,24 @@ return set.__sub__(self,other)} set.__rxor__=function(self,other){ return set.__xor__(self,other)} set.__str__=set.__repr__=function(self){var klass_name=$B.class_name(self) -self.$cycle=self.$cycle===undefined ? 0 :self.$cycle+1 if(self.$items.length===0){return klass_name+"()"} var head=klass_name+"({",tail="})" if(head=="set({"){head="{";tail="}"} var res=[] -if(self.$cycle){self.$cycle-- -return klass_name+"(...)"} +if($B.repr.enter(self)){return klass_name+"(...)"} self.$items.sort() for(var i=0,len=self.$items.length;i < len;i++){var r=_b_.repr(self.$items[i]) if(r===self ||r===self.$items[i]){res.push("{...}")} else{res.push(r)}} res=res.join(", ") -self.$cycle-- +$B.repr.leave(self) return head+res+tail} set.__sub__=function(self,other,accept_iter){ try{$test(accept_iter,other,"-")} catch(err){return _b_.NotImplemented} -var res=create_type(self),cfunc=_b_.getattr(other,"__contains__") -for(var i=0,len=self.$items.length;i < len;i++){if(! cfunc(self.$items[i])){res.$items.push(self.$items[i])}} +var res=create_type(self),cfunc=_b_.getattr(other,"__contains__"),items=[] +for(var i=0,len=self.$items.length;i < len;i++){if(! 
cfunc(self.$items[i])){items.push(self.$items[i])}} +set.__init__.call(null,res,items) return res} set.__xor__=function(self,other,accept_iter){ try{$test(accept_iter,other,"^")} @@ -8315,28 +8889,38 @@ self.$numbers.indexOf(item.valueOf())>-1){}else if(typeof item=="number" && self.$numbers.indexOf(item)>-1){}else{self.$items.push(item) var value=item.valueOf() if(typeof value=="number"){self.$numbers.push(value)}}}else{ -if(item !==self.$items[ix]){self.$items.push(item)}} -return $N}else{$B.$getattr(item,"__hash__")} -var cfunc=function(other){return $B.rich_comp("__eq__",item,other)} -for(var i=0,len=self.$items.length;i < len;i++){if(cfunc(self.$items[i])){return $N}} -self.$items.push(item) +if(item !==self.$items[ix]){self.$items.push(item)}}}else{ +var hashvalue=_b_.hash(item) +var items=self.$hashes[hashvalue] +if(items===undefined){self.$hashes[hashvalue]=[item] +self.$items.push(item)}else{var items=self.$hashes[hashvalue],cfunc=function(other){return $B.rich_comp("__eq__",item,other)} +for(var i=0,len=items.length;i < len;i++){if(cfunc(items[i])){ +return $N}} +self.$hashes[hashvalue].push(item) +self.$items.push(item)}} return $N} set.add=function(){var $=$B.args("add",2,{self:null,item:null},["self","item"],arguments,{},null,null),self=$.self,item=$.item return $add(self,item)} set.clear=function(){var $=$B.args("clear",1,{self:null},["self"],arguments,{},null,null) $.self.$items=[] +$.self.$hashes={} return $N} set.copy=function(){var $=$B.args("copy",1,{self:null},["self"],arguments,{},null,null) if(_b_.isinstance($.self,frozenset)){return $.self} var res=set.$factory() -for(var i=0,len=$.self.$items.length;i < len;i++){res.$items[i]=$.self.$items[i]} +$.self.$items.forEach(function(item){res.$items.push(item)}) +$.self.$numbers.forEach(function(item){res.$numbers.push(item)}) +for(key in self.$hashes){res.$hashes[key]=self.$hashes[key]} return res} set.difference_update=function(self){var $=$B.args("difference_update",1,{self:null},["self"],arguments,{},"args",null) for(var i=0;i < $.args.length;i++){var s=set.$factory($.args[i]),_next=_b_.getattr($B.$iter(s),"__next__"),item while(true){try{item=_next() var _type=typeof item if(_type=="string" ||_type=="number"){var _index=self.$items.indexOf(item) -if(_index >-1){self.$items.splice(_index,1)}}else{for(var j=0;j < self.$items.length;j++){if($B.rich_comp("__eq__",self.$items[j],item)){self.$items.splice(j,1)}}}}catch(err){if(_b_.isinstance(err,_b_.StopIteration)){break} +if(_index >-1){self.$items.splice(_index,1)}}else{for(var j=0;j < self.$items.length;j++){if($B.rich_comp("__eq__",self.$items[j],item)){self.$items.splice(j,1) +var hash=_b_.hash(item) +if(self.$hashes[hash]){for(var k=0;k < self.$hashes[hash].length;k++){if($B.rich_comp("__eq__",self.$hashes[hash][k],item)){self.$hashes[hash].splice(k,1) +break}}}}}}}catch(err){if(_b_.isinstance(err,_b_.StopIteration)){break} throw err}}} return $N} set.discard=function(){var $=$B.args("discard",2,{self:null,item:null},["self","item"],arguments,{},null,null) @@ -8347,9 +8931,12 @@ set.intersection_update=function(){ var $=$B.args("intersection_update",1,{self:null},["self"],arguments,{},"args",null),self=$.self for(var i=0;i < $.args.length;i++){var remove=[],s=set.$factory($.args[i]) for(var j=0;j < self.$items.length;j++){var _item=self.$items[j],_type=typeof _item -if(_type=="string" ||_type=="number"){if(s.$items.indexOf(_item)==-1){remove.push(j)}}else{var found=false -for(var k=0;! 
found && k < s.$items.length;k++){if($B.rich_comp("__eq__",s.$items[k],_item)){found=true}} -if(! found){remove.push(j)}}} +if(_type=="string" ||_type=="number"){if(s.$items.indexOf(_item)==-1){remove.push(j)}}else{var found=false,hash=_b_.hash(_item) +if(s.$hashes[hash]){var hashes=s.$hashes[hash] +for(var k=0;! found && k < hashes.length;k++){if($B.rich_comp("__eq__",hashes[k],_item)){found=true}} +if(! found){remove.push(j) +hashes=self.$hashes[hash] +for(var k=0;! found && k < hashes.length;k++){if($B.rich_comp("__eq__",hashes[k],_item)){self.$hashes.splice(k,1)}}}}}} remove.sort(function(x,y){return x-y}).reverse() for(var j=0;j < remove.length;j++){self.$items.splice(remove[j],1)}} return $N} @@ -8357,16 +8944,28 @@ set.isdisjoint=function(){var $=$B.args("is_disjoint",2,{self:null,other:null},[ for(var i=0,len=$.self.$items.length;i < len;i++){if(_b_.getattr($.other,"__contains__")($.self.$items[i])){return false}} return true} set.pop=function(self){if(self.$items.length===0){throw _b_.KeyError.$factory('pop from an empty set')} -return self.$items.pop()} +var item=self.$items.pop() +if(typeof item !="string" && typeof item !="number"){ +var hash=_b_.hash(item),items=self.$hashes[hash] +for(var k=0;k < items.length;k++){if($B.rich_comp("__eq__",items[k],item)){self.$hashes[hash].splice(k,1) +break}}} +return item} set.remove=function(self,item){ var $=$B.args("remove",2,{self:null,item:null},["self","item"],arguments,{},null,null),self=$.self,item=$.item if(! _b_.isinstance(item,set)){_b_.hash(item)} if(typeof item=="string" ||typeof item=="number"){var _i=self.$items.indexOf(item) if(_i==-1){throw _b_.KeyError.$factory(item)} self.$items.splice(_i,1) +if(typeof item=="number"){self.$numbers.splice(self.$numbers.indexOf(item),1)} return $N} +var hash=_b_.hash(item) +if(self.$hashes[hash]){ for(var i=0,len=self.$items.length;i < len;i++){if($B.rich_comp("__eq__",self.$items[i],item)){self.$items.splice(i,1) -return $N}} +if(item instanceof Number){self.$numbers.splice(self.$numbers.indexOf(item.valueOf()),1)} +break}} +for(var i=0,len=self.$hashes[hash].length;i < len;i++){if($B.rich_comp("__eq__",self.$hashes[hash][i],item)){self.$hashes[hash].splice(i,1) +break}} +return $N} throw _b_.KeyError.$factory(item)} set.symmetric_difference_update=function(self,s){ var $=$B.args("symmetric_difference_update",2,{self:null,s:null},["self","s"],arguments,{},null,null),self=$.self,s=$.s @@ -8386,7 +8985,7 @@ return $N} set.update=function(self){ var $=$B.args("update",1,{self:null},["self"],arguments,{},"args",null) for(var i=0;i < $.args.length;i++){var other=set.$factory($.args[i]) -for(var j=0,_len=other.$items.length;j < _len;j++){set.add(self,other.$items[j])}} +for(var j=0,_len=other.$items.length;j < _len;j++){$add(self,other.$items[j])}} return $N} set.difference=function(){var $=$B.args("difference",1,{self:null},["self"],arguments,{},"args",null) if($.args.length==0){return set.copy($.self)} @@ -8417,11 +9016,12 @@ set.__isub__=$accept_only_set(set.difference_update,"-=") set.__ixor__=$accept_only_set(set.symmetric_difference_update,"^=") set.__ior__=$accept_only_set(set.update,"|=") set.$factory=function(){ -var res={__class__:set,$simple:true,$items:[],$numbers:[]} +var res={__class__:set,$simple:true,$items:[],$numbers:[],$hashes:{}} var args=[res].concat(Array.prototype.slice.call(arguments)) set.__init__.apply(null,args) return res} $B.set_func_names(set,"builtins") +set.__class_getitem__=_b_.classmethod.$factory(set.__class_getitem__) var 
frozenset={__class__:_b_.type,__mro__:[object],$infos:{__module__:"builtins",__name__:"frozenset"},$is_class:true,$native:true} for(var attr in set){switch(attr){case "add": case "clear": @@ -8446,10 +9046,11 @@ return $N} frozenset.__new__=function(cls){if(cls===undefined){throw _b_.TypeError.$factory( "frozenset.__new__(): not enough arguments")} return{ -__class__:cls,$simple:true,$items:[],$numbers:[]}} +__class__:cls,$simple:true,$items:[],$numbers:[],$hashes:{}}} var singleton_id=Math.floor(Math.random()*Math.pow(2,40)) -function empty_frozenset(){return{ -__class__:frozenset,$items:[],$numbers:[],$id:singleton_id}} +function empty_frozenset(){var res=frozenset.__new__(frozenset) +res.$id=singleton_id +return res} frozenset.$factory=function(){var $=$B.args("frozenset",1,{iterable:null},["iterable"],arguments,{iterable:null},null,null) if($.iterable===null){return empty_frozenset()} else if($.iterable.__class__==frozenset){return $.iterable} @@ -8461,28 +9062,38 @@ $B.set_func_names(frozenset,"builtins") _b_.set=set _b_.frozenset=frozenset})(__BRYTHON__) ; -;(function($B){var bltns=$B.InjectBuiltins() -eval(bltns) +;(function($B){var _b_=$B.builtins var object=_b_.object var _window=self; -$B.pyobj2structuredclone=function(obj){ +function to_simple(value){switch(typeof value){case 'string': +case 'number': +return value +case 'boolean': +return value ? "true" :"false" +case 'object': +if(value===_b_.None){return 'null'}else if(value instanceof Number){return value.valueOf()} +default: +console.log("erreur",value) +throw _b_.TypeError.$factory("keys must be str, int, "+ +"float, bool or None, not "+$B.class_name(value))}} +$B.pyobj2structuredclone=function(obj,strict){ +strict=strict===undefined ? true :strict if(typeof obj=="boolean" ||typeof obj=="number" || -typeof obj=="string"){return obj}else if(obj instanceof Number){return obj.valueOf()}else if(Array.isArray(obj)||obj.__class__===_b_.list || +typeof obj=="string"){return obj}else if(obj instanceof Number){return obj.valueOf()}else if(obj===_b_.None){return null }else if(Array.isArray(obj)||obj.__class__===_b_.list || obj.__class__===_b_.tuple){var res=[] for(var i=0,len=obj.length;i < len;i++){res.push($B.pyobj2structuredclone(obj[i]))} -return res}else if(obj.__class__===_b_.dict){if(Object.keys(obj.$numeric_dict).length > 0 || +return res}else if(_b_.isinstance(obj,_b_.dict)){if(strict){if(Object.keys(obj.$numeric_dict).length > 0 || Object.keys(obj.$object_dict).length > 0){throw _b_.TypeError.$factory("a dictionary with non-string "+ -"keys cannot be sent to or from a Web Worker")} -var res={} -for(var key in obj.$string_dict){res[key]=$B.pyobj2structuredclone(obj.$string_dict[key])} -return res}else{console.log(obj,obj.__class__) -return obj}} -$B.structuredclone2pyobj=function(obj){if(obj===null){return _b_.None}else if(typeof obj=="boolean" ||typeof obj=="number" || +"keys does not support structured clone")}} +var items=$B.dict_to_list(obj),res={} +for(var i=0,len=items.length;i < len;i++){res[to_simple(items[i][0])]=$B.pyobj2structuredclone(items[i][1])} +return res}else{return obj}} +$B.structuredclone2pyobj=function(obj){if(obj===null){return _b_.None}else if(obj===undefined){return $B.Undefined}else if(typeof obj=="boolean" ||typeof obj=="number" || typeof obj=="string"){return obj}else if(obj instanceof Number){return obj.valueOf()}else if(Array.isArray(obj)||obj.__class__===_b_.list || obj.__class__===_b_.tuple){var res=_b_.list.$factory() for(var i=0,len=obj.length;i < 
len;i++){res.push($B.structuredclone2pyobj(obj[i]))} -return res}else if(typeof obj=="object"){var res=_b_.dict.$factory() -for(var key in obj){res.$string_dict[key]=$B.structuredclone2pyobj(obj[key])} +return res}else if(typeof obj=="object"){var res=$B.empty_dict() +for(var key in obj){_b_.dict.$setitem(res,key,$B.structuredclone2pyobj(obj[key]))} return res}else{console.log(obj,Array.isArray(obj),obj.__class__,_b_.list,obj.__class__===_b_.list) throw _b_.TypeError.$factory(_b_.str.$factory(obj)+ " does not support the structured clone algorithm")}} @@ -8499,15 +9110,9 @@ for(var i=0,len=arguments.length;i < len;i++){args.push(pyobj2jsobj(arguments[i] var factory=self.func.bind.apply(self.func,args) var res=new factory() return $B.$JS2Py(res)}} -return JSObject.__getattribute__(self.obj,attr)} +return JSObject.__getattribute__(self,attr)} JSConstructor.$factory=function(obj){return{ -__class__:JSConstructor,obj:obj,func:obj.js_func}} -var UndefinedClass=$B.make_class("undefined",function(){return Undefined} -) -UndefinedClass.__bool__=function(){return false} -UndefinedClass.__repr__=function(){return "undefined"} -var Undefined={__class__:UndefinedClass} -$B.set_func_names(UndefinedClass,"") +__class__:JSConstructor,js:obj,func:obj.js_func}} var jsobj2pyobj=$B.jsobj2pyobj=function(jsobj){switch(jsobj){case true: case false: return jsobj} @@ -8517,7 +9122,8 @@ if(Array.isArray(jsobj)){return _b_.list.$factory(jsobj.map(jsobj2pyobj))} if(typeof jsobj==='number'){if(jsobj.toString().indexOf('.')==-1){return _b_.int.$factory(jsobj)} return _b_.float.$factory(jsobj)} if(jsobj.$nat==='kw'){return jsobj} -return JSObject.$factory(jsobj)} +if($B.$isNode(jsobj)){return $B.DOMNode.$factory(jsobj)} +return $B.JSObj.$factory(jsobj)} var pyobj2jsobj=$B.pyobj2jsobj=function(pyobj){ if(pyobj===true ||pyobj===false){return pyobj} if(pyobj===_b_.None){return null} @@ -8525,170 +9131,157 @@ if(pyobj===$B.Undefined){return undefined} var klass=$B.get_class(pyobj) if(klass===undefined){ return pyobj;} -if(klass===JSObject ||klass===JSConstructor){ +if(klass===JSConstructor){ if(pyobj.js_func !==undefined){return pyobj.js_func} return pyobj.js}else if(klass===$B.DOMNode || klass.__mro__.indexOf($B.DOMNode)>-1){ -return pyobj.elt}else if([_b_.list,_b_.tuple].indexOf(klass)>-1){ +return pyobj}else if([_b_.list,_b_.tuple].indexOf(klass)>-1){ var res=[] pyobj.forEach(function(item){res.push(pyobj2jsobj(item))}) -return res}else if(klass===_b_.dict){ +return res}else if(klass===_b_.dict ||_b_.issubclass(klass,_b_.dict)){ var jsobj={} var items=_b_.list.$factory(_b_.dict.items(pyobj)) items.forEach(function(item){if(typeof item[1]=='function'){ item[1].bind(jsobj)} jsobj[item[0]]=pyobj2jsobj(item[1])}) -return jsobj}else if(klass===$B.builtins.float){ +return jsobj}else if(klass===_b_.float){ return pyobj.valueOf()}else if(klass===$B.Function ||klass===$B.method){ return function(){try{var args=[] for(var i=0;i < arguments.length;i++){if(arguments[i]===undefined){args.push(_b_.None)} else{args.push(jsobj2pyobj(arguments[i]))}} -return pyobj2jsobj(pyobj.apply(this,args))}catch(err){console.log(err) -console.log(_b_.getattr(err,'info')) +console.log("pyobj",pyobj,"args",args) +if(pyobj.prototype.constructor===pyobj){var res=new pyobj(...args)}else{var res=pyobj.apply(this,args)} +return pyobj2jsobj(res)}catch(err){console.log(err) +console.log($B.$getattr(err,'info')) console.log($B.class_name(err)+':',err.args.length > 0 ? 
err.args[0]:'' ) throw err}}}else{ return pyobj}} -var JSObject={__class__:_b_.type,__mro__:[object],$infos:{__module__:"builtins",__name__:'JSObject'}} -JSObject.__bool__=function(self){return(new Boolean(self.js)).valueOf()} -JSObject.__delattr__=function(self,attr){_b_.getattr(self,attr) -delete self.js[attr] -return _b_.None} -JSObject.__dir__=function(self){return Object.keys(self.js)} -JSObject.__getattribute__=function(self,attr){var $test=false -if($test){console.log("get attr",attr,"of",self)} -if(attr.substr(0,2)=='$$'){attr=attr.substr(2)} -if(self.js===null){return object.__getattribute__(None,attr)} -if(attr=="__class__"){return JSObject} -if(attr=="__call__"){if(typeof self.js=="function"){return function(){ -var args=[] -for(var i=0;i < arguments.length;i++){args.push($B.pyobj2jsobj(arguments[i]))} -var res=self.js.apply(null,args) -if(res===undefined){return None} -return JSObject.$factory(res)}}else{throw _b_.AttributeError.$factory("object is not callable")}} -if(self.__class__===JSObject && attr=="bind" && -self.js[attr]===undefined && -self.js['addEventListener']!==undefined){ -attr='addEventListener'} -if(attr=="data" && self.js instanceof MessageEvent){return $B.structuredclone2pyobj(self.js.data)} -var js_attr=self.js[attr] -if(self.js_func && self.js_func[attr]!==undefined){js_attr=self.js_func[attr]} -if(js_attr !==undefined){if($test){console.log("jsattr",js_attr)} -if(typeof js_attr=='function'){ -var res=function(){var args=[] -for(var i=0,len=arguments.length;i < len;i++){var arg=arguments[i] +$B.JSConstructor=JSConstructor +function pyargs2jsargs(pyargs){var args=[] +for(var i=0,len=pyargs.length;i < len;i++){var arg=pyargs[i] if(arg !==undefined && arg !==null && arg.$nat !==undefined){var kw=arg.kw if(Array.isArray(kw)){kw=$B.extend(js_attr.name,...kw)} if(Object.keys(kw).length > 0){ throw _b_.TypeError.$factory( "A Javascript function can't take "+ -"keyword arguments")}}else{args.push(pyobj2jsobj(arg))}} -if(attr==='replace' && self.js===location){location.replace(args[0]) -return} -var new_this=self.js -if(self.js_func){ -new_this=self.js_func;} -if(this !==null && this !==undefined && this !==_window){new_this=this} -var result=js_attr.apply(new_this,args) -return jsobj2pyobj(result)} -res.__repr__=function(){return ''} -res.__str__=function(){return ''} +"keyword arguments")}}else{args.push($B.pyobj2jsobj(arg))}} +return args} +$B.JSObj=$B.make_class("JSObj",function(jsobj){if(Array.isArray(jsobj)){}else if(typeof jsobj=="function"){jsobj.$is_js_func=true +jsobj.__new__=function(){return new jsobj.$js_func(...arguments)}}else if(typeof jsobj=="number" && ! Number.isInteger(jsobj)){return new Number(jsobj)} +return jsobj} +) +$B.JSObj.__sub__=function(self,other){ +if(typeof self=="bigint" && typeof other=="bigint"){return self-other} +throw _b_.TypeError.$factory("unsupported operand type(s) for - : '"+ +$B.class_name(self)+"' and '"+$B.class_name(other)+"'")} +var ops={'+':'__add__','*':'__mul__','**':'__pow__','%' :'__mod__'} +for(var op in ops){eval('$B.JSObj.'+ops[op]+' = '+ +($B.JSObj.__sub__+'').replace(/-/g,op))} +$B.JSObj.__eq__=function(self,other){switch(typeof self){case "object": +if(Object.keys(self).length !==Object.keys(other).length){return false} +for(var key in self){if(! $B.JSObj.__eq__(self[key],other[key])){return false}} +default: +return self===other}} +$B.JSObj.__ne__=function(self,other){return ! 
$B.JSObj.__eq__(self,other)} +$B.JSObj.__getattribute__=function(self,attr){var test=false +if(test){console.log("__ga__",self,attr)} +if(attr=="$$new" && typeof self=="function"){ +if(self.$js_func){return function(){var args=pyargs2jsargs(arguments) +return $B.JSObj.$factory(new self.$js_func(...args))}}else{return function(){var args=pyargs2jsargs(arguments) +return $B.JSObj.$factory(new self(...args))}}} +if(typeof attr=="string"){attr=$B.from_alias(attr)} +var js_attr=self[attr] +if(js_attr==undefined && typeof self=="function" && self.$js_func){js_attr=self.$js_func[attr]} +if(js_attr===undefined){if(typeof self.getNamedItem=='function'){var res=self.getNamedItem(attr) +if(res !==undefined){return $B.JSObj.$factory(res)}} +var klass=$B.get_class(self) +if(klass && klass[attr]){var class_attr=klass[attr] +if(typeof class_attr=="function"){return function(){var args=[self] +for(var i=0,len=arguments.length;i < len;i++){args.push(arguments[i])} +return $B.JSObj.$factory(class_attr.apply(null,args))}}else{return class_attr}} +if(attr=="bind" && typeof self.addEventListener=="function"){return function(event,callback){return self.addEventListener(event,callback)}} +throw _b_.AttributeError.$factory(attr)} +if(typeof js_attr==='function'){var res=function(){var args=pyargs2jsargs(arguments),target=self.$js_func ||self +try{var result=js_attr.apply(target,args)}catch(err){console.log("error",err) +console.log("attribute",attr,"of self",self,js_attr,args,arguments) +throw err} +if(result===undefined){return $B.Undefined}else if(result===null){return _b_.None} +return $B.JSObj.$factory(result)} res.prototype=js_attr.prototype -return{__class__:JSObject,js:res,js_func:js_attr}}else{if($test){console.log("use JS2Py",$B.$JS2Py(js_attr))} -return $B.$JS2Py(js_attr)}}else if(self.js===_window && attr==='$$location'){ -return $Location()} -var res=self.__class__[attr] -if(res===undefined){ -var mro=self.__class__.__mro__ -for(var i=0,len=mro.length;i < len;i++){var v=mro[i][attr] -if(v !==undefined){res=v -break}}} -if(res !==undefined){if($test){console.log("found in klass",res+"")} -if(typeof res==='function'){ -return function(){var args=[self] -for(var i=0,len=arguments.length;i < len;i++){var arg=arguments[i] -if(arg &&(arg.__class__===JSObject || -arg.__class__===JSConstructor)){args.push(arg.js)}else{args.push(arg)}} -return res.apply(self,args)}} -return $B.$JS2Py(res)}else{ -throw _b_.AttributeError.$factory("no attribute "+attr+' for '+ -self.js)}} -JSObject.__getitem__=function(self,rank){if(typeof self.js.length=='number'){if((typeof rank=="number" ||typeof rank=="boolean")&& -typeof self.js.item=='function'){var rank_to_int=_b_.int.$factory(rank) -if(rank_to_int < 0){rank_to_int+=self.js.length} -var res=self.js.item(rank_to_int) +res.$js_func=js_attr +res.__mro__=[_b_.object] +res.$infos={__name__:js_attr.name,__qualname__:js_attr.name} +if($B.frames_stack.length > 0){res.$infos.__module__=$B.last($B.frames_stack)[3].__name__} +return $B.JSObj.$factory(res)}else{return $B.JSObj.$factory(js_attr)}} +$B.JSObj.__setattr__=function(self,attr,value){if(typeof attr=="string"){attr=$B.from_alias(attr)} +self[attr]=$B.pyobj2structuredclone(value) +return _b_.None} +$B.JSObj.__getitem__=function(self,key){if(typeof key=="string"){return $B.JSObj.__getattribute__(self,key)}else if(typeof key=="number"){if(self[key]!==undefined){return $B.JSObj.$factory(self[key])} +if(typeof self.length=='number'){if((typeof key=="number" ||typeof key=="boolean")&& +typeof self.item=='function'){var 
rank=_b_.int.$factory(key) +if(rank < 0){rank+=self.length} +var res=self.item(rank) if(res===null){throw _b_.IndexError.$factory(rank)} -return JSObject.$factory(res)}else if(typeof rank=="string" && -typeof self.js.getNamedItem=='function'){var res=JSObject.$factory(self.js.getNamedItem(rank)) -if(res===undefined){throw _b_.KeyError.$factory(rank)} -return res}} -try{return getattr(self.js,'__getitem__')(rank)} -catch(err){if(self.js[rank]!==undefined){return JSObject.$factory(self.js[rank])} -throw _b_.KeyError.$factory(rank)}} -var JSObject_iterator=$B.make_iterator_class('JS object iterator') -JSObject.__iter__=function(self){var items=[] -if(_window.Symbol && self.js[Symbol.iterator]!==undefined){ +return $B.JSObj.$factory(res)}}} +throw _b_.KeyError.$factory(rank)} +$B.JSObj.__setitem__=$B.JSObj.__setattr__ +var JSObj_iterator=$B.make_iterator_class('JS object iterator') +$B.JSObj.__iter__=function(self){var items=[] +if(_window.Symbol && self[Symbol.iterator]!==undefined){ var items=[] -if(self.js.next !==undefined){while(true){var nxt=self.js.next() +if(self.next !==undefined){while(true){var nxt=self.next() if(nxt.done){break} -items.push(nxt.value)}}else if(self.js.length !==undefined && self.js.item !==undefined){for(var i=0;i < self.js.length;i++){items.push(self.js.item(i))}} -return JSObject_iterator.$factory(items)}else if(self.js.length !==undefined && self.js.item !==undefined){ -for(var i=0;i < self.js.length;i++){items.push(JSObject.$factory(self.js.item(i)))} -return JSObject_iterator.$factory(items)} -var _dict=JSObject.to_dict(self) -return _b_.dict.__iter__(_dict)} -JSObject.__le__=function(self,other){if(typeof self.js["appendChild"]=="function"){return $B.DOMNode.__le__($B.DOMNode.$factory(self.js),other)} -return _b_.NotImplemented} -JSObject.__len__=function(self){if(typeof self.js.length=='number'){return self.js.length} -try{return getattr(self.js,'__len__')()} -catch(err){throw _b_.AttributeError.$factory(self.js+' has no attribute __len__')}} -JSObject.__repr__=function(self){if(self.js instanceof Date){return self.js.toString()} -var proto=Object.getPrototypeOf(self.js) -if(proto){var name=proto.constructor.name -if(name===undefined){ -var proto_str=proto.constructor.toString() -name=proto_str.substring(8,proto_str.length-1)} -return "<"+name+" object>"} -return ""} -JSObject.__setattr__=function(self,attr,value){if(attr.substr && attr.substr(0,2)=='$$'){ -attr=attr.substr(2)} -if(isinstance(value,JSObject)){self.js[attr]=value.js} -else{self.js[attr]=value -if(typeof value=='function'){self.js[attr]=function(){var args=[] -for(var i=0,len=arguments.length;i < len;i++){console.log(i,arguments[i]) -args.push($B.$JS2Py(arguments[i]))} -try{return value.apply(null,args)} -catch(err){err=$B.exception(err) -var info=_b_.getattr(err,'info') -if(err.args.length > 0){err.toString=function(){return info+'\n'+$B.class_name(err)+ -': '+_b_.repr(err.args[0])}}else{err.toString=function(){return info+'\n'+$B.class_name(err)}} -console.log(err+'') -throw err}}}}} -JSObject.__setitem__=JSObject.__setattr__ -JSObject.__str__=JSObject.__repr__ -var no_dict={'string':true,'function':true,'number':true,'boolean':true} -JSObject.bind=function(self,evt,func){var js_func=function(ev){return func(jsobj2pyobj(ev))} -self.js.addEventListener(evt,js_func) +items.push($B.JSObj.$factory(nxt.value))}}else if(self.length !==undefined && self.item !==undefined){for(var i=0;i < self.length;i++){items.push($B.JSObj.$factory(self.item(i)))}} +return JSObj_iterator.$factory(items)}else 
if(self.length !==undefined && self.item !==undefined){ +for(var i=0;i < self.length;i++){items.push($B.JSObj.$factory(self.js.item(i)))} +return JSObj_iterator.$factory(items)} +return JSObj_iterator.$factory(Object.keys(self))} +$B.JSObj.__len__=function(self){if(typeof self.length=='number'){return self.length} +throw _b_.AttributeError.$factory(self+' has no attribute __len__')} +$B.JSObj.__repr__=$B.JSObj.__str__=function(self){return ''} +$B.JSObj.bind=function(self,evt,func){ +var js_func=function(ev){return func(jsobj2pyobj(ev))} +self.addEventListener(evt,js_func) return _b_.None} -JSObject.to_dict=function(self){ -return $B.obj_dict(self.js,true)} -JSObject.$factory=function(obj){if(obj===null){return _b_.None} -if(typeof obj=='function'){return{__class__:JSObject,js:obj,js_func:obj}} -var klass=$B.get_class(obj) -if(klass===_b_.float){return _b_.float.$factory(obj)} -if(klass===_b_.list){return $B.JSArray.$factory(obj)} -if(klass !==undefined){return obj} -return{ -__class__:JSObject,js:obj}} -$B.JSObject=JSObject -$B.JSConstructor=JSConstructor})(__BRYTHON__) +$B.JSObj.to_dict=function(self){ +return $B.structuredclone2pyobj(self)} +$B.set_func_names($B.JSObj,"builtins") +$B.JSMeta=$B.make_class("JSMeta") +$B.JSMeta.__call__=function(cls){ +var extra_args=[],klass=arguments[0] +for(var i=1,len=arguments.length;i < len;i++){extra_args.push(arguments[i])} +var new_func=_b_.type.__getattribute__(klass,"__new__") +var instance=new_func.apply(null,arguments) +if(instance instanceof cls.__mro__[0].$js_func){ +var init_func=_b_.type.__getattribute__(klass,"__init__") +if(init_func !==_b_.object.__init__){ +var args=[instance].concat(extra_args) +init_func.apply(null,args)}} +return instance} +$B.JSMeta.__mro__=[_b_.type,_b_.object] +$B.JSMeta.__getattribute__=function(cls,attr){if(cls[attr]!==undefined){return cls[attr]}else if($B.JSMeta[attr]!==undefined){return $B.JSMeta[attr]}else{ +return _b_.type.__getattribute__(cls,attr)}} +$B.JSMeta.__init_subclass__=function(){} +$B.JSMeta.__new__=function(metaclass,class_name,bases,cl_dict){ +eval("var "+class_name+`=function(){if(cl_dict.$string_dict.__init__){var args=[this] +for(var i=0,len=arguments.length;i < len;i++){args.push(arguments[i])} +cl_dict.$string_dict.__init__[0].apply(this,args)}else{return new bases[0].$js_func(...arguments)}}`) +var new_js_class=eval(class_name) +new_js_class.prototype=Object.create(bases[0].$js_func.prototype) +new_js_class.prototype.constructor=new_js_class +new_js_class.__mro__=[bases[0],_b_.type] +new_js_class.$is_js_class=true +return new_js_class} +$B.set_func_names($B.JSMeta,"builtins")})(__BRYTHON__) ; ;(function($B){$B.stdlib={} -var 
pylist=['VFS_import','__future__','_abcoll','_codecs','_collections','_collections_abc','_compat_pickle','_contextvars','_csv','_dummy_thread','_functools','_imp','_io','_markupbase','_py_abc','_pydecimal','_queue','_random','_socket','_sre','_struct','_sysconfigdata','_sysconfigdata_0_brython_','_testcapi','_thread','_threading_local','_weakref','_weakrefset','abc','antigravity','argparse','atexit','base64','bdb','binascii','bisect','calendar','cmath','cmd','code','codecs','codeop','colorsys','configparser','contextlib','contextvars','copy','copyreg','csv','dataclasses','datetime','decimal','difflib','doctest','enum','errno','external_import','faulthandler','fnmatch','formatter','fractions','functools','gc','genericpath','getopt','gettext','glob','heapq','imp','inspect','io','ipaddress','itertools','keyword','linecache','locale','nntplib','numbers','opcode','operator','optparse','os','pdb','pickle','platform','posixpath','pprint','profile','pwd','py_compile','pydoc','queue','quopri','re','reprlib','select','selectors','shlex','shutil','signal','site','site-packages.__future__','site-packages.docs','site-packages.header','site-packages.test_sp','socket','sre_compile','sre_constants','sre_parse','stat','string','struct','subprocess','sys','sysconfig','tarfile','tb','tempfile','test.namespace_pkgs.module_and_namespace_package.a_test','textwrap','this','threading','time','timeit','token','tokenize','traceback','turtle','types','typing','uuid','warnings','weakref','webbrowser','zipfile','zlib'] +var pylist=['VFS_import','__future__','_abcoll','_codecs','_collections','_collections_abc','_compat_pickle','_contextvars','_csv','_dummy_thread','_frozen_importlib','_functools','_imp','_io','_markupbase','_operator','_py_abc','_pydecimal','_queue','_random','_socket','_sre','_struct','_sysconfigdata','_sysconfigdata_0_brython_','_testcapi','_thread','_threading_local','_weakref','_weakrefset','abc','antigravity','argparse','atexit','base64','bdb','binascii','bisect','browser.aio','browser.ajax','browser.highlight','browser.html','browser.indexed_db','browser.local_storage','browser.markdown','browser.object_storage','browser.session_storage','browser.svg','browser.template','browser.timer','browser.webcomponent','browser.websocket','browser.webworker','browser.worker','calendar','cmath','cmd','code','codecs','codeop','colorsys','configparser','contextlib','contextvars','copy','copyreg','csv','dataclasses','datetime','decimal','difflib','doctest','enum','errno','external_import','faulthandler','fnmatch','formatter','fractions','functools','gc','genericpath','getopt','gettext','glob','heapq','hmac','imp','inspect','interpreter','io','ipaddress','itertools','json','keyword','linecache','locale','mimetypes','nntplib','ntpath','numbers','opcode','operator','optparse','os','pathlib','pdb','pickle','pkgutil','platform','posixpath','pprint','profile','pwd','py_compile','pydoc','queue','quopri','re','reprlib','select','selectors','shlex','shutil','signal','site','site-packages.__future__','site-packages.docs','site-packages.header','site-packages.test','site-packages.test_sp','socket','sre_compile','sre_constants','sre_parse','stat','string','stringprep','struct','subprocess','sys','sysconfig','tarfile','tb','tempfile','test.namespace_pkgs.module_and_namespace_package.a_test','textwrap','this','threading','time','timeit','token','tokenize','traceback','turtle','types','typing','uu','uuid','warnings','weakref','webbrowser','zipfile','zipimport','zlib'] for(var i=0;i < 
pylist.length;i++){$B.stdlib[pylist[i]]=['py']} -var js=['_aio','_ajax','_base64','_binascii','_jsre','_locale','_multiprocessing','_posixsubprocess','_profile','_sre_utils','_string','_strptime','_svg','_warnings','_webworker','_zlib_utils','aes','array','builtins','dis','hashlib','hmac-md5','hmac-ripemd160','hmac-sha1','hmac-sha224','hmac-sha256','hmac-sha3','hmac-sha384','hmac-sha512','long_int','marshal','math','md5','modulefinder','pbkdf2','posix','rabbit','rabbit-legacy','random','rc4','ripemd160','sha1','sha224','sha256','sha3','sha384','sha512','tripledes','unicodedata'] +var js=['_aio','_ajax','_base64','_binascii','_cmath','_io_classes','_json','_jsre','_locale','_multiprocessing','_posixsubprocess','_profile','_sre_utils','_string','_strptime','_svg','_warnings','_webcomponent','_webworker','_zlib_utils','aes','array','builtins','dis','hashlib','hmac-md5','hmac-ripemd160','hmac-sha1','hmac-sha224','hmac-sha256','hmac-sha3','hmac-sha384','hmac-sha512','long_int','marshal','math','math1','md5','modulefinder','pbkdf2','posix','rabbit','rabbit-legacy','random','rc4','ripemd160','sha1','sha224','sha256','sha3','sha384','sha512','tripledes','unicodedata'] for(var i=0;i < js.length;i++){$B.stdlib[js[i]]=['js']} -var pkglist=['asyncio','browser','browser.widgets','collections','concurrent','concurrent.futures','email','email.mime','encodings','html','http','importlib','json','logging','multiprocessing','multiprocessing.dummy','pydoc_data','site-packages.simpleaio','site-packages.ui','test','test.encoded_modules','test.leakers','test.namespace_pkgs.not_a_namespace_pkg.foo','test.support','test.test_email','test.test_importlib','test.test_importlib.builtin','test.test_importlib.extension','test.test_importlib.frozen','test.test_importlib.import_','test.test_importlib.source','test.test_json','test.tracedmodules','unittest','unittest.test','unittest.test.testmock','urllib'] +var pkglist=['browser.widgets','collections','concurrent','concurrent.futures','email','email.mime','encodings','html','http','importlib','logging','multiprocessing','multiprocessing.dummy','pydoc_data','site-packages.foobar','site-packages.simpleaio','site-packages.simpy','site-packages.simpy.resources','site-packages.ui','test','test.encoded_modules','test.leakers','test.namespace_pkgs.not_a_namespace_pkg.foo','test.support','test.test_email','test.test_importlib','test.test_importlib.builtin','test.test_importlib.extension','test.test_importlib.frozen','test.test_importlib.import_','test.test_importlib.source','test.test_json','test.tracedmodules','unittest','unittest.test','unittest.test.testmock','urllib'] for(var i=0;i < pkglist.length;i++){$B.stdlib[pkglist[i]]=['py',true]}})(__BRYTHON__) ; @@ -8696,50 +9289,39 @@ for(var i=0;i < pkglist.length;i++){$B.stdlib[pkglist[i]]=['py',true]}})(__BRYTH var module=$B.module={__class__ :_b_.type,__mro__:[_b_.object],$infos:{__module__:"builtins",__name__:"module"},$is_class:true} module.__init__=function(){} module.__new__=function(cls,name,doc,$package){return{ -$class:cls,__name__:name,__doc__:doc ||_b_.None,__package__:$package ||_b_.None}} +__class__:cls,__name__:name,__doc__:doc ||_b_.None,__package__:$package ||_b_.None}} module.__repr__=module.__str__=function(self){var res=""} module.__setattr__=function(self,attr,value){if(self.__name__=="__builtins__"){ $B.builtins[attr]=value}else{self[attr]=value}} module.$factory=function(name,doc,$package){return{ -$class:module,__name__:name,__doc__:doc ||_b_.None,__package__:$package ||_b_.None}} 
+__class__:module,__name__:name,__doc__:doc ||_b_.None,__package__:$package ||_b_.None}} $B.set_func_names(module,"builtins") function parent_package(mod_name){var parts=mod_name.split(".") parts.pop() return parts.join(".")} -function $download_module(module,url,$package){var xhr=new XMLHttpRequest(),fake_qs -switch($B.$options.cache){case "version": -fake_qs="?v="+$B.version_info[2] -break -case "browser": -fake_qs="" -break -default: -fake_qs="?v="+(new Date().getTime())} +function $download_module(mod,url,$package){var xhr=new XMLHttpRequest(),fake_qs="?v="+(new Date().getTime()),res=null,mod_name=mod.__name__ var timer=_window.setTimeout(function(){xhr.abort()},5000) -var res=null,mod_name=module.__name__,res,t0=new Date() -$B.download_time=$B.download_time ||0 -xhr.open("GET",url+fake_qs,false) +if($B.$options.cache){xhr.open("GET",url,false)}else{xhr.open("GET",url+fake_qs,false)} xhr.send() -if($B.$CORS){if(xhr.status==200 ||xhr.status==0){res=xhr.responseText}else{res=_b_.FileNotFoundError.$factory("No module named '"+ +if($B.$CORS){if(xhr.status==200 ||xhr.status==0){res=xhr.responseText}else{res=_b_.ModuleNotFoundError.$factory("No module named '"+ mod_name+"'")}}else{if(xhr.readyState==4){if(xhr.status==200){res=xhr.responseText -module.$last_modified= +mod.$last_modified= xhr.getResponseHeader("Last-Modified")}else{ -console.log("Error "+xhr.status+ +console.info("Error "+xhr.status+ " means that Python module "+mod_name+ " was not found at url "+url) -res=_b_.FileNotFoundError.$factory("No module named '"+ +res=_b_.ModuleNotFoundError.$factory("No module named '"+ mod_name+"'")}}} _window.clearTimeout(timer) -if(res==null){throw _b_.FileNotFoundError.$factory("No module named '"+ +if(res==null){throw _b_.ModuleNotFoundError.$factory("No module named '"+ mod_name+"' (res is null)")} if(res.constructor===Error){throw res} -$B.download_time+=(new Date())-t0 return res} $B.$download_module=$download_module -function import_js(module,path){try{var module_contents=$download_module(module,path,undefined)}catch(err){return null} -run_js(module_contents,path,module) +function import_js(mod,path){try{var module_contents=$download_module(mod,path,undefined)}catch(err){return null} +run_js(module_contents,path,mod) return true} function run_js(module_contents,path,_module){ try{var $module=new Function(module_contents+";\nreturn $module")() @@ -8748,7 +9330,7 @@ console.log(path,_module) throw err} try{$module} catch(err){console.log("no $module") -throw _b_.ImportError.$factory("name '$module' is not defined in module")} +throw _b_.ImportError.$factory("name '$module' not defined in module")} $module.__name__=_module.__name__ for(var attr in $module){if(typeof $module[attr]=="function"){$module[attr].$infos={__module__:_module.__name__,__name__:attr,__qualname__:attr}}} if(_module !==undefined){ @@ -8767,123 +9349,159 @@ function show_ns(){var kk=Object.keys(_window) for(var i=0,len=kk.length;i < len;i++){console.log(kk[i]) if(kk[i].charAt(0)=="$"){console.log(eval(kk[i]))}} console.log("---")} -function import_py(module,path,$package){ -var mod_name=module.__name__,module_contents=$download_module(module,path,$package) -module.$src=module_contents -$B.imported[mod_name].$is_package=module.$is_package -$B.imported[mod_name].$last_modified=module.$last_modified +function import_py(mod,path,$package){ +var mod_name=mod.__name__,module_contents=$download_module(mod,path,$package) +mod.$src=module_contents +$B.imported[mod_name].$is_package=mod.$is_package 
+$B.imported[mod_name].$last_modified=mod.$last_modified if(path.substr(path.length-12)=="/__init__.py"){$B.imported[mod_name].__package__=mod_name $B.imported[mod_name].__path__=path -$B.imported[mod_name].$is_package=module.$is_package=true}else if($package){$B.imported[mod_name].__package__=$package}else{var mod_elts=mod_name.split(".") +$B.imported[mod_name].$is_package=mod.$is_package=true}else if($package){$B.imported[mod_name].__package__=$package}else{var mod_elts=mod_name.split(".") mod_elts.pop() $B.imported[mod_name].__package__=mod_elts.join(".")} $B.imported[mod_name].__file__=path -return run_py(module_contents,path,module)} +return run_py(module_contents,path,mod)} function run_py(module_contents,path,module,compiled){ $B.file_cache[path]=module_contents -var root,js +var root,js,mod_name=module.__name__ if(! compiled){var $Node=$B.$Node,$NodeJSCtx=$B.$NodeJSCtx $B.$py_module_path[module.__name__]=path -root=$B.py2js(module_contents,module,module.__name__,$B.builtins_scope) -if(module.__package__ !==undefined){root.binding["__package__"]=true} -var body=root.children -root.children=[] -var mod_node=new $Node("expression") -new $NodeJSCtx(mod_node,"var $module = (function()") -root.insert(0,mod_node) -for(var i=0,len=body.length;i < len;i++){mod_node.add(body[i])} -var ret_node=new $Node("expression") -new $NodeJSCtx(ret_node,"return $locals_"+ -module.__name__.replace(/\./g,"_")) -mod_node.add(ret_node) -var ex_node=new $Node("expression") -new $NodeJSCtx(ex_node,")(__BRYTHON__)") -root.add(ex_node)} +var src={src:module_contents,has_annotations:false} +root=$B.py2js(src,module,module.__name__,$B.builtins_scope) +if(module.__package__ !==undefined){root.binding["__package__"]=true}} try{js=compiled ? module_contents :root.to_js() if($B.$options.debug==10){console.log("code for module "+module.__name__) console.log(js)} -js+="; return $module" +var src=js +js="var $module = (function(){\n"+js+"return $locals_"+ +module.__name__.replace(/\./g,"_")+"})(__BRYTHON__)\n"+ +"return $module" var module_id="$locals_"+module.__name__.replace(/\./g,'_') -var $module=(new Function(module_id,js))(module)}catch(err){console.log(err+" for module "+module.__name__) +var $module=(new Function(module_id,js))(module)}catch(err){if($B.debug > 1){console.log(err+" for module "+module.__name__) console.log("module",module) console.log(root) -console.log(err) if($B.debug > 1){console.log(js)} for(var attr in err){console.log(attr,err[attr])} console.log(_b_.getattr(err,"info","[no info]")) console.log("message: "+err.$message) console.log("filename: "+err.fileName) -console.log("linenum: "+err.lineNumber) +console.log("linenum: "+err.lineNumber)} if($B.debug > 0){console.log("line info "+$B.line_info)} -throw err}finally{root=null -js=null -$B.clear_ns(module.__name__)} +throw err}finally{$B.clear_ns(module.__name__)} try{ var mod=eval("$module") for(var attr in mod){module[attr]=mod[attr]} module.__initializing__=false $B.imported[module.__name__]=module -$B.file_cache[module.__name__]=module_contents -return true}catch(err){console.log(""+err+" "+" for module "+module.__name__) +return{ +content:src,name:mod_name,imports:Object.keys(root.imports).join(",")}}catch(err){console.log(""+err+" "+" for module "+module.__name__) for(var attr in err){console.log(attr+" "+err[attr])} if($B.debug > 0){console.log("line info "+__BRYTHON__.line_info)} throw err}} -$B.run_py=run_py +$B.run_py=run_py function new_spec(fields){ fields.__class__=module return fields} var 
finder_VFS={__class__:_b_.type,__mro__:[_b_.object],$infos:{__module__:"builtins",__name__:"VFSFinder"},create_module :function(cls,spec){ -return _b_.None},exec_module :function(cls,modobj){var stored=modobj.__spec__.loader_state.stored +return _b_.None},exec_module :function(cls,modobj){var stored=modobj.__spec__.loader_state.stored,timestamp=modobj.__spec__.loader_state.timestamp delete modobj.__spec__["loader_state"] var ext=stored[0],module_contents=stored[1],imports=stored[2] modobj.$is_package=stored[3]||false -var path=$B.brython_path+"Lib/"+modobj.__name__ -if(modobj.$is_package){path+="/__init__.py"} +var path="VFS."+modobj.__name__ +path+=modobj.$is_package ? "/__init__.py" :ext modobj.__file__=path -if(ext=='.js'){run_js(module_contents,modobj.__path__,modobj)}else if($B.precompiled.hasOwnProperty(modobj.__name__)){var parts=modobj.__name__.split(".") +$B.file_cache[modobj.__file__]=$B.VFS[modobj.__name__][1] +if(ext=='.js'){run_js(module_contents,modobj.__path__,modobj)}else if($B.precompiled.hasOwnProperty(modobj.__name__)){if($B.debug > 1){console.info("load",modobj.__name__,"from precompiled")} +var parts=modobj.__name__.split(".") for(var i=0;i < parts.length;i++){var parent=parts.slice(0,i+1).join(".") if($B.imported.hasOwnProperty(parent)&& $B.imported[parent].__initialized__){continue} -var mod_js=$B.precompiled[parent],is_package=Array.isArray(mod_js) -if(is_package){mod_js=mod_js[0]} -$B.imported[parent]=module.$factory(parent,undefined,is_package) -$B.imported[parent].__initialized__=true -$B.imported[parent].__file__= -$B.imported[parent].__cached__="VFS."+ -modobj.__name__+".py" -$B.file_cache[$B.imported[parent].__file__]=module_contents -if(is_package){$B.imported[parent].__path__="" -$B.imported[parent].__package__=parent}else{var elts=parent.split(".") +var mod_js=$B.precompiled[parent],is_package=modobj.$is_package +if(Array.isArray(mod_js)){mod_js=mod_js[0]} +var mod=$B.imported[parent]=module.$factory(parent,undefined,is_package) +mod.__initialized__=true +if(is_package){mod.__path__="" +mod.__package__=parent}else{var elts=parent.split(".") elts.pop() -$B.imported[parent].__package__=elts.join(".")} +mod.__package__=elts.join(".")} +mod.__file__=path try{var parent_id=parent.replace(/\./g,"_") mod_js+="return $locals_"+parent_id var $module=new Function("$locals_"+parent_id,mod_js)( -$B.imported[parent])}catch(err){if($B.debug > 1){console.log(err) +mod)}catch(err){if($B.debug > 1){console.log(err) for(var k in err){console.log(k,err[k])} -console.log(Object.keys($B.imported))} +console.log(Object.keys($B.imported)) +if($B.debug > 2){console.log(modobj,"mod_js",mod_js)}} throw err} -for(var attr in $module){$B.imported[parent][attr]=$module[attr]} -if(i>0){ -$B.builtins.setattr($B.imported[parts.slice(0,i).join(".")],parts[i],$module)}} -return $module}else{if($B.debug > 1){console.log("run Python code from VFS",modobj.__name__)} -run_py(module_contents,modobj.__path__,modobj,ext=='.pyc.js')}},find_module:function(cls,name,path){return{ +for(var attr in $module){mod[attr]=$module[attr]} +$module.__file__=path +if(i > 0){ +$B.builtins.setattr( +$B.imported[parts.slice(0,i).join(".")],parts[i],$module)}} +return $module}else{var mod_name=modobj.__name__ +if($B.debug > 1){console.log("run Python code from VFS",mod_name)} +var record=run_py(module_contents,modobj.__path__,modobj) +record.is_package=modobj.$is_package +record.timestamp=$B.timestamp +record.source_ts=timestamp +$B.precompiled[mod_name]=record.is_package ?[record.content]: +record.content +var 
elts=mod_name.split(".") +if(elts.length > 1){elts.pop()} +if($B.$options.indexedDB && self.indexedDB && +$B.idb_name){ +var idb_cx=indexedDB.open($B.idb_name) +idb_cx.onsuccess=function(evt){var db=evt.target.result,tx=db.transaction("modules","readwrite"),store=tx.objectStore("modules"),cursor=store.openCursor(),request=store.put(record) +request.onsuccess=function(){if($B.debug > 1){console.info(modobj.__name__,"stored in db")}} +request.onerror=function(){console.info("could not store "+modobj.__name__)}}}}},find_module:function(cls,name,path){return{ __class__:Loader,load_module:function(name,path){var spec=cls.find_spec(cls,name,path) var mod=module.$factory(name) $B.imported[name]=mod mod.__spec__=spec -cls.exec_module(cls,mod)}}},find_spec :function(cls,fullname,path,prev_module){if(!$B.use_VFS){return _b_.None} -var stored=$B.VFS[fullname] +cls.exec_module(cls,mod)}}},find_spec :function(cls,fullname,path,prev_module){var stored,is_package,timestamp +if(!$B.use_VFS){return _b_.None} +stored=$B.VFS[fullname] if(stored===undefined){return _b_.None} -var is_package=stored[3]||false,is_builtin=$B.builtin_module_names.indexOf(fullname)>-1 +is_package=stored[3]||false +timestamp=stored.timestamp +if(stored){var is_builtin=$B.builtin_module_names.indexOf(fullname)>-1 return new_spec({name :fullname,loader:cls, origin :is_builtin? "built-in" :"brython_stdlib", -submodule_search_locations:is_package?[]:_b_.None,loader_state:{stored:stored}, -cached:_b_.None,parent:is_package? fullname :parent_package(fullname),has_location:_b_.False})}} +submodule_search_locations:is_package?[]:_b_.None,loader_state:{stored:stored,timestamp:timestamp}, +cached:_b_.None,parent:is_package? fullname :parent_package(fullname),has_location:_b_.False})}}} $B.set_func_names(finder_VFS,"") for(var method in finder_VFS){if(typeof finder_VFS[method]=="function"){finder_VFS[method]=_b_.classmethod.$factory( finder_VFS[method])}} finder_VFS.$factory=function(){return{__class__:finder_VFS}} +var finder_cpython={__class__:_b_.type,__mro__:[_b_.object],$infos:{__module__:"builtins",__name__:"CPythonFinder"},create_module :function(cls,spec){ +return _b_.None},exec_module :function(cls,modobj){console.log("exec PYthon module",modobj) +var loader_state=modobj.__spec__.loader_state +var content=loader_state.content +delete modobj.__spec__["loader_state"] +modobj.$is_package=loader_state.is_package +modobj.__file__=loader_state.__file__ +$B.file_cache[modobj.__file__]=content +var mod_name=modobj.__name__ +if($B.debug > 1){console.log("run Python code from CPython",mod_name)} +run_py(content,modobj.__path__,modobj)},find_module:function(cls,name,path){return{ +__class__:Loader,load_module:function(name,path){var spec=cls.find_spec(cls,name,path) +var mod=module.$factory(name) +$B.imported[name]=mod +mod.__spec__=spec +cls.exec_module(cls,mod)}}},find_spec :function(cls,fullname,path,prev_module){console.log("finder cpython",fullname) +var xhr=new XMLHttpRequest(),url="/cpython_import?module="+fullname,result +xhr.open("GET",url,false) +xhr.onreadystatechange=function(){if(this.readyState==4 && this.status==200){var data=JSON.parse(this.responseText) +result=new_spec({name :fullname,loader:cls, +origin :"CPython", +submodule_search_locations:data.is_package?[]:_b_.None,loader_state:{content:data.content}, +cached:_b_.None,parent:data.is_package? 
fullname :parent_package(fullname),has_location:_b_.False})}} +xhr.send() +return result}} +$B.set_func_names(finder_cpython,"") +for(var method in finder_cpython){if(typeof finder_cpython[method]=="function"){finder_cpython[method]=_b_.classmethod.$factory( +finder_cpython[method])}} +finder_cpython.$factory=function(){return{__class__:finder_cpython}} var finder_stdlib_static={$factory :finder_stdlib_static,__class__ :_b_.type,__mro__:[_b_.object],$infos:{__module__:"builtins",__name__:"StdlibStatic"},create_module :function(cls,spec){ return _b_.None},exec_module :function(cls,module){var metadata=module.__spec__.loader_state module.$is_package=metadata.is_package @@ -8900,7 +9518,8 @@ if(address===undefined){var elts=fullname.split(".") if(elts.length > 1){elts.pop() var $package=$B.stdlib[elts.join(".")] if($package && $package[1]){address=["py"]}}} -if(address !==undefined){var ext=address[0],is_pkg=address[1]!==undefined,path=$B.brython_path+((ext=="py")? "Lib/" :"libs/")+ +if(address !==undefined){var ext=address[0],is_pkg=address[1]!==undefined,path=$B.brython_path+ +((ext=="py")? "Lib/" :"libs/")+ fullname.replace(/\./g,"/"),metadata={ext:ext,is_package:is_pkg,path:path+(is_pkg? "/__init__.py" : ((ext=="py")? ".py" :".js")),address:address} var res=new_spec({name :fullname,loader:cls, @@ -8913,11 +9532,11 @@ for(var method in finder_stdlib_static){if(typeof finder_stdlib_static[method]== finder_stdlib_static[method])}} finder_stdlib_static.$factory=function(){return{__class__:finder_stdlib_static}} var finder_path={__class__:_b_.type,__mro__:[_b_.object],$infos:{__module__:"builtins",__name__:"ImporterPath"},create_module :function(cls,spec){ -return _b_.None},exec_module :function(cls,module){var _spec=_b_.getattr(module,"__spec__"),code=_spec.loader_state.code; -module.$is_package=_spec.loader_state.is_package,delete _spec.loader_state["code"] +return _b_.None},exec_module :function(cls,_module){var _spec=$B.$getattr(_module,"__spec__"),code=_spec.loader_state.code; +_module.$is_package=_spec.loader_state.is_package,delete _spec.loader_state["code"] var src_type=_spec.loader_state.type -if(src_type=="py" ||src_type=="pyc.js"){run_py(code,_spec.origin,module,src_type=="pyc.js")} -else if(_spec.loader_state.type=="js"){run_js(code,_spec.origin,module)}},find_module:function(cls,name,path){return finder_path.find_spec(cls,name,path)},find_spec :function(cls,fullname,path,prev_module){var current_module=$B.last($B.frames_stack)[2] +if(src_type=="py" ||src_type=="pyc.js"){run_py(code,_spec.origin,_module,src_type=="pyc.js")} +else if(_spec.loader_state.type=="js"){run_js(code,_spec.origin,_module)}},find_module:function(cls,name,path){return finder_path.find_spec(cls,name,path)},find_spec :function(cls,fullname,path,prev_module){var current_module=$B.last($B.frames_stack)[2] if($B.VFS && $B.VFS[current_module]){ return _b_.None} if($B.is_none(path)){ @@ -8929,13 +9548,13 @@ if(finder===undefined){var finder_notfound=true for(var j=0,lj=$B.path_hooks.length; j < lj && finder_notfound;++j){var hook=$B.path_hooks[j].$factory try{finder=(typeof hook=="function" ? hook : -_b_.getattr(hook,"__call__"))(path_entry) +$B.$getattr(hook,"__call__"))(path_entry) finder_notfound=false}catch(e){if(e.__class__ !==_b_.ImportError){throw e}}} if(finder_notfound){$B.path_importer_cache[path_entry]=_b_.None}} if($B.is_none(finder)){continue} -var find_spec=_b_.getattr(finder,"find_spec"),fs_func=typeof find_spec=="function" ? 
+var find_spec=$B.$getattr(finder,"find_spec"),fs_func=typeof find_spec=="function" ? find_spec : -_b_.getattr(find_spec,"__call__") +$B.$getattr(find_spec,"__call__") var spec=fs_func(fullname,prev_module) if(!$B.is_none(spec)){return spec}} return _b_.None}} @@ -8943,39 +9562,9 @@ $B.set_func_names(finder_path,"") for(var method in finder_path){if(typeof finder_path[method]=="function"){finder_path[method]=_b_.classmethod.$factory( finder_path[method])}} finder_path.$factory=function(){return{__class__:finder_path}} -var vfs_hook={__class__:_b_.type,__mro__:[_b_.object],$infos:{__module__:"builtins",__name__:"VfsPathFinder"},load_vfs:function(self){try{var code=$download_module({__name__:""},self.path)} -catch(e){self.vfs=undefined -throw new _b_.ImportError.$factory(e.$message ||e.message)} -eval(code) -code=null -try{self.vfs=$vfs} -catch(e){throw new _b_.ImportError.$factory("Expecting $vfs var in VFS file")} -$B.path_importer_cache[self.path+"/"]=self},find_spec:function(self,fullname,module){if(self.vfs===undefined){try{vfs_hook.load_vfs(self)} -catch(e){console.log("Could not load VFS while importing '"+ -fullname+"'") -return _b_.None}} -self.__class__.vfs=self.vfs -var stored=self.vfs[fullname] -if(stored===undefined){return _b_.None} -var is_package=stored[2] -return new_spec({name :fullname,loader:finder_VFS, -origin :self.path+'#'+fullname, -submodule_search_locations:is_package?[self.path]: -_b_.None,loader_state:{stored:stored}, -cached:_b_.None,parent:is_package ? fullname :parent_package(fullname),has_location:_b_.True})},invalidate_caches:function(self){self.vfs=undefined}} -vfs_hook.$factory=function(path){if(path.substr(-1)=='/'){path=path.slice(0,-1)} -var ext=path.substr(-7) -if(ext !='.vfs.js'){throw _b_.ImportError.$factory('VFS file URL must end with .vfs.js extension');} -self={__class__:vfs_hook,path:path} -return self} -$B.set_func_names(vfs_hook,"") var url_hook={__class__:_b_.type,__mro__:[_b_.object],__repr__:function(self){return "'},$infos:{__module__:"builtins",__name__:"UrlPathFinder"},find_spec :function(self,fullname,module){var loader_data={},notfound=true,hint=self.hint,base_path=self.path_entry+fullname.match(/[^.]+$/g)[0],modpaths=[] var tryall=hint===undefined -if(tryall ||hint=="js"){ -modpaths=[[base_path+".js","js",false]]} -if(tryall ||hint=='pyc.js'){ -modpaths=modpaths.concat([[base_path+".pyc.js","pyc.js",false],[base_path+"/__init__.pyc.js","pyc.js",true]])} if(tryall ||hint=='py'){ modpaths=modpaths.concat([[base_path+".py","py",false],[base_path+"/__init__.py","py",true]])} for(var j=0;notfound && j < modpaths.length;++j){try{var file_info=modpaths[j],module={__name__:fullname,$is_package:false} @@ -8995,27 +9584,26 @@ submodule_search_locations:loader_data.is_package? cached:_b_.None,parent:loader_data.is_package? fullname : parent_package(fullname),has_location:_b_.True})} return _b_.None},invalidate_caches :function(self){}} -url_hook.$factory=function(path_entry,hint){return{__class__:url_hook,path_entry:path_entry,hint:hint}} +url_hook.$factory=function(path_entry,hint){return{ +__class__:url_hook,path_entry:path_entry.endsWith("/")? 
path_entry :path_entry+"/",hint:hint}} $B.set_func_names(url_hook,"") $B.path_importer_cache={}; var _sys_paths=[[$B.script_dir+"/","py"],[$B.brython_path+"Lib/","py"],[$B.brython_path+"Lib/site-packages/","py"],[$B.brython_path+"libs/","js"]] for(var i=0;i < _sys_paths.length;++i){var _path=_sys_paths[i],_type=_path[1] _path=_path[0] $B.path_importer_cache[_path]=url_hook.$factory(_path,_type)} -delete _path -delete _type -delete _sys_paths function import_error(mod_name){var exc=_b_.ImportError.$factory(mod_name) exc.name=mod_name throw exc} -$B.$__import__=function(mod_name,globals,locals,fromlist,level){ +$B.$__import__=function(mod_name,globals,locals,fromlist,level){var $test=false +if($test){console.log("__import__",mod_name)} var from_stdlib=false if(globals.$jsobj && globals.$jsobj.__file__){var file=globals.$jsobj.__file__ if((file.startsWith($B.brython_path+"Lib/")&& ! file.startsWith($B.brython_path+"Lib/site-packages/"))|| file.startsWith($B.brython_path+"libs/")|| file.startsWith("VFS.")){from_stdlib=true}} -var modobj=$B.imported[mod_name],parsed_name=mod_name.split('.') +var modobj=$B.imported[mod_name],parsed_name=mod_name.split('.'),has_from=fromlist.length > 0 if(modobj==_b_.None){ import_error(mod_name)} if(modobj===undefined){ @@ -9028,19 +9616,32 @@ if(modobj==_b_.None){ import_error(_mod_name)}else if(modobj===undefined){try{$B.import_hooks(_mod_name,__path__,from_stdlib)}catch(err){delete $B.imported[_mod_name] throw err} if($B.is_none($B.imported[_mod_name])){import_error(_mod_name)}else{ -if(_parent_name){_b_.setattr($B.imported[_parent_name],parsed_name[i],$B.imported[_mod_name])}}} -if(i < len){try{__path__=_b_.getattr($B.imported[_mod_name],"__path__")}catch(e){ +if(_parent_name){_b_.setattr($B.imported[_parent_name],parsed_name[i],$B.imported[_mod_name])}}}else if($B.imported[_parent_name]&& +$B.imported[_parent_name][parsed_name[i]]===undefined){ +_b_.setattr($B.imported[_parent_name],parsed_name[i],$B.imported[_mod_name])} +if(i < len){try{__path__=$B.$getattr($B.imported[_mod_name],"__path__")}catch(e){ if(i==len-1 && $B.imported[_mod_name][parsed_name[len]]&& $B.imported[_mod_name][parsed_name[len]].__class__=== module){return $B.imported[_mod_name][parsed_name[len]]} -import_error(_mod_name)}}}}else{if($B.imported[parsed_name[0]]&& -parsed_name.length > 1){try{$B.$setattr($B.imported[parsed_name[0]],parsed_name[1],modobj)}catch(err){console.log("error",parsed_name,modobj) +if(has_from){ +import_error(mod_name)}else{ +var exc=_b_.ModuleNotFoundError.$factory() +exc.msg="No module named '"+mod_name+"'; '"+ +_mod_name+"' is not a package" +exc.args=$B.fast_tuple([exc.msg]) +exc.name=mod_name +exc.path=_b_.None +throw exc}}}}}else{if($B.imported[parsed_name[0]]&& +parsed_name.length==2){try{if($B.imported[parsed_name[0]][parsed_name[1]]===undefined){$B.$setattr($B.imported[parsed_name[0]],parsed_name[1],modobj)}}catch(err){console.log("error",parsed_name,modobj) throw err}}} if(fromlist.length > 0){ return $B.imported[mod_name]}else{ return $B.imported[parsed_name[0]]}} -$B.$import=function(mod_name,fromlist,aliases,locals){var parts=mod_name.split(".") +$B.$import=function(mod_name,fromlist,aliases,locals){fromlist=fromlist===undefined ?[]:fromlist +aliases=aliases===undefined ?{}:aliases +locals=locals===undefined ?{}:locals +var parts=mod_name.split(".") if(mod_name[mod_name.length-1]=="."){parts.pop()} var norm_parts=[],prefix=true for(var i=0,len=parts.length;i < len;i++){var p=parts[i] @@ -9058,11 +9659,11 @@ if(__import__===undefined){ 
__import__=$B.$__import__} var importer=typeof __import__=="function" ? __import__ : -_b_.getattr(__import__,"__call__"),modobj=importer(mod_name,globals,undefined,fromlist,0) +$B.$getattr(__import__,"__call__"),modobj=importer(mod_name,globals,undefined,fromlist,0) if(! fromlist ||fromlist.length==0){ var alias=aliases[mod_name] if(alias){locals[alias]=$B.imported[mod_name]}else{locals[$B.to_alias(norm_parts[0])]=modobj}}else{var __all__=fromlist,thunk={} -if(fromlist && fromlist[0]=="*"){__all__=_b_.getattr(modobj,"__all__",thunk); +if(fromlist && fromlist[0]=="*"){__all__=$B.$getattr(modobj,"__all__",thunk); if(__all__ !==thunk){ aliases={}}} if(__all__===thunk){ @@ -9070,28 +9671,34 @@ for(var attr in modobj){if(attr[0]!=="_"){locals[attr]=modobj[attr]}}}else{ for(var i=0,l=__all__.length;i < l;++i){var name=__all__[i] var alias=aliases[name]||name try{ -locals[alias]=$B.$getattr(modobj,name);}catch($err1){ +locals[alias]=$B.$getattr(modobj,name)}catch($err1){ try{var name1=$B.from_alias(name) -$B.$getattr(__import__,'__call__')(mod_name+'.'+name1,globals,undefined,[],0); -locals[alias]=$B.$getattr(modobj,name1);}catch($err3){ +$B.$getattr(__import__,'__call__')(mod_name+'.'+name1,globals,undefined,[],0) +locals[alias]=$B.$getattr(modobj,name1)}catch($err3){ if(mod_name==="__future__"){ var frame=$B.last($B.frames_stack),line_info=frame[3].$line_info,line_elts=line_info.split(','),line_num=parseInt(line_elts[0]) $B.$SyntaxError(frame[2],"future feature "+name+" is not defined",current_frame[3].src,undefined,line_num)} -if($err3.$py_error){throw _b_.ImportError.$factory( +if($err3.$py_error){var errname=$err3.__class__.$infos.__name__ +if($err3.__class__ !==_b_.ImportError && +$err3.__class__ !==_b_.ModuleNotFoundError){$B.handle_error($err3)} +throw _b_.ImportError.$factory( "cannot import name '"+name+"'")} -console.log($err3) -console.log($B.last($B.frames_stack)) +if($B.debug > 1){console.log($err3) +console.log($B.last($B.frames_stack))} throw _b_.ImportError.$factory( -"cannot import name '"+name+"'")}}}}}} -$B.$path_hooks=[vfs_hook,url_hook] +"cannot import name '"+name+"'")}}}} +return locals}} +$B.import_all=function(locals,module){ +for(var attr in module){if(attr.startsWith("$$")){locals[attr]=module[attr]}else if('_$'.indexOf(attr.charAt(0))==-1){locals[attr]=module[attr]}}} +$B.$path_hooks=[url_hook] $B.$meta_path=[finder_VFS,finder_stdlib_static,finder_path] -$B.finders={VFS:finder_VFS,stdlib_static:finder_stdlib_static,path:finder_path} +$B.finders={VFS:finder_VFS,stdlib_static:finder_stdlib_static,path:finder_path,CPython:finder_cpython} function optimize_import_for_path(path,filetype){if(path.slice(-1)!="/"){path=path+"/" } var value=(filetype=='none')? 
_b_.None : url_hook.$factory(path,filetype) $B.path_importer_cache[path]=value} var Loader={__class__:$B.$type,__mro__:[_b_.object],__name__ :"Loader"} -var _importlib_module={__class__ :module,__name__ :"_importlib",Loader:Loader,VFSFinder:finder_VFS,StdlibStatic:finder_stdlib_static,ImporterPath:finder_path,VFSPathFinder :vfs_hook,UrlPathFinder:url_hook,optimize_import_for_path :optimize_import_for_path} +var _importlib_module={__class__ :module,__name__ :"_importlib",Loader:Loader,VFSFinder:finder_VFS,StdlibStatic:finder_stdlib_static,ImporterPath:finder_path,UrlPathFinder:url_hook,optimize_import_for_path :optimize_import_for_path} _importlib_module.__repr__=_importlib_module.__str__=function(){return ""} $B.imported["_importlib"]=_importlib_module})(__BRYTHON__) ; @@ -9102,7 +9709,7 @@ function $err(op,other){var msg="unsupported operand type(s) for "+op+ ": 'float' and '"+$B.class_name(other)+"'" throw _b_.TypeError.$factory(msg)} function float_value(obj){ -return obj.$value !==undefined ? obj.$value :obj} +return obj.$brython_value !==undefined ? obj.$brython_value :obj} var float={__class__:_b_.type,__dir__:object.__dir__,$infos:{__module__:"builtins",__name__:"float"},$is_class:true,$native:true,$descriptors:{"numerator":true,"denominator":true,"imag":true,"real":true}} float.numerator=function(self){return float_value(self)} float.denominator=function(self){return _b_.int.$factory(1)} @@ -9189,13 +9796,23 @@ var prec=fmt.precision if(prec==0){return Math.round(self)+""} var res=self.toFixed(prec),pt_pos=res.indexOf(".") if(fmt.type !==undefined && -(fmt.type=="%" ||fmt.type.toLowerCase()=="f")){if(pt_pos==-1){res+="."+"0".repeat(fmt.precision)} -else{var missing=fmt.precision-res.length+pt_pos+1 -if(missing > 0){res+="0".repeat(missing)}}}else{var res1=self.toExponential(fmt.precision-1),exp=parseInt(res1.substr(res1.search("e")+1)) +(fmt.type=="%" ||fmt.type.toLowerCase()=="f")){if(pt_pos==-1){res+="."+"0".repeat(fmt.precision)}else{var missing=fmt.precision-res.length+pt_pos+1 +if(missing > 0){res+="0".repeat(missing)}}}else if(fmt.type && fmt.type.toLowerCase()=="g"){var exp_fmt=preformat(self,{type:"e"}).split("e"),exp=parseInt(exp_fmt[1]) +if(-4 <=exp && exp < fmt.precision){res=preformat(self,{type:"f",precision:fmt.precision-1-exp})}else{res=preformat(self,{type:"e",precision:fmt.precision-1})} +var parts=res.split("e") +if(fmt.alternate){if(parts[0].search(/\./)==-1){parts[0]+='.'}}else{if(parts[1]){var signif=parts[0] +while(signif.endsWith("0")){signif=signif.substr(0,signif.length-1)} +if(signif.endsWith(".")){signif=signif.substr(0,signif.length-1)} +parts[0]=signif}} +res=parts.join("e") +if(fmt.type=="G"){res=res.toUpperCase()} +return res}else if(fmt.type===undefined){fmt.type="g" +res=preformat(self,fmt) +fmt.type=undefined}else{var res1=self.toExponential(fmt.precision-1),exp=parseInt(res1.substr(res1.search("e")+1)) if(exp <-4 ||exp >=fmt.precision-1){var elts=res1.split("e") while(elts[0].endsWith("0")){elts[0]=elts[0].substr(0,elts[0].length-1)} res=elts.join("e")}}}else{var res=_b_.str.$factory(self)} -if(fmt.type===undefined||"gGn".indexOf(fmt.type)!=-1){ +if(fmt.type===undefined ||"gGn".indexOf(fmt.type)!=-1){ if(res.search("e")==-1){while(res.charAt(res.length-1)=="0"){res=res.substr(0,res.length-1)}} if(res.charAt(res.length-1)=="."){if(fmt.type===undefined){res+="0"} else{res=res.substr(0,res.length-1)}}} @@ -9224,13 +9841,17 @@ r[0]=(r[0]-hipart)*Math.pow(2,31) var x=hipart+_b_.int.$factory(r[0])+(r[1]<< 15) return x & 0xFFFFFFFF} 
_b_.$isninf=function(x){var x1=x -if(isinstance(x,float)){x1=x.valueOf()} +if(isinstance(x,float)){x1=float.numerator(x)} return x1==-Infinity ||x1==Number.NEGATIVE_INFINITY} _b_.$isinf=function(x){var x1=x -if(isinstance(x,float)){x1=x.valueOf()} +if(isinstance(x,float)){x1=float.numerator(x)} return x1==Infinity ||x1==-Infinity || x1==Number.POSITIVE_INFINITY ||x1==Number.NEGATIVE_INFINITY} -_b_.$fabs=function(x){return x > 0 ? float.$factory(x):float.$factory(-x)} +_b_.$isnan=function(x){var x1=x +if(isinstance(x,float)){x1=float.numerator(x)} +return isNaN(x1)} +_b_.$fabs=function(x){if(x==0){return new Number(0)} +return x > 0 ? float.$factory(x):float.$factory(-x)} _b_.$frexp=function(x){var x1=x if(isinstance(x,float)){x1=x.valueOf()} if(isNaN(x1)||_b_.$isinf(x1)){return[x1,-1]} @@ -9305,15 +9926,14 @@ if(isinstance(other,_b_.bool)){var bool_value=0 if(other.valueOf()){bool_value=1} return new Number(self*bool_value)} if(isinstance(other,_b_.complex)){return $B.make_complex(float.$factory(self*other.$real),float.$factory(self*other.$imag))} -if(hasattr(other,"__rmul__")){return getattr(other,"__rmul__")(self)} -$err("*",other)} +return _b_.NotImplemented} float.__ne__=function(self,other){var res=float.__eq__(self,other) return res===_b_.NotImplemented ? res :! res} float.__neg__=function(self,other){return float.$factory(-float_value(self))} float.__new__=function(cls,value){if(cls===undefined){throw _b_.TypeError.$factory("float.__new__(): not enough arguments")}else if(! _b_.isinstance(cls,_b_.type)){throw _b_.TypeError.$factory("float.__new__(X): X is not a type object")} if(cls===float){return float.$factory(value)} return{ -__class__:cls,__dict__:_b_.dict.$factory(),$value:value ||0}} +__class__:cls,__dict__:$B.empty_dict(),$brython_value:value ||0}} float.__pos__=function(self){return float_value(self)} float.__pow__=function(self,other){self=float_value(self) other=float_value(other) @@ -9336,12 +9956,29 @@ return float.$factory(Math.pow(self,other))}else if(isinstance(other,_b_.complex return $B.make_complex(preal*Math.cos(ln),preal*Math.sin(ln))} if(hasattr(other,"__rpow__")){return getattr(other,"__rpow__")(self)} $err("** or pow()",other)} -float.__repr__=float.__str__=function(self){self=float_value(self) -if(self.valueOf()==Infinity){return 'inf'} -if(self.valueOf()==-Infinity){return '-inf'} -if(isNaN(self.valueOf())){return 'nan'} -var res=self.valueOf()+"" +float.__repr__=float.__str__=function(self){self=float_value(self).valueOf() +if(self==Infinity){return 'inf'} +if(self==-Infinity){return '-inf'} +if(isNaN(self)){return 'nan'} +if(self===0){if(1/self===-Infinity){return '-0.0'} +return '0.0'} +var res=self+"" if(res.indexOf(".")==-1){res+=".0"} +var x,y +[x,y]=res.split('.') +if(x.length > 16){var exp=x.length-1,int_part=x[0],dec_part=x.substr(1)+y +while(dec_part.endsWith("0")){dec_part=dec_part.substr(0,dec_part.length-1)} +var mant=int_part +if(dec_part.length > 0){mant+='.'+dec_part} +return mant+'e+'+exp}else if(x=="0"){var exp=0 +while(exp < y.length && y.charAt(exp)=="0"){exp++} +if(exp > 3){ +var rest=y.substr(exp),exp=(exp+1).toString() +while(rest.endsWith("0")){rest=rest.substr(0,res.length-1)} +var mant=rest[0] +if(rest.length > 1){mant+='.'+rest.substr(1)} +if(exp.length==1){exp='0'+exp} +return mant+'e-'+exp}} return _b_.str.$factory(res)} float.__setattr__=function(self,attr,value){if(self.constructor===Number){if(float[attr]===undefined){throw _b_.AttributeError.$factory("'float' object has no attribute '"+ attr+"'")}else{throw 
_b_.AttributeError.$factory("'float' object attribute '"+ @@ -9353,7 +9990,7 @@ other=float_value(other) if(isinstance(other,[_b_.int,float])){if(other.valueOf()==0){throw ZeroDivisionError.$factory("division by zero")} return float.$factory(self/other)} if(isinstance(other,_b_.complex)){var cmod=other.$real*other.$real+other.$imag*other.$imag -if(cmod==0){throw ZeroDivisionError.$factory("division by zero")} +if(cmod==0){throw _b_.ZeroDivisionError.$factory("division by zero")} return $B.make_complex(float.$factory(self*other.$real/cmod),float.$factory(-self*other.$imag/cmod))} if(hasattr(other,"__rtruediv__")){return getattr(other,"__rtruediv__")(self)} $err("/",other)} @@ -9364,7 +10001,9 @@ if(isinstance(other,float)){return float.$factory(self-other)} if(isinstance(other,_b_.bool)){var bool_value=0 if(other.valueOf()){bool_value=1} return float.$factory(self-bool_value)} -if(isinstance(other,_b_.complex)){return $B.make_complex(self-other.$real,-other.$imag)} +if(isinstance(other,_b_.complex)){if(other.$imag==0){ +return $B.make_complex(self-other.$real,0)} +return $B.make_complex(self-other.$real,-other.$imag)} if(hasattr(other,"__rsub__")){return getattr(other,"__rsub__")(self)} $err("-",other)} $op_func+="" @@ -9415,10 +10054,9 @@ return new Number(1) case false: return new Number(0)} if(typeof value=="number"){return new Number(value)} -if(isinstance(value,float)){return value} +if(isinstance(value,float)){return float_value(value)} if(isinstance(value,bytes)){var s=getattr(value,"decode")("latin-1") return float.$factory(getattr(value,"decode")("latin-1"))} -if(hasattr(value,"__float__")){return $FloatClass(getattr(value,"__float__")())} if(typeof value=="string"){value=value.trim() switch(value.toLowerCase()){case "+inf": case "inf": @@ -9444,6 +10082,8 @@ _b_.str.encode(value,"latin-1") throw _b_.ValueError.$factory( "Could not convert to float(): '"+ _b_.str.$factory(value)+"'")}}} +var klass=value.__class__ ||$B.get_class(value),num_value=$B.to_num(value,["__float__","__index__"]) +if(num_value !==null){return num_value} throw _b_.TypeError.$factory("float() argument must be a string or a "+ "number, not '"+$B.class_name(value)+"'")} $B.$FloatClass=$FloatClass @@ -9458,11 +10098,13 @@ _b_.float=float})(__BRYTHON__) ; ;(function($B){var _b_=$B.builtins function $err(op,other){var msg="unsupported operand type(s) for "+op+ -": 'int' and '"+$B.class_name(other)+"'" +" : 'int' and '"+$B.class_name(other)+"'" throw _b_.TypeError.$factory(msg)} function int_value(obj){ -return obj.$value !==undefined ? obj.$value :obj} +return obj.$brython_value !==undefined ? obj.$brython_value :obj} var int={__class__:_b_.type,__dir__:_b_.object.__dir__,$infos:{__module__:"builtins",__name__:"int"},$is_class:true,$native:true,$descriptors:{"numerator":true,"denominator":true,"imag":true,"real":true}} +int.as_integer_ratio=function(){var $=$B.args("as_integer_ratio",1,{self:null},["self"],arguments,{},null,null) +return $B.$list([$.self,1])} int.from_bytes=function(){var $=$B.args("from_bytes",3,{bytes:null,byteorder:null,signed:null},["bytes","byteorder","signed"],arguments,{signed:false},null,null) var x=$.bytes,byteorder=$.byteorder,signed=$.signed,_bytes,_len if(_b_.isinstance(x,[_b_.bytes,_b_.bytearray])){_bytes=x.source @@ -9507,7 +10149,7 @@ __class__:_b_.bytes,source:res}} int.__abs__=function(self){return _b_.abs(self)} int.__bool__=function(self){return int_value(self).valueOf()==0 ? 
false :true} int.__ceil__=function(self){return Math.ceil(int_value(self))} -int.__divmod__=function(self,other){return _b_.divmod(self,other)} +int.__divmod__=function(self,other){return $B.fast_tuple([int.__floordiv__(self,other),int.__mod__(self,other)])} int.__eq__=function(self,other){ if(other===undefined){return self===int} if(_b_.isinstance(other,int)){return self.valueOf()==int_value(other).valueOf()} @@ -9557,9 +10199,10 @@ int.__floordiv__=function(self,other){if(other.__class__===$B.long_int){return $ if(_b_.isinstance(other,int)){other=int_value(other) if(other==0){throw _b_.ZeroDivisionError.$factory("division by zero")} return Math.floor(self/other)} -if(_b_.isinstance(other,_b_.float)){if(!other.valueOf()){throw _b_.ZeroDivisionError.$factory("division by zero")} -return Math.floor(self/other)} -if(hasattr(other,"__rfloordiv__")){return $B.$getattr(other,"__rfloordiv__")(self)} +if(_b_.isinstance(other,_b_.float)){other=_b_.float.numerator(other) +if(!other.valueOf()){throw _b_.ZeroDivisionError.$factory("division by zero")} +return new Number(Math.floor(self/other))} +if(_b_.hasattr(other,"__rfloordiv__")){return $B.$getattr(other,"__rfloordiv__")(self)} $err("//",other)} int.__hash__=function(self){if(self===undefined){return int.__hashvalue__ ||$B.$py_next_hash--} return self.valueOf()} @@ -9583,7 +10226,7 @@ else if(other===true){other=1} if(other==0){throw _b_.ZeroDivisionError.$factory( "integer division or modulo by zero")} return(self % other+other)% other} -if(hasattr(other,"__rmod__")){return $B.$getattr(other,"__rmod__")(self)} +if(_b_.hasattr(other,"__rmod__")){return $B.$getattr(other,"__rmod__")(self)} $err("%",other)} int.__mro__=[_b_.object] int.__mul__=function(self,other){var val=self.valueOf() @@ -9592,7 +10235,7 @@ if(_b_.isinstance(other,int)){other=int_value(other) var res=self*other if(res > $B.min_int && res < $B.max_int){return res} else{return int.$factory($B.long_int.__mul__($B.long_int.$factory(self),$B.long_int.$factory(other)))}} -if(_b_.isinstance(other,_b_.float)){return new Number(self*other)} +if(_b_.isinstance(other,_b_.float)){return new Number(self*_b_.float.numerator(other))} if(_b_.isinstance(other,_b_.bool)){if(other.valueOf()){return self} return int.$factory(0)} if(_b_.isinstance(other,_b_.complex)){return $B.make_complex(int.__mul__(self,other.$real),int.__mul__(self,other.$imag))} @@ -9609,16 +10252,24 @@ int.__neg__=function(self){return-self} int.__new__=function(cls,value){if(cls===undefined){throw _b_.TypeError.$factory("int.__new__(): not enough arguments")}else if(! 
_b_.isinstance(cls,_b_.type)){throw _b_.TypeError.$factory("int.__new__(X): X is not a type object")} if(cls===int){return int.$factory(value)} return{ -__class__:cls,__dict__:_b_.dict.$factory(),$value:value ||0}} +__class__:cls,__dict__:$B.empty_dict(),$brython_value:value ||0}} int.__pos__=function(self){return self} -int.__pow__=function(self,other,z){if(_b_.isinstance(other,int)){other=int_value(other) +function extended_euclidean(a,b){var d,u,v +if(b==0){return[a,1,0]}else{[d,u,v]=extended_euclidean(b,a % b) +return[d,v,u-Math.floor(a/b)*v]}} +int.__pow__=function(self,other,z){if(typeof other=="number" ||_b_.isinstance(other,int)){other=int_value(other) switch(other.valueOf()){case 0: return int.$factory(1) case 1: return int.$factory(self.valueOf())} -if(z !==undefined && z !==null){ +if(z !==undefined && z !==_b_.None){ if(z==1){return 0} var result=1,base=self % z,exponent=other,long_int=$B.long_int +if(exponent < 0){var gcd,inv,_ +[gcd,inv,_]=extended_euclidean(self,z) +if(gcd !==1){throw _b_.ValueError.$factory("not relative primes: "+ +self+' and '+z)} +return int.__pow__(inv,-exponent,z)} while(exponent > 0){if(exponent % 2==1){if(result*base > $B.max_int){result=long_int.__mul__( long_int.$factory(result),long_int.$factory(base)) result=long_int.__mod__(result,z)}else{result=(result*base)% z}} @@ -9629,12 +10280,16 @@ return result} var res=Math.pow(self.valueOf(),other.valueOf()) if(res > $B.min_int && res < $B.max_int){return res} else if(res !==Infinity && !isFinite(res)){return res} -else{return int.$factory($B.long_int.__pow__($B.long_int.$factory(self),$B.long_int.$factory(other)))}} -if(_b_.isinstance(other,_b_.float)){if(self >=0){return new Number(Math.pow(self,other.valueOf()))} +else{if($B.BigInt){return{ +__class__:$B.long_int,value:($B.BigInt(self)**$B.BigInt(other)).toString(),pos:true}} +return $B.long_int.__pow__($B.long_int.$from_int(self),$B.long_int.$from_int(other))}} +if(_b_.isinstance(other,_b_.float)){other=_b_.float.numerator(other) +if(self >=0){return new Number(Math.pow(self,other))} else{ return _b_.complex.__pow__($B.make_complex(self,0),other)}}else if(_b_.isinstance(other,_b_.complex)){var preal=Math.pow(self,other.$real),ln=Math.log(self) return $B.make_complex(preal*Math.cos(ln),preal*Math.sin(ln))} -if(hasattr(other,"__rpow__")){return $B.$getattr(other,"__rpow__")(self)} +var rpow=$B.$getattr(other,"__rpow__",_b_.None) +if(rpow !==_b_.None){return rpow(self)} $err("**",other)} int.__repr__=function(self){if(self===int){return ""} return self.toString()} @@ -9646,14 +10301,15 @@ $err('>>',other)} int.__setattr__=function(self,attr,value){if(typeof self=="number"){if(int.$factory[attr]===undefined){throw _b_.AttributeError.$factory( "'int' object has no attribute '"+attr+"'")}else{throw _b_.AttributeError.$factory( "'int' object attribute '"+attr+"' is read-only")}} -self.__dict__.$string_dict[attr]=value +_b_.dict.$setitem(self.__dict__,attr,value) return _b_.None} int.__str__=int.__repr__ int.__truediv__=function(self,other){if(_b_.isinstance(other,int)){other=int_value(other) if(other==0){throw _b_.ZeroDivisionError.$factory("division by zero")} if(other.__class__===$B.long_int){return new Number(self/parseInt(other.value))} return new Number(self/other)} -if(_b_.isinstance(other,_b_.float)){if(!other.valueOf()){throw _b_.ZeroDivisionError.$factory("division by zero")} +if(_b_.isinstance(other,_b_.float)){other=_b_.float.numerator(other) +if(!other.valueOf()){throw _b_.ZeroDivisionError.$factory("division by zero")} return new 
Number(self/other)} if(_b_.isinstance(other,_b_.complex)){var cmod=other.$real*other.$real+other.$imag*other.$imag if(cmod==0){throw _b_.ZeroDivisionError.$factory("division by zero")} @@ -9687,8 +10343,10 @@ var $op_func=function(self,other){if(_b_.isinstance(other,int)){other=int_value( if(typeof other=="number"){var res=self.valueOf()-other.valueOf() if(res > $B.min_int && res < $B.max_int){return res} else{return $B.long_int.__sub__($B.long_int.$factory(self),$B.long_int.$factory(other))}}else if(typeof other=="boolean"){return other ? self-1 :self}else{return $B.long_int.__sub__($B.long_int.$factory(self),$B.long_int.$factory(other))}} -if(_b_.isinstance(other,_b_.float)){return new Number(self-other)} -if(_b_.isinstance(other,_b_.complex)){return $B.make_complex(self-other.$real,-other.$imag)} +if(_b_.isinstance(other,_b_.float)){return new Number(self-_b_.float.numerator(other))} +if(_b_.isinstance(other,_b_.complex)){if(other.$imag==0){ +return $B.make_complex(self-other.$real,0)} +return $B.make_complex(self-other.$real,-other.$imag)} if(_b_.isinstance(other,_b_.bool)){var bool_value=0; if(other.valueOf()){bool_value=1} return self-bool_value} @@ -9703,7 +10361,7 @@ opf=opf.replace(new RegExp("sub","gm"),$ops[$op]) eval("int.__"+$ops[$op]+"__ = "+opf)} var $comp_func=function(self,other){if(other.__class__===$B.long_int){return $B.long_int.__lt__(other,$B.long_int.$factory(self))} if(_b_.isinstance(other,int)){other=int_value(other) -return self.valueOf()> other.valueOf()}else if(_b_.isinstance(other,_b_.float)){return self.valueOf()> other.valueOf()}else if(_b_.isinstance(other,_b_.bool)){return self.valueOf()> _b_.bool.__hash__(other)} +return self.valueOf()> other.valueOf()}else if(_b_.isinstance(other,_b_.float)){return self.valueOf()> _b_.float.numerator(other)}else if(_b_.isinstance(other,_b_.bool)){return self.valueOf()> _b_.bool.__hash__(other)} if(_b_.hasattr(other,"__int__")||_b_.hasattr(other,"__index__")){return int.__gt__(self,$B.$GetInt(other))} return _b_.NotImplemented} $comp_func+="" @@ -9723,11 +10381,10 @@ int.$factory=function(value,base){ if(value===undefined){return 0} if(typeof value=="number" && (base===undefined ||base==10)){return parseInt(value)} -if(base !==undefined){if(! _b_.isinstance(value,[_b_.str,_b_.bytes,_b_.bytearray])){throw _b_.TypeError.$factory( -"int() can't convert non-string with explicit base")}} if(_b_.isinstance(value,_b_.complex)){throw _b_.TypeError.$factory("can't convert complex to int")} var $ns=$B.args("int",2,{x:null,base:null},["x","base"],arguments,{"base":10},null,null),value=$ns["x"],base=$ns["base"] -if(_b_.isinstance(value,_b_.float)&& base==10){if(value < $B.min_int ||value > $B.max_int){return $B.long_int.$from_float(value)} +if(_b_.isinstance(value,_b_.float)&& base==10){value=_b_.float.numerator(value) +if(value < $B.min_int ||value > $B.max_int){return $B.long_int.$from_float(value)} else{return value > 0 ? 
Math.floor(value):Math.ceil(value)}} if(!(base >=2 && base <=36)){ if(base !=0){throw _b_.ValueError.$factory("invalid base")}} @@ -9748,7 +10405,7 @@ if(_b_.isinstance(value,_b_.str)){value=value.valueOf()} if(typeof value=="string"){var _value=value.trim() if(_value.length==2 && base==0 && (_value=="0b" ||_value=="0o" ||_value=="0x")){throw _b_.ValueError.$factory("invalid value")} -if(_value.length >2){var _pre=_value.substr(0,2).toUpperCase() +if(_value.length > 2){var _pre=_value.substr(0,2).toUpperCase() if(base==0){if(_pre=="0B"){base=2} if(_pre=="0O"){base=8} if(_pre=="0X"){base=16}}else if(_pre=="0X" && base !=16){invalid(_value,base)} @@ -9764,21 +10421,11 @@ var res=parseInt(value,base) if(res < $B.min_int ||res > $B.max_int){return $B.long_int.$factory(value,base)} return res} if(_b_.isinstance(value,[_b_.bytes,_b_.bytearray])){return int.$factory($B.$getattr(value,"decode")("latin-1"),base)} -var $int=$B.$getattr(value,"__int__",_b_.None) -if($int !==_b_.None){return $int()} -var $index=$B.$getattr(value,"__index__",_b_.None) -if($index !==_b_.None){return $index()} -var $trunc=$B.$getattr(value,"__trunc__",_b_.None) -if($trunc !==_b_.None){var res=$truc(),int_func=$int -if(int_func===_b_.None){throw _b_.TypeError.$factory("__trunc__ returned non-Integral (type "+ -$B.class_name(res)+")")} -var res=int_func() -if(_b_.isinstance(res,int)){return int_value(res)} -throw _b_.TypeError.$factory("__trunc__ returned non-Integral (type "+ -$B.class_name(res)+")")} -throw _b_.TypeError.$factory( +var num_value=$B.to_num(value,["__int__","__index__","__trunc__"]) +if(num_value===null){throw _b_.TypeError.$factory( "int() argument must be a string, a bytes-like "+ "object or a number, not '"+$B.class_name(value)+"'")} +return num_value} $B.set_func_names(int,"builtins") _b_.int=int $B.$bool=function(obj){ @@ -9791,24 +10438,30 @@ if(obj){return true} return false default: if(obj.$is_class){return true} -var missing={},bool_func=$B.$getattr(obj,"__bool__",missing) -if(bool_func===missing){try{return $B.$getattr(obj,"__len__")()> 0} -catch(err){return true}}else{return bool_func()}}} +var klass=obj.__class__ ||$B.get_class(obj),missing={},bool_method=$B.$getattr(klass,"__bool__",missing) +if(bool_method===missing){try{return _b_.len(obj)> 0} +catch(err){return true}}else{var res=$B.$call(bool_method)(obj) +if(res !==true && res !==false){throw _b_.TypeError.$factory("__bool__ should return "+ +"bool, returned "+$B.class_name(res))} +return res}}} var bool={__bases__:[int],__class__:_b_.type,__mro__:[int,_b_.object],$infos:{__name__:"bool",__module__:"builtins"},$is_class:true,$native:true} var methods=$B.op2method.subset("operations","binary","comparisons","boolean") for(var op in methods){var method="__"+methods[op]+"__" bool[method]=(function(op){return function(self,other){var value=self ? 
1 :0 if(int[op]!==undefined){return int[op](value,other)}}})(method)} -bool.__and__=function(self,other){return $B.$bool(int.__and__(self,other))} +bool.__and__=function(self,other){if(_b_.isinstance(other,bool)){return self && other}else if(_b_.isinstance(other,int)){return int.__and__(bool.__index__(self),int.__index__(other))} +return _b_.NotImplemented} bool.__hash__=bool.__index__=bool.__int__=function(self){if(self.valueOf())return 1 return 0} bool.__neg__=function(self){return-$B.int_or_bool(self)} -bool.__or__=function(self,other){return $B.$bool(int.__or__(self,other))} +bool.__or__=function(self,other){if(_b_.isinstance(other,bool)){return self ||other}else if(_b_.isinstance(other,int)){return int.__or__(bool.__index__(self),int.__index__(other))} +return _b_.NotImplemented} bool.__pos__=$B.int_or_bool bool.__repr__=bool.__str__=function(self){return self ? "True" :"False"} bool.__setattr__=function(self,attr){if(_b_.dir(self).indexOf(attr)>-1){var msg="attribute '"+attr+"' of 'int' objects is not writable"}else{var msg="'bool' object has no attribute '"+attr+"'"} throw _b_.AttributeError.$factory(msg)} -bool.__xor__=function(self,other){return self.valueOf()!=other.valueOf()} +bool.__xor__=function(self,other){if(_b_.isinstance(other,bool)){return self ^ other ? true :false}else if(_b_.isinstance(other,int)){return int.__xor__(bool.__index__(self),int.__index__(other))} +return _b_.NotImplemented} bool.$factory=function(){ var $=$B.args("bool",1,{x:null},["x"],arguments,{x:false},null,null) return $B.$bool($.x)} @@ -9818,8 +10471,11 @@ $B.set_func_names(bool,"builtins")})(__BRYTHON__) ;(function($B){ var bltns=$B.InjectBuiltins() eval(bltns) +try{eval("window")}catch(err){window=self} var long_int={__class__:_b_.type,__mro__:[int,object],$infos:{__module__:"builtins",__name__:"int"},$is_class:true,$native:true,$descriptors:{"numerator":true,"denominator":true,"imag":true,"real":true}} function add_pos(v1,v2){ +if(window.BigInt){return{ +__class__:long_int,value:(BigInt(v1)+BigInt(v2)).toString(),pos:true}} var res="",carry=0,iself=v1.length,sv=0,x for(var i=v2.length-1;i >=0 ;i--){iself-- if(iself < 0){sv=0}else{sv=parseInt(v1.charAt(iself))} @@ -9834,6 +10490,30 @@ carry=parseInt(x.charAt(0))} else{res=x+res;carry=0}} if(carry){res=carry+res} return{__class__:long_int,value:res,pos:true}} +var len=((Math.pow(2,53)-1)+'').length-1 +function binary_pos(t){var nb_chunks=Math.ceil(t.length/len),chunks=[],pos,start,nb,bin=[] +for(var i=0;i < nb_chunks;i++){pos=t.length-(i+1)*len +start=Math.max(0,pos) +nb=pos-start +chunks.push(t.substr(start,len+nb))} +chunks=chunks.reverse() +chunks.forEach(function(chunk,i){chunks[i]=parseInt(chunk)}) +var rest +var carry=Math.pow(10,15) +while(chunks[chunks.length-1]> 0){chunks.forEach(function(chunk,i){rest=chunk % 2 +chunks[i]=Math.floor(chunk/2) +if(rest && i < chunks.length-1){chunks[i+1]+=carry}}) +bin.push(rest) +if(chunks[0]==0){chunks.shift()}} +bin=bin.reverse().join('') +return bin} +function binary(obj){var bpos=binary_pos(obj.value) +if(obj.pos){return bpos} +var res='' +for(var i=0,len=bpos.length;i < len;i++){res+=bpos.charAt(i)=="0" ? "1":"0"} +var add1=add_pos(res,"1").value +add1=res.substr(0,res.length-add1.length)+add1 +return add1} function check_shift(shift){ if(! 
isinstance(shift,long_int)){throw TypeError.$factory("shift must be int, not '"+ $B.class_name(shift)+"'")} @@ -9848,7 +10528,36 @@ else if(v1.length < v2.length){return-1} else{if(v1 > v2){return 1} else if(v1 < v2){return-1}} return 0} +function divmod_by_safe_int(t,n){ +if(n==1){return[t,0]} +var len=(Math.floor((Math.pow(2,53)-1)/n)+'').length-1,nb_chunks=Math.ceil(t.length/len), +chunks=[],pos,start,nb,in_base=[] +for(var i=0;i < nb_chunks;i++){pos=t.length-(i+1)*len +start=Math.max(0,pos) +nb=pos-start +chunks.push(t.substr(start,len+nb))} +chunks=chunks.reverse() +chunks.forEach(function(chunk,i){chunks[i]=parseInt(chunk)}) +var rest,carry=Math.pow(10,len),x +chunks.forEach(function(chunk,i){rest=chunk % n +chunks[i]=Math.floor(chunk/n) +if(i < chunks.length-1){ +chunks[i+1]+=carry*rest}}) +while(chunks[0]==0){chunks.shift() +if(chunks.length==0){return[0,rest]}} +x=chunks[0]+'' +chunks.forEach(function(chunk,i){if(i > 0){ +x+="0".repeat(len-chunk.toString().length)+chunk}}) +return[x,rest]} function divmod_pos(v1,v2){ +if(window.BigInt){var a={__class__:long_int,value:(BigInt(v1)/BigInt(v2)).toString(),pos:true},b={__class__:long_int,value:(BigInt(v1)% BigInt(v2)).toString(),pos:true} +return[a,b]} +var iv1=parseInt(v1),iv2=parseInt(v2),res1 +if(iv1 < $B.max_int && iv2 < $B.max_int){var rest=iv1 % iv2,quot=Math.floor(iv1/iv2).toString() +var res1=[{__class__:long_int,value:quot.toString(),pos:true},{__class__:long_int,value:rest.toString(),pos:true} +] +return res1}else if(iv2 < $B.max_int){var res_safe=divmod_by_safe_int(v1,iv2) +return[long_int.$factory(res_safe[0]),long_int.$factory(res_safe[1])]} var quotient,mod if(comp_pos(v1,v2)==-1){ quotient="0" @@ -9860,6 +10569,8 @@ var right=v1.substr(left.length) var mv2={} while(true){ var candidate=Math.floor(parseInt(left)/parseInt(v2))+"" +if(candidate=="10"){ +candidate="9"} if(mv2[candidate]===undefined){mv2[candidate]=mul_pos(v2,candidate).value} if(comp_pos(left,mv2[candidate])==-1){ candidate-- @@ -9876,7 +10587,10 @@ for(var i=0;i < nb;i++){var pos=len-size*(i+1) if(pos < 0){size+=pos;pos=0} chunks.push(parseInt(s.substr(pos,size)))} return chunks} -function mul_pos(x,y){ +function mul_pos(x,y){if(window.BigInt){return{__class__:long_int,value:(BigInt(x)*BigInt(y)).toString(),pos:true}} +var ix=parseInt(x),iy=parseInt(y),z=ix*iy +if(z < $B.max_int){return{ +__class__:long_int,value:z.toString(),pos:true}} var chunk_size=6,cx=split_chunks(x,chunk_size),cy=split_chunks(y,chunk_size) var products={},len=cx.length+cy.length for(var i=0;i < len-1;i++){products[i]=0} @@ -9893,20 +10607,34 @@ while(products[i]!==undefined){s=products[i].toString() if(products[i+1]!==undefined){s="0".repeat(chunk_size-s.length)+s} result=s+result i++} -return long_int.$factory(result)} +try{return long_int.$factory(result)}catch(err){console.log(x,y,products,result) +throw err}} function sub_pos(v1,v2){ +if(window.BigInt){return{ +__class__:long_int,value:(BigInt(v1)-BigInt(v2)).toString(),pos:true}} var res="",carry=0,i1=v1.length,sv=0,x for(var i=v2.length-1;i >=0;i--){i1-- sv=parseInt(v1.charAt(i1)) x=(sv-carry-parseInt(v2.charAt(i))) +if(isNaN(x)){console.log("x is NaN",v1.length,v2.length,i,i1,sv,carry,i,v2.charAt(i))} if(x < 0){res=(10+x)+res;carry=1} else{res=x+res;carry=0}} +if(res.startsWith("NaN")){alert(res)} while(i1 > 0){i1-- x=(parseInt(v1.charAt(i1))-carry) if(x < 0){res=(10+x)+res;carry=1} else{res=x+res;carry=0}} while(res.charAt(0)=="0" && res.length > 1){res=res.substr(1)} +if(res.startsWith("NaN")){console.log("hoho !!",v1,v2,v1 
>=v2,res)} return{__class__:long_int,value:res,pos:true}} +function to_BigInt(x){var res=$B.BigInt(x.value) +if(x.pos){return res} +return-res} +function from_BigInt(y){var pos=y >=0 +y=y.toString() +y=y.endsWith("n")? y.substr(0,y.length-1):y +y=y.startsWith('-')? y.substr(1):y +return intOrLong({__class__:long_int,value:y,pos:pos})} long_int.$from_float=function(value){var s=Math.abs(value).toString(),v=s if(s.search("e")>-1){var t=/-?(\d)(\.\d+)?e([+-])(\d*)/.exec(s),n1=t[1],n2=t[2],pos=t[3],exp=t[4] if(pos=="+"){if(n2===undefined){v=n1+"0".repeat(exp-1)}else{v=n1+n2+"0".repeat(exp-1-n2.length)}}} @@ -9914,7 +10642,8 @@ return{__class__:long_int,value:v,pos:value >=0}} long_int.__abs__=function(self){return{__class__:long_int,value:self.value,pos:true}} long_int.__add__=function(self,other){if(isinstance(other,_b_.float)){return _b_.float.$factory(parseInt(self.value)+other.value)} if(typeof other=="number"){other=long_int.$factory(_b_.str.$factory(other))}else if(other.__class__ !==long_int){if(isinstance(other,_b_.bool)){other=long_int.$factory(other ? 1 :0)}else if(isinstance(other,int)){ -other=long_int.$factory(_b_.str.$factory(_b_.int.__index__(other)))}} +other=long_int.$factory(_b_.str.$factory(_b_.int.__index__(other)))}else{return _b_.NotImplemented}} +if($B.BigInt){} var res if(self.pos && other.pos){ return add_pos(self.value,other.value)}else if(! self.pos && ! other.pos){ @@ -9937,32 +10666,41 @@ res=sub_pos(self.value,other.value) res.pos=false break case 0: -res={__class__:ong_int,value:0,pos:true} +res={__class__:long_int,value:0,pos:true} break case-1: res=sub_pos(other.value,self.value) break} return intOrLong(res)}} long_int.__and__=function(self,other){if(typeof other=="number"){other=long_int.$factory(_b_.str.$factory(other))} -var v1=long_int.__index__(self),v2=long_int.__index__(other) -if(v1.length < v2.length){var temp=v2;v2=v1;v1=temp} -if(v2.charAt(0)=="1"){v2="1".repeat(v1.length-v2.length)+v2} -var start=v1.length-v2.length,res="" -for(var i=0;i < v2.length;i++){if(v1.charAt(start+i)=="1" && v2.charAt(i)=="1"){res+="1"} -else{res+="0"}} -return intOrLong(long_int.$factory(res,2))} +if($B.BigInt){return from_BigInt(to_BigInt(self)& to_BigInt(other))} +var v1=self.value,v2=other.value,temp1,temp2,res="" +var neg=(! self.pos)&&(! other.pos) +if(neg){self=long_int.__neg__(self) +other=long_int.__neg__(other)} +var b1=binary(self),len1=b1.length,b2=binary(other),len2=b2.length,i=1,res='',x1,x2 +while(true){if(i > len1 && i > len2){break} +if(i > len1){x1=self.pos ? "0" :"1"}else{x1=b1.charAt(len1-i)} +if(i > len2){x2=other.pos ? 
"0" :"1"}else{x2=b2.charAt(len2-i)} +if(x1=="1" && x2=="1"){res="1"+res}else{res="0"+res} +i++} +while(res.charAt(0)=="0"){res=res.substr(1)} +res=$B.long_int.$factory(res,2) +if(neg){res.pos=false} +return intOrLong(res)} long_int.__divmod__=function(self,other){if(typeof other=="number"){other=long_int.$factory(_b_.str.$factory(other))} var dm=divmod_pos(self.value,other.value) if(self.pos !==other.pos){if(dm[0].value !="0"){dm[0].pos=false} if(dm[1].value !="0"){ dm[0]=long_int.__sub__(dm[0],long_int.$factory("1")) -dm[1]=long_int.__add__(dm[1],long_int.$factory("1"))}} -return[intOrLong(dm[0]),intOrLong(dm[1])]} +dm[1]=long_int.__sub__(self,long_int.__mul__(other,long_int.$factory(dm[0])))}} +return $B.fast_tuple([intOrLong(dm[0]),intOrLong(dm[1])])} long_int.__eq__=function(self,other){if(typeof other=="number"){other=long_int.$factory(_b_.str.$factory(other))} return self.value==other.value && self.pos==other.pos} long_int.__float__=function(self){return new Number(parseFloat(self.value))} long_int.__floordiv__=function(self,other){if(isinstance(other,_b_.float)){return _b_.float.$factory(parseInt(self.value)/other)} -if(typeof other=="number"){other=long_int.$factory(_b_.str.$factory(other))} +if(typeof other=="number"){var t=self.value,res=divmod_by_safe_int(t,other),pos=other > 0 ? self.pos :!self.pos +return{__class__:long_int,value:res[0],pos:pos}} return intOrLong(long_int.__divmod__(self,other)[0])} long_int.__ge__=function(self,other){if(typeof other=="number"){other=long_int.$factory(_b_.str.$factory(other))} if(self.pos !=other.pos){return ! other.pos} @@ -9993,10 +10731,13 @@ else if(self.value.length < other.value.length){return self.pos} else{return self.pos ? self.value <=other.value : self.value >=other.value}} long_int.__lt__=function(self,other){return !long_int.__ge__(self,other)} -long_int.__lshift__=function(self,shift){var is_long=shift.__class__===long_int,shift_safe +long_int.__lshift__=function(self,shift){if(window.BigInt){if(shift.__class__==long_int){shift=shift.value} +return intOrLong({__class__:long_int,value:(BigInt(self.value)<< BigInt(shift)).toString(),pos:self.pos})} +var is_long=shift.__class__===long_int,shift_safe if(is_long){var shift_value=parseInt(shift.value) if(shift_value < 0){throw _b_.ValueError.$factory('negative shift count')} -if(shift_value < $B.max_int){shift_safe=true;shift=shift_value}} +if(shift_value < $B.max_int){shift_safe=true +shift=shift_value}} if(shift_safe){if(shift_value==0){return self}}else{shift=long_int.$factory(shift) if(shift.value=="0"){return self}} var res=self.value @@ -10019,12 +10760,14 @@ if($B.rich_comp("__eq__",other,0)){return NaN} else if(_b_.getattr(other,"__gt__")(0)){return self} else{return-self}} if(isinstance(other,_b_.float)){return _b_.float.$factory(parseInt(self.value)*other)} +if(typeof other=="number"){other=long_int.$factory(other)} other_value=other.value other_pos=other.pos if(other.__class__ !==long_int && isinstance(other,int)){ var value=int.__index__(other) other_value=_b_.str.$factory(value) other_pos=value > 0} +if($B.BigInt){return from_BigInt(to_BigInt(self)*to_BigInt(other))} var res=mul_pos(self.value,other_value) if(self.pos==other_pos){return intOrLong(res)} res.pos=false @@ -10040,18 +10783,38 @@ for(var i=0;i < v2.length;i++){if(v1.charAt(start+i)=="1" ||v2.charAt(i)=="1"){r else{res+="0"}} return intOrLong(long_int.$factory(res,2))} long_int.__pos__=function(self){return self} -long_int.__pow__=function(self,power,z){if(typeof 
power=="number"){power=long_int.$factory(_b_.str.$factory(power))}else if(isinstance(power,int)){ -power=long_int.$factory(_b_.str.$factory(_b_.int.__index__(power)))}else if(! isinstance(power,long_int)){var msg="power must be a LongDict, not '" +long_int.__pow__=function(self,power,z){if(typeof power=="number"){power=long_int.$from_int(power)}else if(isinstance(power,int)){ +power=long_int.$factory(_b_.str.$factory(_b_.int.__index__(power)))}else if(! isinstance(power,long_int)){var msg="power must be an integer, not '" throw TypeError.$factory(msg+$B.class_name(power)+"'")} if(! power.pos){if(self.value=="1"){return self} return long_int.$factory("0")}else if(power.value=="0"){return long_int.$factory("1")} -var res={__class__:long_int,value:self.value,pos:self.pos},pow=power.value -while(true){pow=sub_pos(pow,"1").value -if(pow=="0"){break} -res=long_int.$factory(long_int.__mul__(res,self)) -if(z !==undefined){res=long_int.__mod__(res,z)}} -return intOrLong(res)} -long_int.__rshift__=function(self,shift){shift=long_int.$factory(shift) +if($B.BigInt){var s=$B.BigInt(self.value),b=$B.BigInt(1),x=$B.BigInt(power.value),z=z===undefined ? z :typeof z=="number" ? $B.BigInt(z): +$B.BigInt(z.value) +if(z===undefined){return{ +__class__:long_int,value:(s**x).toString(),pos:true}} +while(x > 0){if(x % $B.BigInt(2)==1){b=b*s} +x=x/$B.BigInt(2) +if(x > 0){s=s*s} +if(z !==undefined){b=b % z}} +return{__class__:long_int,value:b.toString(),pos:true}} +var b={__class__:long_int,value:"1",pos:true},s=self,pow=power.value,temp +while(true){if(typeof pow=="string" && parseInt(pow)< $B.max_int){pow=parseInt(pow)} +if(pow==0){break}else if(typeof pow=="string"){if(parseInt(pow.charAt(pow.length-1))% 2==1){b=long_int.__mul__(b,s)} +pow=long_int.__floordiv__(pow,2)}else{if(pow % 2==1){if(typeof b=="number" && typeof s=="number" && +(temp=b*s)< $B.max_int){b=temp}else{b=long_int.__mul__(long_int.$factory(b),long_int.$factory(s))}} +pow=Math.floor(pow/2)} +if(pow > 0){if(typeof s=="number" &&(temp=s*s)< $B.max_int){s=temp}else{s=long_int.$factory(s) +s=long_int.__mul__(s,s)}} +if(z !==undefined){b=long_int.__mod__(b,z)}} +return intOrLong(b)} +long_int.__rshift__=function(self,shift){if(window.BigInt){if(shift.__class__===long_int){shift=shift.value} +return intOrLong( +{__class__:long_int,value:(BigInt(self.value)>> BigInt(shift)).toString(),pos:self.pos} +)} +if(typeof shift=="number"){var pow2=Math.pow(2,shift) +if(pow2 < $B.max_int){var res=divmod_by_safe_int(self.value,pow2) +return intOrLong({__class__:long_int,value:res[0],pos:self.pos})}} +shift=long_int.$factory(shift) if(shift.value=="0"){return self} var res=self.value while(true){res=divmod_pos(res,"2")[0].value @@ -10062,8 +10825,10 @@ return intOrLong({__class__:long_int,value:res,pos:self.pos})} long_int.__str__=long_int.__repr__=function(self){var res="" if(! self.pos){res+='-'} return res+self.value} -long_int.__sub__=function(self,other){if(isinstance(other,_b_.float)){return _b_.float.$factory(parseInt(self.value)-other.value)} +long_int.__sub__=function(self,other){if(isinstance(other,_b_.float)){other=other instanceof Number ? 
other :other.$brython_value +return _b_.float.$factory(parseInt(self.value)-other)} if(typeof other=="number"){other=long_int.$factory(_b_.str.$factory(other))} +if($B.BigInt){} var res if(self.pos && other.pos){switch(comp_pos(self.value,other.value)){case 1: res=sub_pos(self.value,other.value) @@ -10100,11 +10865,13 @@ for(var i=0;i < v2.length;i++){if(v1.charAt(start+i)=="1" && v2.charAt(i)=="0"){ else if(v1.charAt(start+i)=="0" && v2.charAt(i)=="1"){res+="1"} else{res+="0"}} return intOrLong(long_int.$factory(res,2))} +long_int.bit_length=function(self){return binary(self).length} long_int.numerator=function(self){return self} long_int.denominator=function(self){return _b_.int.$factory(1)} long_int.imag=function(self){return _b_.int.$factory(0)} long_int.real=function(self){return self} long_int.to_base=function(self,base){ +if(base==2){return binary_pos(self.value)} var res="",v=self.value while(v > 0){var dm=divmod_pos(v,base.toString()) res=parseInt(dm[1].value).toString(base)+res @@ -10129,6 +10896,7 @@ function intOrLong(long){ var v=parseInt(long.value)*(long.pos ? 1 :-1) if(v > MIN_SAFE_INTEGER && v < MAX_SAFE_INTEGER){return v} return long} +long_int.$from_int=function(value){return{__class__:long_int,value:value.toString(),pos:value > 0}} long_int.$factory=function(value,base){if(arguments.length > 2){throw _b_.TypeError.$factory("long_int takes at most 2 arguments ("+ arguments.length+" given)")} if(base===undefined){base=10} @@ -10136,16 +10904,26 @@ else if(!isinstance(base,int)){throw TypeError.$factory("'"+$B.class_name(base)+ "' object cannot be interpreted as an integer")} if(base < 0 ||base==1 ||base > 36){throw ValueError.$factory( "long_int.$factory() base must be >= 2 and <= 36")} -if(isinstance(value,_b_.float)){if(value===Number.POSITIVE_INFINITY || +if(typeof value=="number"){var pos=value > 0,value=Math.abs(value),res +if(isSafeInteger(value)){res=long_int.$from_int(value)} +else if(value.constructor==Number){var s=value.toString(),pos_exp=s.search("e") +if(pos_exp >-1){var mant=s.substr(0,pos_exp),exp=parseInt(s.substr(pos_exp+1)),point=mant.search(/\./) +if(point >-1){var nb_dec=mant.substr(point+1).length +if(nb_dec > exp){var res=mant.substr(0,point)+ +mant.substr(point+1).substr(0,exp) +res=long_int.$from_int(res)}else{var res=mant.substr(0,point)+ +mant.substr(point+1)+'0'.repeat(exp-nb_dec) +res=long_int.$from_int(res)}}else{res=long_int.$from_int(mant+'0'.repeat(exp))}}else{var point=s.search(/\./) +if(point >-1){res=long_int.$from_int(s.substr(0,point))}else{res=long_int.$from_int(s)}}} +else{throw ValueError.$factory( +"argument of long_int is not a safe integer")} +res.pos=pos +return res}else if(isinstance(value,_b_.float)){if(value===Number.POSITIVE_INFINITY || value===Number.NEGATIVE_INFINITY){return value} if(value >=0){value=new Number(Math.round(value.value))} else{value=new Number(Math.ceil(value.value))}}else if(isinstance(value,_b_.bool)){if(value.valueOf()){return int.$factory(1)} -return int.$factory(0)} -if(typeof value=="number"){if(isSafeInteger(value)){value=value.toString()} -else if(value.constructor==Number){value=value.toString()} -else{throw ValueError.$factory( -"argument of long_int is not a safe integer")}}else if(value.__class__===long_int){return value}else if(isinstance(value,int)){ -value=value.$value+""}else if(isinstance(value,_b_.bool)){value=_b_.bool.__int__(value)+""}else if(typeof value !="string"){throw ValueError.$factory( +return int.$factory(0)}else if(value.__class__===long_int){return value}else 
if(isinstance(value,int)){ +value=value.$brython_value+""}else if(isinstance(value,_b_.bool)){value=_b_.bool.__int__(value)+""}else if(typeof value !="string"){throw ValueError.$factory( "argument of long_int must be a string, not "+ $B.class_name(value))} var has_prefix=false,pos=true,start=0 @@ -10160,7 +10938,14 @@ throw ValueError.$factory( while(start < value.length-1 && value.charAt(start)=="0"){start++} value=value.substr(start) var is_digits=digits(base),point=-1 -for(var i=0;i < value.length;i++){if(value.charAt(i)=="." && point==-1){point=i} +for(var i=0;i < value.length;i++){if(value.charAt(i)=="." && point==-1){point=i}else if(false){ +var mant=value.substr(0,i) +if(/^[+-]?\d+$/.exec(value.substr(i+1))){exp=parseInt(value.substr(i+1))}else{throw Error("wrong exp "+value.substr(i+1))} +if(point !=-1){mant=mant.substr(0,point)+mant.substr(point+1) +exp=exp+point-1} +point=-1 +value=mant+"0".repeat(exp-mant.length) +break} else if(! is_digits[value.charAt(i)]){throw ValueError.$factory( 'long_int argument is not a valid number: "'+value+'"')}} if(point !=-1){value=value.substr(0,point)} @@ -10172,7 +10957,8 @@ coef=mul_pos(coef,base.toString()).value} return v10} return{__class__:long_int,value:value,pos:pos}} $B.set_func_names(long_int,"builtins") -$B.long_int=long_int})(__BRYTHON__) +$B.long_int=long_int +$B.fast_long_int=function(value,pos){return{__class__:$B.long_int,value:value,pos:pos}}})(__BRYTHON__) ; ;(function($B){var _b_=$B.builtins function $UnsupportedOpType(op,class1,class2){throw _b_.TypeError.$factory("unsupported operand type(s) for "+ @@ -10187,6 +10973,7 @@ if(!isFinite(mag)&& _rf && _if){ throw _b_.OverflowError.$factory("absolute value too large")} return mag} complex.__bool__=function(self){return(self.$real !=0 ||self.$imag !=0)} +complex.__complex__=function(self){return self} complex.__eq__=function(self,other){if(_b_.isinstance(other,complex)){return self.$real.valueOf()==other.$real.valueOf()&& self.$imag.valueOf()==other.$imag.valueOf()} if(_b_.isinstance(other,_b_.int)){if(self.$imag !=0){return false} @@ -10230,11 +11017,14 @@ $imag=0} res={__class__:complex,$real:$real ||0,$imag:$imag ||0} return res}} $imag=$imag===missing ? 0 :$imag -if(arguments.length==1 && $real.__class__===complex && $imag==0){return $real} -if((_b_.isinstance($real,_b_.float)||_b_.isinstance($real,_b_.int))&& -(_b_.isinstance($imag,_b_.float)||_b_.isinstance($imag,_b_.int))){res={__class__:complex,$real:$real,$imag:$imag} +if(arguments.length==2 && $real.__class__===complex && $imag==0){return $real} +if(_b_.isinstance($real,[_b_.float,_b_.int])&& +_b_.isinstance($imag,[_b_.float,_b_.int])){res={__class__:complex,$real:$real,$imag:$imag} return res} -$real=_convert($real) +var real_to_num=$B.to_num($real,["__complex__","__float__","__index__"]) +if(real_to_num===null){throw _b_.TypeError.$factory("complex() first argument must be a "+ +" string or a number, not '"+$B.class_name($real)+"'")} +$real=real_to_num $imag=_convert($imag) if(! _b_.isinstance($real,_b_.float)&& ! _b_.isinstance($real,_b_.int)&& ! 
_b_.isinstance($real,_b_.complex)){throw _b_.TypeError.$factory("complex() argument must be a string "+ @@ -10259,11 +11049,16 @@ var pw=Math.pow(exp.norm,x)*Math.pow(Math.E,-y*angle),theta=y*Math.log(exp.norm) return make_complex(pw*Math.cos(theta),pw*Math.sin(theta))}else{throw _b_.TypeError.$factory("unsupported operand type(s) "+ "for ** or pow(): 'complex' and '"+ $B.class_name(other)+"'")}} -complex.__str__=complex.__repr__=function(self){if(self.$real==0){if(1/self.$real < 0){if(self.$imag < 0){return "(-0"+self.$imag+"j)"}else if(self.$imag==0 && 1/self.$imag < 0){return "(-0-"+self.$imag+"j)"}else return "(-0+"+self.$imag+"j)"}else{if(self.$imag==0 && 1/self.$imag < 0){return "-"+self.$imag+"j"}else{return self.$imag+"j"}}} -if(self.$imag > 0){return "("+self.$real+"+"+self.$imag+"j)"} -if(self.$imag==0){if(1/self.$imag < 0){return "("+self.$real+"-"+self.$imag+"j)"} -return "("+self.$real+"+"+self.$imag+"j)"} -return "("+self.$real+"-"+(-self.$imag)+"j)"} +complex.__str__=complex.__repr__=function(self){var real=_b_.str.$factory(self.$real),imag=_b_.str.$factory(self.$imag) +if(self.$real instanceof Number && self.$real==parseInt(self.$real)){real=_b_.str.$factory(parseInt(self.$real))} +if(self.$imag instanceof Number && self.$imag==parseInt(self.$imag)){imag=_b_.str.$factory(parseInt(self.$imag)) +if(self.$imag==0 && 1/self.$imag===-Infinity){imag="-0"}} +if(self.$real==0){if(1/self.$real < 0){if(imag.startsWith('-')){return "-0"+imag+"j"} +return "-0+"+imag+"j"}else{return imag+"j"}} +if(self.$imag > 0 ||isNaN(self.$imag)){return "("+real+"+"+imag+"j)"} +if(self.$imag==0){if(1/self.$imag < 0){return "("+real+"-0j)"} +return "("+real+"+0j)"} +return "("+real+"-"+_b_.str.$factory(-self.$imag)+"j)"} complex.__sqrt__=function(self){if(self.$imag==0){return complex(Math.sqrt(self.$real))} var r=self.$real,i=self.$imag,_a=Math.sqrt((r+sqrt)/2),_b=Number.sign(i)*Math.sqrt((-r+sqrt)/2) return make_complex(_a,_b)} @@ -10307,10 +11102,11 @@ complex.real.setter=function(){throw _b_.AttributeError.$factory("readonly attri complex.imag=function(self){return new Number(self.$imag)} complex.imag.setter=function(){throw _b_.AttributeError.$factory("readonly attribute")} var _real=1,_real_mantissa=2,_sign=3,_imag=4,_imag_mantissa=5,_j=6 -var type_conversions=["__complex__","__float__","__int__"] -var _convert=function(num){for(var i=0;i < type_conversions.length;i++){var missing={},tc=getattr(num,type_conversions[i],missing) -if(tc !==missing){return tc()}} -return num} +var type_conversions=["__complex__","__float__","__index__"] +var _convert=function(num){var klass=num.__class__ ||$B.get_class(num) +for(var i=0;i < type_conversions.length;i++){var missing={},method=$B.$getattr(klass,type_conversions[i],missing) +if(method !==missing){return method(num)}} +return null} var make_complex=$B.make_complex=function(real,imag){return{ __class__:complex,$real:real,$imag:imag}} complex.$factory=function(){return complex.__new__(complex,...arguments)} @@ -10318,8 +11114,6 @@ $B.set_func_names(complex,"builtins") _b_.complex=complex})(__BRYTHON__) ; ;(function($B){ -var bltns=$B.InjectBuiltins() -eval(bltns) var DEFAULT_MIN_MERGE=32 var DEFAULT_MIN_GALLOPING=7 var DEFAULT_TMP_STORAGE_LENGTH=256 @@ -10612,10 +11406,14 @@ list.__add__=function(self,other){if($B.get_class(self)!==$B.get_class(other)){v if(radd !==_b_.NotImplemented){return radd(self)} throw _b_.TypeError.$factory('can only concatenate list (not "'+ $B.class_name(other)+'") to list')} -var res=self.valueOf().concat(other.valueOf()) +var 
res=self.slice(),is_js=other.$brython_class=="js" +for(const item of other){res.push(is_js ? $B.$JS2Py(item):item)} res.__brython__=true if(isinstance(self,tuple)){res=tuple.$factory(res)} return res} +list.__class_getitem__=function(cls,item){ +if(! Array.isArray(item)){item=[item]} +return $B.GenericAlias.$factory(cls,item)} list.__contains__=function(self,item){var $=$B.args("__contains__",2,{self:null,item:null},["self","item"],arguments,{},null,null),self=$.self,item=$.item var _eq=function(other){return $B.rich_comp("__eq__",item,other)} var i=0 @@ -10626,7 +11424,8 @@ list.__delitem__=function(self,arg){if(isinstance(arg,_b_.int)){var pos=arg if(arg < 0){pos=self.length+pos} if(pos >=0 && pos < self.length){self.splice(pos,1) return $N} -throw _b_.IndexError.$factory("list index out of range")} +throw _b_.IndexError.$factory($B.class_name(self)+ +" index out of range")} if(isinstance(arg,_b_.slice)){var step=arg.step if(step===$N){step=1} var start=arg.start @@ -10643,20 +11442,26 @@ while(i--){self.splice(res[i],1)} return $N} if(_b_.hasattr(arg,"__int__")||_b_.hasattr(arg,"__index__")){list.__delitem__(self,_b_.int.$factory(arg)) return $N} -throw _b_.TypeError.$factory("list indices must be integer, not "+ -_b_.str.$factory(arg.__class__))} +throw _b_.TypeError.$factory($B.class_name(self)+ +" indices must be integer, not "+$B.class_name(arg))} list.__eq__=function(self,other){if(isinstance(self,list)){var klass=list}else{var klass=tuple} if(isinstance(other,klass)){if(other.length==self.length){var i=self.length while(i--){if(! $B.rich_comp("__eq__",self[i],other[i])){return false}} return true}} return _b_.NotImplemented} -list.__getitem__=function(self,arg){var $=$B.args("__getitem__",2,{self:null,key:null},["self","key"],arguments,{},null,null),self=$.self,key=$.key -var factory=$B.get_class(self).$factory +list.__getitem__=function(self,key){ +$B.check_no_kw("__getitem__",self,key) +$B.check_nb_args("__getitem__",2,arguments) +return list.$getitem(self,key)} +list.$getitem=function(self,key){var factory=(self.__class__ ||$B.get_class(self)).$factory if(isinstance(key,_b_.int)){var items=self.valueOf(),pos=key if(key < 0){pos=items.length+pos} if(pos >=0 && pos < items.length){return items[pos]} -throw _b_.IndexError.$factory("list index out of range")} -if(isinstance(key,_b_.slice)){ +throw _b_.IndexError.$factory($B.class_name(self)+ +" index out of range")} +if(key.__class__===_b_.slice ||isinstance(key,_b_.slice)){ +if(key.start===_b_.None && key.stop===_b_.None && +key.step===_b_.None){return self.slice()} var s=_b_.slice.$conv_for_seq(key,self.length) var res=[],i=null,items=self.valueOf(),pos=0,start=s.start,stop=s.stop,step=s.step if(step > 0){if(stop <=start){return factory(res)} @@ -10665,8 +11470,8 @@ return factory(res)}else{if(stop > start){return factory(res)} for(var i=start;i > stop;i+=step){res[pos++]=items[i]} return factory(res)}} if(_b_.hasattr(key,"__int__")||_b_.hasattr(key,"__index__")){return list.__getitem__(self,_b_.int.$factory(key))} -throw _b_.TypeError.$factory("list indices must be integer, not "+ -$B.class_name(key))} +throw _b_.TypeError.$factory($B.class_name(self)+ +" indices must be integer, not "+$B.class_name(key))} list.__ge__=function(self,other){if(! isinstance(other,[list,_b_.tuple])){return _b_.NotImplemented} var i=0 while(i < self.length){if(i >=other.length){return true} @@ -10714,7 +11519,7 @@ return ! res} list.__len__=function(self){return self.length} list.__lt__=function(self,other){if(! 
isinstance(other,[list,_b_.tuple])){return _b_.NotImplemented} var i=0 -while(i < self.length){if(i >=other.length){return true} +while(i < self.length){if(i >=other.length){return false} if($B.rich_comp("__eq__",self[i],other[i])){i++}else{res=getattr(self[i],"__lt__")(other[i]) if(res===_b_.NotImplemented){throw _b_.TypeError.$factory("unorderable types: "+ $B.class_name(self[i])+"() >= "+ @@ -10735,31 +11540,29 @@ list.__new__=function(cls,...args){if(cls===undefined){throw _b_.TypeError.$fact var res=[] res.__class__=cls res.__brython__=true -res.__dict__=_b_.dict.$factory() +res.__dict__=$B.empty_dict() +return res} +list.__repr__=function(self){if($B.repr.enter(self)){ +return '[...]'} +var _r=[],res +for(var i=0;i < self.length;i++){_r.push(_b_.repr(self[i]))} +if(self.__class__===tuple){if(self.length==1){res="("+_r[0]+",)"}else{res="("+_r.join(", ")+")"}}else{res="["+_r.join(", ")+"]"} +$B.repr.leave(self) return res} -list.__repr__=function(self){if(self===undefined){return ""} -var _r=[] -for(var i=0;i < self.length;i++){if(self[i]===self){_r.push('[...]')} -else{_r.push(_b_.repr(self[i]))}} -if(self.__class__===tuple){if(self.length==1){return "("+_r[0]+",)"} -return "("+_r.join(", ")+")"} -return "["+_r.join(", ")+"]"} list.__setattr__=function(self,attr,value){if(self.__class__===list){if(list.hasOwnProperty(attr)){throw _b_.AttributeError.$factory("'list' object attribute '"+ attr+"' is read-only")}else{throw _b_.AttributeError.$factory( "'list' object has no attribute '"+attr+"'")}} -self.__dict__.$string_dict[attr]=value +_b_.dict.$setitem(self.__dict__,attr,value) return $N} list.__setitem__=function(){var $=$B.args("__setitem__",3,{self:null,key:null,value:null},["self","key","value"],arguments,{},null,null),self=$.self,arg=$.key,value=$.value list.$setitem(self,arg,value)} list.$setitem=function(self,arg,value){ if(typeof arg=="number" ||isinstance(arg,_b_.int)){var pos=arg if(arg < 0){pos=self.length+pos} -if(pos >=0 && pos < self.length){self[pos]=value} -else{throw _b_.IndexError.$factory("list index out of range")} +if(pos >=0 && pos < self.length){self[pos]=value}else{throw _b_.IndexError.$factory("list index out of range")} return $N} if(isinstance(arg,_b_.slice)){var s=_b_.slice.$conv_for_seq(arg,self.length) -if(arg.step===null){$B.set_list_slice(self,s.start,s.stop,value)} -else{$B.set_list_slice_step(self,s.start,s.stop,s.step,value)} +if(arg.step===null){$B.set_list_slice(self,s.start,s.stop,value)}else{$B.set_list_slice_step(self,s.start,s.stop,s.step,value)} return $N} if(_b_.hasattr(arg,"__int__")||_b_.hasattr(arg,"__index__")){list.__setitem__(self,_b_.int.$factory(arg),value) return $N} @@ -10767,8 +11570,9 @@ throw _b_.TypeError.$factory("list indices must be integer, not "+ $B.class_name(arg))} $B.make_rmethods(list) var _ops=["add","sub"] -list.append=function(){var $=$B.args("append",2 ,{self:null,x:null},["self","x"],arguments,{},null,null) -$.self[$.self.length]=$.x +list.append=function(self,x){$B.check_no_kw("append",self,x) +$B.check_nb_args("append",2,arguments) +self.push(x) return $N} list.clear=function(){var $=$B.args("clear",1,{self:null},["self"],arguments,{},null,null) while($.self.length){$.self.pop()} @@ -10792,7 +11596,8 @@ else{if(stop.__class__===$B.long_int){stop=parseInt(stop.value)*(stop.pos ? 
1 :- if(stop < 0){stop=Math.min(self.length,stop+self.length)} stop=Math.min(stop,self.length)} for(var i=start;i < stop;i++){if(_eq(self[i])){return i}} -throw _b_.ValueError.$factory(_b_.str.$factory($.x)+" is not in list")} +throw _b_.ValueError.$factory(_b_.repr($.x)+" is not in "+ +$B.class_name(self))} list.insert=function(){var $=$B.args("insert",3,{self:null,i:null,item:null},["self","i","item"],arguments,{},null,null) $.self.splice($.i,0,$.item) return $N} @@ -10846,37 +11651,35 @@ return cl} list.sort=function(self){var $=$B.args("sort",1,{self:null},["self"],arguments,{},null,"kw") check_not_tuple(self,"sort") var func=$N,reverse=false,kw_args=$.kw,keys=_b_.list.$factory(_b_.dict.$$keys(kw_args)) -for(var i=0;i < keys.length;i++){if(keys[i]=="key"){func=kw_args.$string_dict[keys[i]]} -else if(keys[i]=="reverse"){reverse=kw_args.$string_dict[keys[i]]} -else{throw _b_.TypeError.$factory("'"+keys[i]+ +for(var i=0;i < keys.length;i++){if(keys[i]=="key"){func=kw_args.$string_dict[keys[i]][0]}else if(keys[i]=="reverse"){reverse=kw_args.$string_dict[keys[i]][0]}else{throw _b_.TypeError.$factory("'"+keys[i]+ "' is an invalid keyword argument for this function")}} if(self.length==0){return} if(func !==$N){func=$B.$call(func)} self.$cl=$elts_class(self) var cmp=null; -if(func===$N && self.$cl===_b_.str){if(reverse){cmp=function(b,a){return $B.$AlphabeticalCompare(a,b)}}else{cmp=function(a,b){return $B.$AlphabeticalCompare(a,b)}}}else if(func===$N && self.$cl===_b_.int){if(reverse){cmp=function(b,a){return a-b}}else{cmp=function(a,b){return a-b}}}else{if(func===$N){if(reverse){cmp=function(b,a){res=getattr(a,"__le__")(b) +if(func===$N && self.$cl===_b_.str){if(reverse){cmp=function(b,a){return $B.$AlphabeticalCompare(a,b)}}else{cmp=function(a,b){return $B.$AlphabeticalCompare(a,b)}}}else if(func===$N && self.$cl===_b_.int){if(reverse){cmp=function(b,a){return a-b}}else{cmp=function(a,b){return a-b}}}else{if(func===$N){if(reverse){cmp=function(b,a){res=getattr(a,"__lt__")(b) if(res===_b_.NotImplemented){throw _b_.TypeError.$factory("unorderable types: "+ -$B.class_name(b)+"() <="+ +$B.class_name(b)+"() < "+ $B.class_name(a)+"()")} if(res){if(a==b){return 0} return-1} -return 1}}else{cmp=function(a,b){res=getattr(a,"__le__")(b) +return 1}}else{cmp=function(a,b){res=getattr(a,"__lt__")(b) if(res===_b_.NotImplemented){throw _b_.TypeError.$factory("unorderable types: "+ -$B.class_name(a)+"() <="+ +$B.class_name(a)+"() < "+ $B.class_name(b)+"()")} if(res){if(a==b){return 0} return-1} return 1}}}else{if(reverse){cmp=function(b,a){var _a=func(a),_b=func(b) -res=getattr(_a,"__le__")(_b) +res=getattr(_a,"__lt__")(_b) if(res===_b_.NotImplemented){throw _b_.TypeError.$factory("unorderable types: "+ -$B.class_name(b)+"() <="+ +$B.class_name(b)+"() < "+ $B.class_name(a)+"()")} if(res){if(_a==_b){return 0} return-1} return 1}}else{cmp=function(a,b){var _a=func(a),_b=func(b) res=$B.$getattr(_a,"__lt__")(_b) if(res===_b_.NotImplemented){throw _b_.TypeError.$factory("unorderable types: "+ -$B.class_name(a)+"() <="+ +$B.class_name(a)+"() < "+ $B.class_name(b)+"()")} if(res){if(_a==_b){return 0} return-1} @@ -10895,12 +11698,12 @@ res.__class__=list return res} return obj} var res=[],pos=0,arg=$B.$iter(obj),next_func=$B.$call(getattr(arg,"__next__")) -while(1){try{res[pos++]=next_func()} -catch(err){if(!isinstance(err,_b_.StopIteration)){throw err} +while(1){try{res[pos++]=next_func()}catch(err){if(!isinstance(err,_b_.StopIteration)){throw err} break}} res.__brython__=true return res} 
$B.set_func_names(list,"builtins") +list.__class_getitem__=_b_.classmethod.$factory(list.__class_getitem__) var JSArray=$B.JSArray=$B.make_class("JSArray",function(array){return{ __class__:JSArray,js:array}} ) @@ -10920,7 +11723,7 @@ obj.__class__=tuple return obj} $B.fast_tuple=function(array){array.__class__=tuple array.__brython__=true -array.__dict__=_b_.dict.$factory() +array.__dict__=$B.empty_dict() return array} for(var attr in list){switch(attr){case "__delitem__": case "__iadd__": @@ -10933,7 +11736,7 @@ case "remove": case "reverse": break default: -if(tuple[attr]===undefined){if(typeof list[attr]=="function"){tuple[attr]=(function(x){return function(){return list[x].apply(null,arguments)}})(attr)}else{}}}} +if(tuple[attr]===undefined){if(typeof list[attr]=="function"){tuple[attr]=(function(x){return function(){return list[x].apply(null,arguments)}})(attr)}}}} tuple.__eq__=function(self,other){ if(other===undefined){return self===tuple} return list.__eq__(self,other)} @@ -10950,7 +11753,7 @@ tuple.__new__=function(cls,...args){if(cls===undefined){throw _b_.TypeError.$fac var self=[] self.__class__=cls self.__brython__=true -self.__dict__=_b_.dict.$factory() +self.__dict__=$B.empty_dict() var arg=$B.$iter(args[0]),next_func=$B.$call(getattr(arg,"__next__")) while(1){try{var item=next_func() self.push(item)} @@ -10962,26 +11765,27 @@ _b_.list=list _b_.tuple=tuple _b_.object.__bases__=tuple.$factory()})(__BRYTHON__) ; + +var $B=__BRYTHON__ +$B.unicode={"Cc":[[0,32],[127,33]],"Zs":[32,160,5760,6158,[8192,11],8239,8287,12288],"Po":[[33,3],[37,3],[42,3,2],47,58,59,63,64,92,161,183,191,894,903,[1370,6],1417,[1472,3,3],1523,1524,1545,1546,1548,1549,1563,1566,1567,[1642,4],1748,[1792,14],[2039,3],[2096,15],2404,2405,2416,3572,3663,3674,3675,[3844,15],3973,[4048,5],[4170,6],4347,[4961,8],5741,5742,[5867,3],5941,5942,[6100,3],[6104,3],[6144,6],[6151,4],6468,6469,6622,6623,6686,6687,[6816,7],[6824,6],[7002,7],[7227,5],7294,7295,7379,8214,8215,[8224,8],[8240,9],[8251,4],[8257,3],[8263,11],8275,[8277,10],[11513,4],11518,11519,11776,11777,[11782,3],11787,[11790,9],11800,11801,11803,11806,11807,[11818,5],11824,11825,[12289,3],12349,12539,42238,42239,[42509,3],42611,42622,[42738,6],[43124,4],43214,43215,[43256,3],43310,43311,43359,[43457,13],43486,43487,[43612,4],43742,43743,44011,[65040,7],65049,65072,65093,65094,[65097,4],[65104,3],[65108,4],[65119,3],65128,65130,65131,[65281,3],[65285,3],[65290,3,2],65295,65306,65307,65311,65312,65340,65377,65380,65381,65792,65793,66463,66512,67671,67871,67903,[68176,9],68223,[68409,7],69819,69820,[69822,4],[74864,4]],"Sc":[36,[162,4],1547,2546,2547,2555,2801,3065,3647,6107,[8352,25],43064,65020,65129,65284,65504,65505,65509,65510],"Ps":[40,91,123,3898,3900,5787,8218,8222,8261,8317,8333,9001,[10088,7,2],10181,[10214,5,2],[10627,11,2],10712,10714,10748,[11810,4,2],[12296,5,2],[12308,4,2],12317,64830,65047,[65077,8,2],65095,[65113,3,2],65288,65339,65371,65375,65378],"Pe":[41,93,125,3899,3901,5788,8262,8318,8334,9002,[10089,7,2],10182,[10215,5,2],[10628,11,2],10713,10715,10749,[11811,4,2],[12297,5,2],[12309,4,2],12318,12319,64831,65048,[65078,8,2],65096,[65114,3,2],65289,65341,[65373,3,3]],"Sm":[43,[60,3],124,126,172,177,215,247,1014,[1542,3],8260,8274,[8314,3],[8330,3],[8512,5],8523,[8592,5],8602,8603,[8608,3,3],8622,8654,8655,8658,8660,[8692,268],[8968,4],8992,8993,9084,[9115,25],[9180,6],9655,9665,[9720,8],9839,[10176,5],[10183,4],10188,[10192,22],[10224,16],[10496,131],[10649,63],[10716,32],[10750,258],[11056,21],[11079,6],64297,65122,[65124,3],65291,[65308,3]
,65372,65374,65506,[65513,4],120513,120539,120571,120597,120629,120655,120687,120713,120745,120771],"Pd":[45,1418,1470,5120,6150,[8208,6],11799,11802,12316,12336,12448,65073,65074,65112,65123,65293],"Nd":[[48,10],[1632,10],[1776,10],[1984,10],[2406,10],[2534,10],[2662,10],[2790,10],[2918,10],[3046,10],[3174,10],[3302,10],[3430,10],[3664,10],[3792,10],[3872,10],[4160,10],[4240,10],[6112,10],[6160,10],[6470,10],[6608,11],[6784,10],[6800,10],[6992,10],[7088,10],[7232,10],[7248,10],[42528,10],[43216,10],[43264,10],[43472,10],[43600,10],[44016,10],[65296,10],[66720,10],[120782,50]],"Lu":[[65,26],[192,23],[216,7],[256,28,2],[313,8,2],[330,24,2],[377,3,2],385,[386,3,2],391,[393,3],[398,4],403,404,[406,3],412,413,415,[416,4,2],423,425,428,430,431,[433,3],437,439,440,444,[452,4,3],[463,7,2],[478,9,2],497,500,[502,3],[506,29,2],570,571,573,574,577,[579,4],[584,4,2],880,882,886,902,[904,3],908,910,911,[913,17],[931,9],975,[978,3],[984,12,2],1012,1015,1017,1018,[1021,51],[1120,17,2],[1162,28,2],[1217,7,2],[1232,43,2],[1329,38],[4256,38],[7680,75,2],[7838,49,2],[7944,8],[7960,6],[7976,8],[7992,8],[8008,6],[8025,4,2],[8040,8],[8120,4],[8136,4],[8152,4],[8168,5],[8184,4],8450,8455,[8459,3],[8464,3],8469,[8473,5],[8484,4,2],[8491,3],[8496,4],8510,8511,8517,8579,[11264,47],11360,[11362,3],[11367,4,2],[11374,3],11378,11381,[11390,3],[11394,49,2],11499,11501,[42560,16,2],[42594,6,2],[42624,12,2],[42786,7,2],[42802,31,2],[42873,3,2],[42878,5,2],42891,[65313,26],[66560,40],[119808,26],[119860,26],[119912,26],119964,119966,[119967,3,3],119974,[119977,4],[119982,8],[120016,26],120068,120069,[120071,4],[120077,8],[120086,7],120120,120121,[120123,4],[120128,5],120134,[120138,7],[120172,26],[120224,26],[120276,26],[120328,26],[120380,26],[120432,26],[120488,25],[120546,25],[120604,25],[120662,25],[120720,25],120778],"Sk":[94,96,168,175,180,184,[706,4],[722,14],[741,7],749,[751,17],885,900,901,8125,[8127,3],[8141,3],[8157,3],[8173,3],8189,8190,12443,12444,[42752,23],42784,42785,42889,42890,65342,65344,65507],"Pc":[95,8255,8256,8276,65075,65076,[65101,3],65343],"Ll":[[97,26],170,181,186,[223,24],[248,8],[257,28,2],[312,9,2],[329,24,2],[378,3,2],383,384,387,389,392,396,397,402,405,[409,3],414,[417,3,2],424,426,427,429,432,436,438,441,442,[445,3],[454,3,3],[462,8,2],[477,10,2],496,499,501,[505,30,2],[564,6],572,575,576,578,[583,5,2],[592,68],[661,27],881,[883,3,4],892,893,912,[940,35],976,977,[981,3],[985,12,2],[1008,4],[1013,3,3],1020,[1072,48],[1121,17,2],[1163,27,2],[1218,7,2],[1231,44,2],[1377,39],[7424,44],[7522,22],[7545,34],[7681,75,2],[7830,8],[7839,49,2],[7936,8],[7952,6],[7968,8],[7984,8],[8000,6],[8016,8],[8032,8],[8048,14],[8064,8],[8080,8],[8096,8],[8112,5],8118,8119,8126,[8130,3],8134,8135,[8144,4],8150,8151,[8160,8],[8178,3],8182,8183,8458,8462,8463,8467,[8495,3,5],8508,8509,[8518,4],8526,8580,[11312,47],11361,11365,[11366,4,2],11377,11379,11380,[11382,7],[11393,50,2],11492,11500,11502,[11520,38],[42561,16,2],[42595,6,2],[42625,12,2],[42787,7,2],42800,[42801,33,2],[42866,7],42874,42876,[42879,5,2],42892,[64256,7],[64275,5],[65345,26],[66600,40],[119834,26],[119886,7],[119894,18],[119938,26],[119990,4],119995,[119997,7],[120005,11],[120042,26],[120094,26],[120146,26],[120198,26],[120250,26],[120302,26],[120354,26],[120406,26],[120458,28],[120514,25],[120540,6],[120572,25],[120598,6],[120630,25],[120656,6],[120688,25],[120714,6],[120746,25],[120772,6],120779],"So":[166,167,169,174,176,182,1154,1550,1551,1769,1789,1790,2038,2554,2928,[3059,6],3066,3199,3313,3314,3449,[3841,3],[3859,5],[3866,6],[3892,3,2],[40
30,8],[4039,6],4046,4047,[4053,4],4254,4255,4960,[5008,10],6464,[6624,32],[7009,10],[7028,9],8448,8449,[8451,4],8456,8457,8468,[8470,3],[8478,6],[8485,3,2],8494,8506,8507,8522,8524,8525,8527,[8597,5],[8604,4],8609,8610,8612,8613,[8615,7],[8623,31],8656,[8657,3,2],[8662,30],[8960,8],[8972,20],[8994,7],[9003,81],[9085,30],[9140,40],[9186,7],[9216,39],[9280,11],[9372,78],[9472,183],[9656,9],[9666,54],[9728,111],[9840,94],[9935,19],9955,[9960,24],[9985,4],[9990,4],[9996,28],[10025,35],10061,[10063,4],[10070,9],[10081,7],10132,[10136,24],[10161,14],[10240,256],[11008,48],11077,11078,[11088,10],[11493,6],[11904,26],[11931,89],[12032,214],[12272,12],12292,12306,12307,12320,12342,12343,12350,12351,12688,12689,[12694,10],[12736,36],[12800,31],[12842,39],[12896,32],[12938,39],[12992,63],[13056,256],[19904,64],[42128,55],[43048,4],43062,43063,43065,[43639,3],65021,65508,65512,65517,65518,65532,65533,65794,[65847,9],[65913,17],[65936,12],[66000,45],[118784,246],[119040,39],[119081,60],[119146,3],119171,119172,[119180,30],[119214,48],[119296,66],119365,[119552,87],[126976,44],[127024,100],[127248,31],127281,127293,127295,[127298,3,4],[127307,4],127319,127327,127353,127355,127356,127359,[127370,4],127376,127488,[127504,34],[127552,9]],"Pi":[171,8216,8219,8220,8223,8249,11778,11780,11785,11788,11804,11808],"Cf":[173,[1536,4],1757,1807,6068,6069,[8203,5],[8234,5],[8288,5],[8298,6],65279,[65529,3],69821,[119155,8],917505,[917536,96]],"No":[178,179,185,[188,3],[2548,6],[3056,3],[3192,7],[3440,6],[3882,10],[4969,20],[6128,10],8304,[8308,6],[8320,10],[8528,16],8585,[9312,60],[9450,22],[10102,30],11517,[12690,4],[12832,10],[12881,15],[12928,10],[12977,15],[43056,6],[65799,45],[65909,4],65930,[66336,4],[67672,8],[67862,6],[68160,8],68221,68222,[68440,8],[68472,8],[69216,31],[119648,18],[127232,11]],"Pf":[187,8217,8221,8250,11779,11781,11786,11789,11805,11809],"Lo":[443,[448,4],660,[1488,27],[1520,3],[1569,31],[1601,10],1646,1647,[1649,99],1749,1774,1775,[1786,3],1791,1808,[1810,30],[1869,89],1969,[1994,33],[2048,22],[2308,54],2365,2384,[2392,10],2418,[2425,7],[2437,8],2447,2448,[2451,22],[2474,7],2482,[2486,4],2493,2510,2524,2525,[2527,3],2544,2545,[2565,6],2575,2576,[2579,22],[2602,7],2610,2611,2613,2614,2616,2617,[2649,4],2654,[2674,3],[2693,9],[2703,3],[2707,22],[2730,7],2738,2739,[2741,5],2749,2768,2784,2785,[2821,8],2831,2832,[2835,22],[2858,7],2866,2867,[2869,5],2877,2908,2909,[2911,3],2929,2947,[2949,6],[2958,3],[2962,4],2969,[2970,3,2],2975,2979,2980,[2984,3],[2990,12],3024,[3077,8],[3086,3],[3090,23],[3114,10],[3125,5],3133,3160,3161,3168,3169,[3205,8],[3214,3],[3218,23],[3242,10],[3253,5],3261,3294,3296,3297,[3333,8],[3342,3],[3346,23],[3370,16],3389,3424,3425,[3450,6],[3461,18],[3482,24],[3507,9],3517,[3520,7],[3585,48],3634,3635,[3648,6],3713,3714,3716,3719,3720,3722,3725,[3732,4],[3737,7],[3745,3],3749,3751,3754,3755,[3757,4],3762,3763,3773,[3776,5],3804,3805,3840,[3904,8],[3913,36],[3976,4],[4096,43],4159,[4176,6],[4186,4],4193,4197,4198,[4206,3],[4213,13],4238,[4304,43],[4352,329],[4682,4],[4688,7],4696,[4698,4],[4704,41],[4746,4],[4752,33],[4786,4],[4792,7],4800,[4802,4],[4808,15],[4824,57],[4882,4],[4888,67],[4992,16],[5024,85],[5121,620],[5743,17],[5761,26],[5792,75],[5888,13],[5902,4],[5920,18],[5952,18],[5984,13],[5998,3],[6016,52],6108,[6176,35],[6212,52],[6272,41],6314,[6320,70],[6400,29],[6480,30],[6512,5],[6528,44],[6593,7],[6656,23],[6688,53],[6917,47],[6981,7],[7043,30],7086,7087,[7168,36],[7245,3],[7258,30],[7401,4],[7406,4],[8501,4],[11568,54],[11648,23],[11680,7],[11688,7],[11696,7],
[11704,7],[11712,7],[11720,7],[11728,7],[11736,7],12294,12348,[12353,86],12447,[12449,90],12543,[12549,41],[12593,94],[12704,24],[12784,16],13312,19893,19968,40907,[40960,21],[40982,1143],[42192,40],[42240,268],[42512,16],42538,42539,42606,[42656,70],[43003,7],[43011,3],[43015,4],[43020,23],[43072,52],[43138,50],[43250,6],43259,[43274,28],[43312,23],[43360,29],[43396,47],[43520,41],[43584,3],[43588,8],[43616,16],[43633,6],43642,[43648,48],43697,43701,43702,[43705,5],43712,43714,43739,43740,[43968,35],44032,55203,[55216,23],[55243,49],[63744,302],[64048,62],[64112,106],64285,[64287,10],[64298,13],[64312,5],64318,64320,64321,64323,64324,[64326,108],[64467,363],[64848,64],[64914,54],[65008,12],[65136,5],[65142,135],[65382,10],[65393,45],[65440,31],[65474,6],[65482,6],[65490,6],[65498,3],[65536,12],[65549,26],[65576,19],65596,65597,[65599,15],[65616,14],[65664,123],[66176,29],[66208,49],[66304,31],[66352,17],[66370,8],[66432,30],[66464,36],[66504,8],[66640,78],[67584,6],67592,[67594,44],67639,67640,67644,[67647,23],[67840,22],[67872,26],68096,[68112,4],[68117,3],[68121,27],[68192,29],[68352,54],[68416,22],[68448,19],[68608,73],[69763,45],[73728,879],[77824,1071],131072,173782,173824,177972,[194560,542]],"Lt":[[453,3,3],498,[8072,8],[8088,8],[8104,8],8124,8140,8188],"Lm":[[688,18],[710,12],[736,5],748,750,884,890,1369,1600,1765,1766,2036,2037,2042,2074,2084,2088,2417,3654,3782,4348,6103,6211,6823,[7288,6],[7468,54],7544,[7579,37],8305,8319,[8336,5],11389,11631,11823,12293,[12337,5],12347,12445,12446,[12540,3],40981,[42232,6],42508,42623,[42775,9],42864,42888,43471,43632,43741,65392,65438,65439],"Mn":[[768,112],[1155,5],[1425,45],1471,1473,1474,1476,1477,1479,[1552,11],[1611,20],1648,[1750,7],[1759,6],1767,1768,[1770,4],1809,[1840,27],[1958,11],[2027,9],[2070,4],[2075,9],[2085,3],[2089,5],[2304,3],2364,[2369,8],2381,[2385,5],2402,2403,2433,2492,[2497,4],2509,2530,2531,2561,2562,2620,2625,2626,2631,2632,[2635,3],2641,2672,2673,2677,2689,2690,2748,[2753,5],2759,2760,2765,2786,2787,2817,2876,2879,[2881,4],2893,2902,2914,2915,2946,3008,3021,[3134,3],[3142,3],[3146,4],3157,3158,3170,3171,3260,3263,3270,3276,3277,3298,3299,[3393,4],3405,3426,3427,3530,[3538,3],3542,3633,[3636,7],[3655,8],3761,[3764,6],3771,3772,[3784,6],3864,3865,[3893,3,2],[3953,14],[3968,5],3974,3975,[3984,8],[3993,36],4038,[4141,4],[4146,6],4153,4154,4157,4158,4184,4185,[4190,3],[4209,4],4226,4229,4230,4237,4253,4959,[5906,3],[5938,3],5970,5971,6002,6003,[6071,7],6086,[6089,11],6109,[6155,3],6313,[6432,3],6439,6440,6450,[6457,3],6679,6680,6742,[6744,7],6752,6754,[6757,8],[6771,10],6783,[6912,4],6964,[6966,5],6972,6978,[7019,9],7040,7041,[7074,4],7080,7081,[7212,8],7222,7223,[7376,3],[7380,13],[7394,7],7405,[7616,39],[7677,3],[8400,13],8417,[8421,12],[11503,3],[11744,32],[12330,6],12441,12442,42607,42620,42621,42736,42737,43010,43014,43019,43045,43046,43204,[43232,18],[43302,8],[43335,11],[43392,3],43443,[43446,4],43452,[43561,6],43569,43570,43573,43574,43587,43596,43696,[43698,3],43703,43704,43710,43711,43713,44005,44008,44013,64286,[65024,16],[65056,7],66045,[68097,3],68101,68102,[68108,4],[68152,3],68159,69760,69761,[69811,4],69817,69818,[119143,3],[119163,8],[119173,7],[119210,4],[119362,3],[917760,240]],"Me":[1160,1161,1758,[8413,4],[8418,3],[42608,3]],"Mc":[2307,[2366,3],[2377,4],2382,2434,2435,[2494,3],2503,2504,2507,2508,2519,2563,[2622,3],2691,[2750,3],2761,2763,2764,2818,2819,2878,2880,2887,2888,2891,2892,2903,3006,3007,3009,3010,[3014,3],[3018,3],3031,[3073,3],[3137,4],3202,3203,3262,[3264,5],3271,3272,3274,3275,3285,3286,
3330,3331,[3390,3],[3398,3],[3402,3],3415,3458,3459,[3535,3],[3544,8],3570,3571,3902,3903,3967,4139,4140,4145,4152,4155,4156,4182,4183,[4194,3],[4199,7],4227,4228,[4231,6],4239,[4250,3],6070,[6078,8],6087,6088,[6435,4],[6441,3],6448,6449,[6451,6],[6576,17],6600,6601,[6681,3],6741,6743,6753,6755,6756,[6765,6],6916,6965,6971,[6973,5],6979,6980,7042,7073,7078,7079,7082,[7204,8],7220,7221,7393,7410,43043,43044,43047,43136,43137,[43188,16],43346,43347,43395,43444,43445,43450,43451,[43453,4],43567,43568,43571,43572,43597,43643,44003,44004,44006,44007,44009,44010,44012,69762,[69808,3],69815,69816,119141,119142,[119149,6]],"Nl":[[5870,3],[8544,35],[8581,4],12295,[12321,9],[12344,3],[42726,10],[65856,53],66369,66378,[66513,5],[74752,99]],"Zl":[8232],"Zp":[8233],"Cs":[55296,56191,56192,56319,56320,57343],"Co":[57344,63743,983040,1048573,1048576,1114109],"digits":[[48,10],178,179,185,[1632,10],[1776,10],[1984,10],[2406,10],[2534,10],[2662,10],[2790,10],[2918,10],[3046,10],[3174,10],[3302,10],[3430,10],[3558,10],[3664,10],[3792,10],[3872,10],[4160,10],[4240,10],[4969,9],[6112,10],[6160,10],[6470,10],[6608,11],[6784,10],[6800,10],[6992,10],[7088,10],[7232,10],[7248,10],8304,[8308,6],[8320,10],[9312,9],[9332,9],[9352,9],9450,[9461,9],9471,[10102,9],[10112,9],[10122,9],[42528,10],[43216,10],[43264,10],[43472,10],[43504,10],[43600,10],[44016,10],[65296,10],[66720,10],[68160,4],[68912,10],[69216,9],[69714,9],[69734,10],[69872,10],[69942,10],[70096,10],[70384,10],[70736,10],[70864,10],[71248,10],[71360,10],[71472,10],[71904,10],[72784,10],[73040,10],[73120,10],[92768,10],[93008,10],[120782,50],[123200,10],[123632,10],[125264,10],[127232,11]],"numeric":[[48,10],178,179,185,[188,3],[1632,10],[1776,10],[1984,10],[2406,10],[2534,10],[2548,6],[2662,10],[2790,10],[2918,10],[2930,6],[3046,13],[3174,10],[3192,7],[3302,10],[3416,7],[3430,19],[3558,10],[3664,10],[3792,10],[3872,20],[4160,10],[4240,10],[4969,20],[5870,3],[6112,10],[6128,10],[6160,10],[6470,10],[6608,11],[6784,10],[6800,10],[6992,10],[7088,10],[7232,10],[7248,10],8304,[8308,6],[8320,10],[8528,51],[8581,5],[9312,60],[9450,22],[10102,30],11517,12295,[12321,9],[12344,3],[12690,4],[12832,10],[12872,8],[12881,15],[12928,10],[12977,15],13317,13443,14378,15181,19968,19971,19975,19977,20061,20108,20116,20118,20159,20160,20191,20200,20237,20336,20740,20806,[20841,3,2],21313,[21315,3],21324,[21441,4],22235,22769,22777,24186,24318,24319,[24332,3],24336,25342,25420,26578,28422,29590,30334,32902,33836,36014,36019,36144,38433,38470,38476,38520,38646,[42528,10],[42726,10],[43056,6],[43216,10],[43264,10],[43472,10],[43504,10],[43600,10],[44016,10],63851,63859,63864,63922,63953,63955,63997,[65296,10],[65799,45],[65856,57],65930,65931,[66273,27],[66336,4],66369,66378,[66513,5],[66720,10],[67672,8],[67705,7],[67751,9],[67835,5],[67862,6],68028,68029,[68032,16],[68050,46],[68160,9],68221,68222,[68253,3],[68331,5],[68440,8],[68472,8],[68521,7],[68858,6],[68912,10],[69216,31],[69405,10],[69457,4],[69714,30],[69872,10],[69942,10],[70096,10],[70113,20],[70384,10],[70736,10],[70864,10],[71248,10],[71360,10],[71472,12],[71904,19],[72784,29],[73040,10],[73120,10],[73664,21],[74752,111],[92768,10],[93008,10],[93019,7],[93824,23],[119520,20],[119648,25],[120782,50],[123200,10],[123632,10],[125127,9],[125264,10],[126065,59],[126125,3],[126129,4],[126209,45],[126255,15],[127232,13],131073,131172,131298,131361,133418,133507,133516,133532,133866,133885,133913,140176,141720,146203,156269,194704],"Cn":[[888,2],[896,4],[907,1],[909,1],[930,1],[1328,1],[1367,2],[1419,2],[1424,1],[1480,8],[
1515,4],[1525,11],[1565,1],[1806,1],[1867,2],[1970,14],[2043,2],[2094,2],[2111,1],[2140,2],[2143,1],[2155,53],[2229,1],[2238,21],[2436,1],[2445,2],[2449,2],[2473,1],[2481,1],[2483,3],[2490,2],[2501,2],[2505,2],[2511,8],[2520,4],[2526,1],[2532,2],[2559,2],[2564,1],[2571,4],[2577,2],[2601,1],[2609,1],[2612,1],[2615,1],[2618,2],[2621,1],[2627,4],[2633,2],[2638,3],[2642,7],[2653,1],[2655,7],[2679,10],[2692,1],[2702,1],[2706,1],[2729,1],[2737,1],[2740,1],[2746,2],[2758,1],[2762,1],[2766,2],[2769,15],[2788,2],[2802,7],[2816,1],[2820,1],[2829,2],[2833,2],[2857,1],[2865,1],[2868,1],[2874,2],[2885,2],[2889,2],[2894,8],[2904,4],[2910,1],[2916,2],[2936,10],[2948,1],[2955,3],[2961,1],[2966,3],[2971,1],[2973,1],[2976,3],[2981,3],[2987,3],[3002,4],[3011,3],[3017,1],[3022,2],[3025,6],[3032,14],[3067,5],[3085,1],[3089,1],[3113,1],[3130,3],[3141,1],[3145,1],[3150,7],[3159,1],[3163,5],[3172,2],[3184,7],[3213,1],[3217,1],[3241,1],[3252,1],[3258,2],[3269,1],[3273,1],[3278,7],[3287,7],[3295,1],[3300,2],[3312,1],[3315,13],[3332,1],[3341,1],[3345,1],[3397,1],[3401,1],[3408,4],[3428,2],[3456,2],[3460,1],[3479,3],[3506,1],[3516,1],[3518,2],[3527,3],[3531,4],[3541,1],[3543,1],[3552,6],[3568,2],[3573,12],[3643,4],[3676,37],[3715,1],[3717,1],[3723,1],[3748,1],[3750,1],[3774,2],[3781,1],[3783,1],[3790,2],[3802,2],[3808,32],[3912,1],[3949,4],[3992,1],[4029,1],[4045,1],[4059,37],[4294,1],[4296,5],[4302,2],[4681,1],[4686,2],[4695,1],[4697,1],[4702,2],[4745,1],[4750,2],[4785,1],[4790,2],[4799,1],[4801,1],[4806,2],[4823,1],[4881,1],[4886,2],[4955,2],[4989,3],[5018,6],[5110,2],[5118,2],[5789,3],[5881,7],[5901,1],[5909,11],[5943,9],[5972,12],[5997,1],[6001,1],[6004,12],[6110,2],[6122,6],[6138,6],[6159,1],[6170,6],[6265,7],[6315,5],[6390,10],[6431,1],[6444,4],[6460,4],[6465,3],[6510,2],[6517,11],[6572,4],[6602,6],[6619,3],[6684,2],[6751,1],[6781,2],[6794,6],[6810,6],[6830,2],[6847,65],[6988,4],[7037,3],[7156,8],[7224,3],[7242,3],[7305,7],[7355,2],[7368,8],[7419,5],[7674,1],[7958,2],[7966,2],[8006,2],[8014,2],[8024,1],[8026,1],[8028,1],[8030,1],[8062,2],[8117,1],[8133,1],[8148,2],[8156,1],[8176,2],[8181,1],[8191,1],[8293,1],[8306,2],[8335,1],[8349,3],[8384,16],[8433,15],[8588,4],[9255,25],[9291,21],[11124,2],[11158,2],[11311,1],[11359,1],[11508,5],[11558,1],[11560,5],[11566,2],[11624,7],[11633,14],[11671,9],[11687,1],[11695,1],[11703,1],[11711,1],[11719,1],[11727,1],[11735,1],[11743,1],[11856,48],[11930,1],[12020,12],[12246,26],[12284,4],[12352,1],[12439,2],[12544,5],[12592,1],[12687,1],[12731,5],[12772,12],[12831,1],[19894,10],[40944,16],[42125,3],[42183,9],[42540,20],[42744,8],[42944,2],[42951,48],[43052,4],[43066,6],[43128,8],[43206,8],[43226,6],[43348,11],[43389,3],[43470,1],[43482,4],[43519,1],[43575,9],[43598,2],[43610,2],[43715,24],[43767,10],[43783,2],[43791,2],[43799,9],[43815,1],[43823,1],[43880,8],[44014,2],[44026,6],[55204,12],[55239,4],[55292,4],[64110,2],[64218,38],[64263,12],[64280,5],[64311,1],[64317,1],[64319,1],[64322,1],[64325,1],[64450,17],[64832,16],[64912,2],[64968,40],[65022,2],[65050,6],[65107,1],[65127,1],[65132,4],[65141,1],[65277,2],[65280,1],[65471,3],[65480,2],[65488,2],[65496,2],[65501,3],[65511,1],[65519,10],[65534,2],[65548,1],[65575,1],[65595,1],[65598,1],[65614,2],[65630,34],[65787,5],[65795,4],[65844,3],[65935,1],[65948,4],[65953,47],[66046,130],[66205,3],[66257,15],[66300,4],[66340,9],[66379,5],[66427,5],[66462,1],[66500,4],[66518,42],[66718,2],[66730,6],[66772,4],[66812,4],[66856,8],[66916,11],[66928,144],[67383,9],[67414,10],[67432,152],[67590,2],[67593,1],[67638,1],[67641,3],[67645,2],[676
70,1],[67743,8],[67760,48],[67827,1],[67830,5],[67868,3],[67898,5],[67904,64],[68024,4],[68048,2],[68100,1],[68103,5],[68116,1],[68120,1],[68150,2],[68155,4],[68169,7],[68185,7],[68256,32],[68327,4],[68343,9],[68406,3],[68438,2],[68467,5],[68498,7],[68509,12],[68528,80],[68681,55],[68787,13],[68851,7],[68904,8],[68922,294],[69247,129],[69416,8],[69466,134],[69623,9],[69710,4],[69744,15],[69826,11],[69838,2],[69865,7],[69882,6],[69941,1],[69959,9],[70007,9],[70094,2],[70112,1],[70133,11],[70162,1],[70207,65],[70279,1],[70281,1],[70286,1],[70302,1],[70314,6],[70379,5],[70394,6],[70404,1],[70413,2],[70417,2],[70441,1],[70449,1],[70452,1],[70458,1],[70469,2],[70473,2],[70478,2],[70481,6],[70488,5],[70500,2],[70509,3],[70517,139],[70746,1],[70748,1],[70752,32],[70856,8],[70874,166],[71094,2],[71134,34],[71237,11],[71258,6],[71277,19],[71353,7],[71370,54],[71451,2],[71468,4],[71488,192],[71740,100],[71923,12],[71936,160],[72104,2],[72152,2],[72165,27],[72264,8],[72355,29],[72441,263],[72713,1],[72759,1],[72774,10],[72813,3],[72848,2],[72872,1],[72887,73],[72967,1],[72970,1],[73015,3],[73019,1],[73022,1],[73032,8],[73050,6],[73062,1],[73065,1],[73103,1],[73106,1],[73113,7],[73130,310],[73465,199],[73714,13],[74650,102],[74863,1],[74869,11],[75076,2748],[78895,1],[78905,4039],[83527,8633],[92729,7],[92767,1],[92778,4],[92784,96],[92910,2],[92918,10],[92998,10],[93018,1],[93026,1],[93048,5],[93072,688],[93851,101],[94027,4],[94088,7],[94112,64],[94180,28],[100344,8],[101107,9485],[110879,49],[110931,17],[110952,8],[111356,2308],[113771,5],[113789,3],[113801,7],[113818,2],[113828,4956],[119030,10],[119079,2],[119273,23],[119366,154],[119540,12],[119639,9],[119673,135],[119893,1],[119965,1],[119968,2],[119971,2],[119975,2],[119981,1],[119994,1],[119996,1],[120004,1],[120070,1],[120075,2],[120085,1],[120093,1],[120122,1],[120127,1],[120133,1],[120135,3],[120145,1],[120486,2],[120780,2],[121484,15],[121504,1],[121520,1360],[122887,1],[122905,2],[122914,1],[122917,1],[122923,213],[123181,3],[123198,2],[123210,4],[123216,368],[123642,5],[123648,1280],[125125,2],[125143,41],[125260,4],[125274,4],[125280,785],[126133,76],[126270,194],[126468,1],[126496,1],[126499,1],[126501,2],[126504,1],[126515,1],[126520,1],[126522,1],[126524,6],[126531,4],[126536,1],[126538,1],[126540,1],[126544,1],[126547,1],[126549,2],[126552,1],[126554,1],[126556,1],[126558,1],[126560,1],[126563,1],[126565,2],[126571,1],[126579,1],[126584,1],[126589,1],[126591,1],[126602,1],[126620,5],[126628,1],[126634,1],[126652,52],[126706,270],[127020,4],[127124,12],[127151,2],[127168,1],[127184,1],[127222,10],[127245,3],[127341,3],[127405,57],[127491,13],[127548,4],[127561,7],[127570,14],[127590,154],[128726,10],[128749,3],[128763,5],[128884,12],[128985,7],[129004,20],[129036,4],[129096,8],[129114,6],[129160,8],[129198,82],[129292,1],[129394,1],[129399,3],[129443,2],[129451,3],[129483,2],[129620,12],[129646,2],[129652,4],[129659,5],[129667,13],[129686,1386],[173783,41],[177973,11],[178206,2],[183970,14],[191457,3103],[195102,722403],[917506,30],[917632,128],[918000,65040],[1048574,2]]} 
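The category tables above ("Ll", "Lu", "Nd", "numeric", "Cn", ...) encode Unicode general categories as a mix of bare code points and [start, count, step] ranges. A minimal sketch of that encoding, mirroring the $B.unicode_tables construction loop added later in this diff (the expand name is illustrative only and is not part of brython.js):

// Each entry is either a single code point, or [start, count, optional step].
// [8657, 3, 2] marks 8657, 8659 and 8661; a bare 12294 marks only itself.
function expand(entries){
  var table = {}
  entries.forEach(function(item){
    if(Array.isArray(item)){
      var step = item[2] || 1
      for(var i = 0; i < item[1]; i++){
        table[item[0] + i * step] = true   // mark each code point in the range
      }
    }else{
      table[item] = true                   // single code point
    }
  })
  return table
}
// Example: expand([[48, 10], 178]) marks the ASCII digits 48..57 ("0".."9") plus code point 178.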
+$B.unicode_casefold={223:[115,115],304:[105,775],329:[700,110],496:[106,780],912:[953,776,769],944:[965,776,769],1415:[1381,1410],7830:[104,817],7831:[116,776],7832:[119,778],7833:[121,778],7834:[97,702],7838:[223],8016:[965,787],8018:[965,787,768],8020:[965,787,769],8022:[965,787,834],8064:[7936,953],8065:[7937,953],8066:[7938,953],8067:[7939,953],8068:[7940,953],8069:[7941,953],8070:[7942,953],8071:[7943,953],8072:[8064],8073:[8065],8074:[8066],8075:[8067],8076:[8068],8077:[8069],8078:[8070],8079:[8071],8080:[7968,953],8081:[7969,953],8082:[7970,953],8083:[7971,953],8084:[7972,953],8085:[7973,953],8086:[7974,953],8087:[7975,953],8088:[8080],8089:[8081],8090:[8082],8091:[8083],8092:[8084],8093:[8085],8094:[8086],8095:[8087],8096:[8032,953],8097:[8033,953],8098:[8034,953],8099:[8035,953],8100:[8036,953],8101:[8037,953],8102:[8038,953],8103:[8039,953],8104:[8096],8105:[8097],8106:[8098],8107:[8099],8108:[8100],8109:[8101],8110:[8102],8111:[8103],8114:[8048,953],8115:[945,953],8116:[940,953],8118:[945,834],8119:[945,834,953],8124:[8115],8130:[8052,953],8131:[951,953],8132:[942,953],8134:[951,834],8135:[951,834,953],8140:[8131],8146:[953,776,768],8147:[953,776,769],8150:[953,834],8151:[953,776,834],8162:[965,776,768],8163:[965,776,769],8164:[961,787],8166:[965,834],8167:[965,776,834],8178:[8060,953],8179:[969,953],8180:[974,953],8182:[969,834],8183:[969,834,953],8188:[8179],64256:[102,102],64257:[102,105],64258:[102,108],64259:[102,102,105],64260:[102,102,108],64261:[115,116],64262:[115,116],64275:[1396,1398],64276:[1396,1381],64277:[1396,1387],64278:[1406,1398],64279:[1396,1389]} +$B.unicode_bidi_whitespace=[9,10,11,12,13,28,29,30,31,32,133,5760,6158,8192,8193,8194,8195,8196,8197,8198,8199,8200,8201,8202,8232,8233,8287,12288] +$B.unicode_identifiers={"XID_Start":[95,[65,26],[97,26],170,181,186,[192,23],[216,31],[248,458],[710,12],[736,5],748,750,[880,5],[886,2],[891,3],902,[904,3],908,[910,20],[931,83],[1015,139],[1162,156],[1329,38],1369,[1377,39],[1488,27],[1520,3],[1569,42],[1646,2],[1649,99],1749,[1765,2],[1774,2],[1786,3],1791,1808,[1810,30],[1869,89],1969,[1994,33],[2036,2],2042,[2048,22],2074,2084,2088,[2308,54],2365,2384,[2392,10],2417,2418,[2425,7],[2437,8],[2447,2],[2451,22],[2474,7],2482,[2486,4],2493,2510,[2524,2],[2527,3],[2544,2],[2565,6],[2575,2],[2579,22],[2602,7],[2610,2],[2613,2],[2616,2],[2649,4],2654,[2674,3],[2693,9],[2703,3],[2707,22],[2730,7],[2738,2],[2741,5],2749,2768,[2784,2],[2821,8],[2831,2],[2835,22],[2858,7],[2866,2],[2869,5],2877,[2908,2],[2911,3],2929,2947,[2949,6],[2958,3],[2962,4],[2969,2],2972,[2974,2],[2979,2],[2984,3],[2990,12],3024,[3077,8],[3086,3],[3090,23],[3114,10],[3125,5],3133,[3160,2],[3168,2],[3205,8],[3214,3],[3218,23],[3242,10],[3253,5],3261,3294,[3296,2],[3333,8],[3342,3],[3346,23],[3370,16],3389,[3424,2],[3450,6],[3461,18],[3482,24],[3507,9],3517,[3520,7],[3585,48],3634,[3648,7],[3713,2],3716,[3719,2],3722,3725,[3732,4],[3737,7],[3745,3],3749,3751,[3754,2],[3757,4],3762,3773,[3776,5],3782,[3804,2],3840,[3904,8],[3913,36],[3976,4],[4096,43],4159,[4176,6],[4186,4],4193,[4197,2],[4206,3],[4213,13],4238,[4256,38],[4304,43],4348,[4352,329],[4682,4],[4688,7],4696,[4698,4],[4704,41],[4746,4],[4752,33],[4786,4],[4792,7],4800,[4802,4],[4808,15],[4824,57],[4882,4],[4888,67],[4992,16],[5024,85],[5121,620],[5743,17],[5761,26],[5792,75],[5870,3],[5888,13],[5902,4],[5920,18],[5952,18],[5984,13],[5998,3],[6016,52],6103,6108,[6176,88],[6272,41],6314,[6320,70],[6400,29],[6480,30],[6512,5],[6528,44],[6593,7],[6656,23],[6688,53],6823,[6917,47],[6981,7],[7043,30
],[7086,2],[7168,36],[7245,3],[7258,36],[7401,4],[7406,4],[7424,192],[7680,278],[7960,6],[7968,38],[8008,6],[8016,8],8025,8027,8029,[8031,31],[8064,53],[8118,7],8126,[8130,3],[8134,7],[8144,4],[8150,6],[8160,13],[8178,3],[8182,7],8305,8319,[8336,5],8450,8455,[8458,10],8469,8472,[8473,5],8484,8486,8488,[8490,16],[8508,4],[8517,5],8526,[8544,41],[11264,47],[11312,47],[11360,133],[11499,4],[11520,38],[11568,54],11631,[11648,23],[11680,7],[11688,7],[11696,7],[11704,7],[11712,7],[11720,7],[11728,7],[11736,7],12293,12294,12295,[12321,9],[12337,5],[12344,5],[12353,86],[12445,3],[12449,90],[12540,4],[12549,41],[12593,94],[12704,24],[12784,16],[13312,6582],[19968,20940],[40960,1165],[42192,46],[42240,269],[42512,16],[42538,2],[42560,32],[42594,13],42623,[42624,24],[42656,80],[42775,9],[42786,103],[42891,2],[43003,7],[43011,3],[43015,4],[43020,23],[43072,52],[43138,50],[43250,6],43259,[43274,28],[43312,23],[43360,29],[43396,47],43471,[43520,41],[43584,3],[43588,8],[43616,23],43642,[43648,48],43697,[43701,2],[43705,5],43712,43714,[43739,3],[43968,35],[44032,11172],[55216,23],[55243,49],[63744,302],[64048,62],[64112,106],[64256,7],[64275,5],64285,[64287,10],[64298,13],[64312,5],64318,[64320,2],[64323,2],[64326,108],[64467,139],[64612,218],[64848,64],[64914,54],[65008,10],65137,65139,65143,65145,65147,65149,[65151,126],[65313,26],[65345,26],[65382,56],[65440,31],[65474,6],[65482,6],[65490,6],[65498,3],[65536,12],[65549,26],[65576,19],[65596,2],[65599,15],[65616,14],[65664,123],[65856,53],[66176,29],[66208,49],[66304,31],[66352,27],[66432,30],[66464,36],[66504,8],[66513,5],[66560,158],[67584,6],67592,[67594,44],[67639,2],67644,[67647,23],[67840,22],[67872,26],68096,[68112,4],[68117,3],[68121,27],[68192,29],[68352,54],[68416,22],[68448,19],[68608,73],[69763,45],[73728,879],[74752,99],[77824,1071],[119808,85],[119894,71],[119966,2],119970,[119973,2],[119977,4],[119982,12],119995,[119997,7],[120005,65],[120071,4],[120077,8],[120086,7],[120094,28],[120123,4],[120128,5],120134,[120138,7],[120146,340],[120488,25],[120514,25],[120540,31],[120572,25],[120598,31],[120630,25],[120656,31],[120688,25],[120714,31],[120746,25],[120772,8],[131072,42711],[173824,4149],[194560,542]],"XID_Continue":[[48,10],[65,26],95,[97,26],170,181,183,186,[192,23],[216,31],[248,458],[710,12],[736,5],748,750,[768,117],[886,2],[891,3],902,903,[904,3],908,[910,20],[931,83],[1015,139],[1155,5],[1162,156],[1329,38],1369,[1377,39],[1425,45],1471,[1473,2],[1476,2],1479,[1488,27],[1520,3],[1552,11],[1569,62],[1632,10],[1646,102],1749,[1750,7],[1759,10],[1770,19],1791,1808,1809,[1810,57],[1869,101],[1984,54],2042,[2048,46],[2304,58],2364,2365,[2366,17],2384,[2385,5],[2392,12],[2406,10],2417,2418,[2425,7],2433,[2434,2],[2437,8],[2447,2],[2451,22],[2474,7],2482,[2486,4],2492,2493,[2494,7],[2503,2],[2507,4],2519,[2524,2],[2527,5],[2534,12],[2561,3],[2565,6],[2575,2],[2579,22],[2602,7],[2610,2],[2613,2],[2616,2],2620,[2622,5],[2631,2],[2635,3],2641,[2649,4],2654,[2662,16],[2689,3],[2693,9],[2703,3],[2707,22],[2730,7],[2738,2],[2741,5],2748,2749,[2750,8],[2759,3],[2763,3],2768,[2784,4],[2790,10],2817,[2818,2],[2821,8],[2831,2],[2835,22],[2858,7],[2866,2],[2869,5],2876,2877,2878,2879,2880,[2881,4],[2887,2],[2891,3],2902,2903,[2908,2],[2911,5],[2918,10],2929,2946,2947,[2949,6],[2958,3],[2962,4],[2969,2],2972,[2974,2],[2979,2],[2984,3],[2990,12],[3006,5],[3014,3],[3018,4],3024,3031,[3046,10],[3073,3],[3077,8],[3086,3],[3090,23],[3114,10],[3125,5],3133,[3134,7],[3142,3],[3146,4],[3157,2],[3160,2],[3168,4],[3174,10],[3202,2],[3205,8],[3214,3],[3218,23],[
3242,10],[3253,5],3260,3261,3262,3263,[3264,5],3270,[3271,2],[3274,4],[3285,2],3294,[3296,4],[3302,10],[3330,2],[3333,8],[3342,3],[3346,23],[3370,16],3389,[3390,7],[3398,3],[3402,4],3415,[3424,4],[3430,10],[3450,6],[3458,2],[3461,18],[3482,24],[3507,9],3517,[3520,7],3530,[3535,6],3542,[3544,8],[3570,2],[3585,58],[3648,15],[3664,10],[3713,2],3716,[3719,2],3722,3725,[3732,4],[3737,7],[3745,3],3749,3751,[3754,2],[3757,13],[3771,3],[3776,5],3782,[3784,6],[3792,10],[3804,2],3840,[3864,2],[3872,10],3893,3895,3897,[3902,10],[3913,36],[3953,20],[3974,6],[3984,8],[3993,36],4038,[4096,74],[4176,78],[4256,38],[4304,43],4348,[4352,329],[4682,4],[4688,7],4696,[4698,4],[4704,41],[4746,4],[4752,33],[4786,4],[4792,7],4800,[4802,4],[4808,15],[4824,57],[4882,4],[4888,67],4959,[4969,9],[4992,16],[5024,85],[5121,620],[5743,17],[5761,26],[5792,75],[5870,3],[5888,13],[5902,7],[5920,21],[5952,20],[5984,13],[5998,3],[6002,2],[6016,52],6070,[6071,29],6103,6108,6109,[6112,10],[6155,3],[6160,10],[6176,88],[6272,43],[6320,70],[6400,29],[6432,12],[6448,12],[6470,40],[6512,5],[6528,44],[6576,26],[6608,11],[6656,28],[6688,63],6752,6753,6754,[6755,26],6783,[6784,10],[6800,10],6823,[6912,76],[6992,10],[7019,9],[7040,43],[7086,12],[7168,56],[7232,10],[7245,49],[7376,3],[7380,31],[7424,231],[7677,281],[7960,6],[7968,38],[8008,6],[8016,8],8025,8027,8029,[8031,31],[8064,53],[8118,7],8126,[8130,3],[8134,7],[8144,4],[8150,6],[8160,13],[8178,3],[8182,7],[8255,2],8276,8305,8319,[8336,5],[8400,13],8417,[8421,12],8450,8455,[8458,10],8469,8472,[8473,5],8484,8486,8488,[8490,16],[8508,4],[8517,5],8526,[8544,41],[11264,47],[11312,47],[11360,133],[11499,7],[11520,38],[11568,54],11631,[11648,23],[11680,7],[11688,7],[11696,7],[11704,7],[11712,7],[11720,7],[11728,7],[11736,7],[11744,32],12293,12294,12295,[12321,15],[12337,5],[12344,5],[12353,86],[12441,2],[12445,3],[12449,90],[12540,4],[12549,41],[12593,94],[12704,24],[12784,16],[13312,6582],[19968,20940],[40960,1165],[42192,46],[42240,269],[42512,28],[42560,32],[42594,14],[42620,2],42623,[42624,24],[42656,82],[42775,9],[42786,103],[42891,2],[43003,45],[43072,52],[43136,69],[43216,10],[43232,24],43259,[43264,46],[43312,36],[43360,29],[43392,65],43471,[43472,10],[43520,55],[43584,14],[43600,10],[43616,23],43642,43643,[43648,67],[43739,3],[43968,43],44012,44013,[44016,10],[44032,11172],[55216,23],[55243,49],[63744,302],[64048,62],[64112,106],[64256,7],[64275,5],64285,64286,[64287,10],[64298,13],[64312,5],64318,[64320,2],[64323,2],[64326,108],[64467,139],[64612,218],[64848,64],[64914,54],[65008,10],[65024,16],[65056,7],[65075,2],[65101,3],65137,65139,65143,65145,65147,65149,[65151,126],[65296,10],[65313,26],65343,[65345,26],[65382,89],[65474,6],[65482,6],[65490,6],[65498,3],[65536,12],[65549,26],[65576,19],[65596,2],[65599,15],[65616,14],[65664,123],[65856,53],66045,[66176,29],[66208,49],[66304,31],[66352,27],[66432,30],[66464,36],[66504,8],[66513,5],[66560,158],[66720,10],[67584,6],67592,[67594,44],[67639,2],67644,[67647,23],[67840,22],[67872,26],68096,[68097,3],[68101,2],[68108,8],[68117,3],[68121,27],[68152,3],68159,[68192,29],[68352,54],[68416,22],[68448,19],[68608,73],[69760,59],[73728,879],[74752,99],[77824,1071],[119141,5],[119149,6],[119163,8],[119173,7],[119210,4],[119362,3],[119808,85],[119894,71],[119966,2],119970,[119973,2],[119977,4],[119982,12],119995,[119997,7],[120005,65],[120071,4],[120077,8],[120086,7],[120094,28],[120123,4],[120128,5],120134,[120138,7],[120146,340],[120488,25],[120514,25],[120540,31],[120572,25],[120598,31],[120630,25],[120656,31],[120688,25],[120714,31],[12
0746,25],[120772,8],[120782,50],[131072,42711],[173824,4149],[194560,542],[917760,240]]} +$B.unicode_tables={} +for(var gc in $B.unicode){$B.unicode_tables[gc]={} +$B.unicode[gc].forEach(function(item){if(Array.isArray(item)){var step=item[2]||1 +for(var i=0,nb=item[1];i < nb;i+=1){$B.unicode_tables[gc][item[0]+i*step]=true}}else{$B.unicode_tables[gc][item]=true}})} +for(var key in $B.unicode_identifiers){$B.unicode_tables[key]={} +for(const item of $B.unicode_identifiers[key]){if(Array.isArray(item)){for(var i=0;i < item[1];i++){$B.unicode_tables[key][item[0]+i]=true}}else{$B.unicode_tables[key][item]=true}}} +$B.is_unicode_cn=function(i){ +var cn=$B.unicode.Cn +for(var j=0,len=cn.length;j < len;j++){if(i >=cn[j][0]){if(i < cn[j][0]+cn[j][1]){return true}} +return false} +return false} +; ;(function($B){var bltns=$B.InjectBuiltins() eval(bltns) -if(!String.prototype.trim){ -String.prototype.trim=function(){var c -for(var i=0;i < this.length;i++){c=this.charCodeAt(i) -if([32,10,13,9,12,11,160,5760,6158,8192,8193,8194,8195,8196,8197,8198,8199,8200,8201,8202,8232,8233,8239,8287,12288,65279].indexOf(c)>-1){continue}else{break}} -for(var j=this.length-1;j >=i;j--){c=this.charCodeAt(j) -if([32,10,13,9,12,11,160,5760,6158,8192,8193,8194,8195,8196,8197,8198,8199,8200,8201,8202,8232,8233,8239,8287,12288,65279].indexOf(c)>-1){continue}else{break}} -return this.substring(i,j+1);}} -if(!String.prototype.trimLeft){ -String.prototype.trimLeft=function(){var c -for(var i=0;i < this.length;i++){c=this.charCodeAt(i) -if([32,10,13,9,12,11,160,5760,6158,8192,8193,8194,8195,8196,8197,8198,8199,8200,8201,8202,8232,8233,8239,8287,12288,65279].indexOf(c)>-1){continue}else{break}} -return this.substring(i)}} -if(!String.prototype.trimRight){String.prototype.trimRight=function(){ -var c -for(var j=this.length-1;j >=0;j--){c=this.charCodeAt(j) -if([32,10,13,9,12,11,160,5760,6158,8192,8193,8194,8195,8196,8197,8198,8199,8200,8201,8202,8232,8233,8239,8287,12288,65279].indexOf(c)>-1){continue}else{break}} -return this.substring(0,j+1)}} -var object=_b_.object +var unicode_tables=$B.unicode_tables var str={__class__:_b_.type,__dir__:object.__dir__,$infos:{__module__:"builtins",__name__:"str"},$is_class:true,$native:true} function normalize_start_end($){if($.start===null ||$.start===_b_.None){$.start=0} else if($.start < 0){$.start+=$.self.length @@ -10999,9 +11803,9 @@ str.__add__=function(self,other){if(!(typeof other==="string")){try{return getat catch(err){throw _b_.TypeError.$factory("Can't convert "+ $B.class_name(other)+" to str implicitly")}} return self+other} -str.__contains__=function(self,item){if(!(typeof item=="string")){throw _b_.TypeError.$factory("'in ' requires "+ +str.__contains__=function(self,item){if(! 
_b_.isinstance(item,str)){throw _b_.TypeError.$factory("'in ' requires "+ "string as left operand, not "+item.__class__)} -var nbcar=item.length +if(typeof item=="string"){var nbcar=item.length}else{var nbcar=_b_.len(item)} if(nbcar==0){return true} if(self.length==0){return nbcar==0} for(var i=0,len=self.length;i < len;i++){if(self.substr(i,nbcar)==item){return true}} @@ -11019,6 +11823,7 @@ return self} str.__format__=function(self,format_spec){var fmt=new $B.parse_format_spec(format_spec) if(fmt.sign !==undefined){throw _b_.ValueError.$factory( "Sign not allowed in string format specifier")} +if(fmt.precision){self=self.substr(0,fmt.precision)} fmt.align=fmt.align ||"<" return $B.format_width(preformat(self,fmt),fmt)} str.__getitem__=function(self,arg){if(isinstance(arg,_b_.int)){var pos=arg @@ -11033,7 +11838,8 @@ for(var i=start;i > stop;i+=step){res+=self.charAt(i)}} return res} if(isinstance(arg,_b_.bool)){return self.__getitem__(_b_.int.$factory(arg))} throw _b_.TypeError.$factory("string indices must be integers")} -var prefix=2,suffix=3,mask=(2**32-1) +var prefix=2,suffix=3,mask=(2**32-1),str_hash_cache={} +str.$nb_str_hash_cache=0 function fnv(p){if(p.length==0){return 0} var x=prefix x=(x ^(p.charCodeAt(0)<< 7))& mask @@ -11042,7 +11848,12 @@ x=(x ^ p.length)& mask x=(x ^ suffix)& mask if(x==-1){x=-2} return x} -str.__hash__=function(self){return fnv(self)} +str.__hash__=function(self){if(str_hash_cache[self]!==undefined){return str_hash_cache[self]} +str.$nb_str_hash_cache++ +if(str.$nb_str_hash_cache > 100000){ +str.$nb_str_hash_cache=0 +str_hash_cache={}} +return str_hash_cache[self]=fnv(self)} str.__init__=function(self,arg){self.valueOf=function(){return arg} self.toString=function(){return arg} return _b_.None} @@ -11168,8 +11979,14 @@ if(sign !==""){ret=sign+format_padding(ret,flags,true)}} if(flags.alternate){if(ret.charAt(0)==="-"){ret="-0o"+ret.slice(1)} else{ret="0o"+ret}} return format_padding(ret,flags)} -var single_char_format=function(val,flags){if(isinstance(val,str)&& val.length==1)return val -try{val=_b_.int.$factory(val)}catch(err){throw _b_.TypeError.$factory("%c requires int or char")} +function series_of_bytes(val,flags){if(val.__class__ && val.__class__.$buffer_protocol){var it=_b_.iter(val),ints=[] +while(true){try{ints.push(_b_.next(it))}catch(err){if(err.__class__===_b_.StopIteration){var b=_b_.bytes.$factory(ints) +return format_padding(_b_.bytes.decode(b,"ascii"),flags)} +throw err}}}else{try{bytes_obj=$B.$getattr(val,"__bytes__") +return format_padding(_b_.bytes.decode(bytes_obj),flags)}catch(err){if(err.__class__===_b_.AttributeError){throw _b_.TypeError.$factory("%b does not accept '"+ +$B.class_name(val)+"'")} +throw err}}} +var single_char_format=function(val,flags){if(isinstance(val,str)&& val.length==1){return val}else if(isinstance(val,bytes)&& val.source.length==1){val=val.source[0]}else{try{val=_b_.int.$factory(val)}catch(err){throw _b_.TypeError.$factory("%c requires int or char")}} return format_padding(chr(val),flags)} var num_flag=function(c,flags){if(c==="0" && ! flags.padding && ! flags.decimal_point && ! 
flags.left){flags.pad_char="0" return} @@ -11182,7 +11999,7 @@ flags.left=true} var space_flag=function(val,flags){flags.space=true} var sign_flag=function(val,flags){flags.sign=true} var alternate_flag=function(val,flags){flags.alternate=true} -var char_mapping={"s":str_format,"d":num_format,"i":num_format,"u":num_format,"o":octal_format,"r":repr_format,"a":ascii_format,"g":function(val,flags){return floating_point_format(val,false,flags)},"G":function(val,flags){return floating_point_format(val,true,flags)},"f":function(val,flags){return floating_point_decimal_format(val,false,flags)},"F":function(val,flags){return floating_point_decimal_format(val,true,flags)},"e":function(val,flags){return floating_point_exponential_format(val,false,flags)},"E":function(val,flags){return floating_point_exponential_format(val,true,flags)},"x":function(val,flags){return signed_hex_format(val,false,flags)},"X":function(val,flags){return signed_hex_format(val,true,flags)},"c":single_char_format,"0":function(val,flags){return num_flag("0",flags)},"1":function(val,flags){return num_flag("1",flags)},"2":function(val,flags){return num_flag("2",flags)},"3":function(val,flags){return num_flag("3",flags)},"4":function(val,flags){return num_flag("4",flags)},"5":function(val,flags){return num_flag("5",flags)},"6":function(val,flags){return num_flag("6",flags)},"7":function(val,flags){return num_flag("7",flags)},"8":function(val,flags){return num_flag("8",flags)},"9":function(val,flags){return num_flag("9",flags)},"-":neg_flag," ":space_flag,"+":sign_flag,".":decimal_point_flag,"#":alternate_flag} +var char_mapping={"b":series_of_bytes,"s":str_format,"d":num_format,"i":num_format,"u":num_format,"o":octal_format,"r":repr_format,"a":ascii_format,"g":function(val,flags){return floating_point_format(val,false,flags)},"G":function(val,flags){return floating_point_format(val,true,flags)},"f":function(val,flags){return floating_point_decimal_format(val,false,flags)},"F":function(val,flags){return floating_point_decimal_format(val,true,flags)},"e":function(val,flags){return floating_point_exponential_format(val,false,flags)},"E":function(val,flags){return floating_point_exponential_format(val,true,flags)},"x":function(val,flags){return signed_hex_format(val,false,flags)},"X":function(val,flags){return signed_hex_format(val,true,flags)},"c":single_char_format,"0":function(val,flags){return num_flag("0",flags)},"1":function(val,flags){return num_flag("1",flags)},"2":function(val,flags){return num_flag("2",flags)},"3":function(val,flags){return num_flag("3",flags)},"4":function(val,flags){return num_flag("4",flags)},"5":function(val,flags){return num_flag("5",flags)},"6":function(val,flags){return num_flag("6",flags)},"7":function(val,flags){return num_flag("7",flags)},"8":function(val,flags){return num_flag("8",flags)},"9":function(val,flags){return num_flag("9",flags)},"-":neg_flag," ":space_flag,"+":sign_flag,".":decimal_point_flag,"#":alternate_flag} var UnsupportedChar=function(){this.name="UnsupportedChar"} str.__mod__=function(self,args){var length=self.length,pos=0 |0,argpos=null,getitem if(_b_.isinstance(args,_b_.tuple)){argpos=0 |0}else{getitem=_b_.getattr(args,"__getitem__",_b_.None)} @@ -11193,7 +12010,7 @@ var rslt=kwarg_key.exec(s.substring(newpos)) if(! 
rslt){throw _b_.ValueError.$factory("incomplete format key")} var key=rslt[1] newpos+=rslt[0].length -try{var self=getitem(key)}catch(err){if(err.name==="KeyError"){throw err} +try{var self=getitem(key)}catch(err){if(err.__class__===_b_.KeyError){throw err} throw _b_.TypeError.$factory("format requires a mapping")} return get_string_value(s,self)} var $get_arg_string=function(s){ @@ -11246,11 +12063,17 @@ str.__repr__=function(self){var res=self res=self.replace(/\\/g,"\\\\") res=res.replace(new RegExp("\u0007","g"),"\\x07"). replace(new RegExp("\b","g"),"\\x08"). +replace(new RegExp("\u000b","g"),"\\x0b"). replace(new RegExp("\f","g"),"\\x0c"). replace(new RegExp("\n","g"),"\\n"). replace(new RegExp("\r","g"),"\\r"). replace(new RegExp("\t","g"),"\\t") res=res.replace(combining_re,"\u200B$1") +var repl='' +for(var i=0;i < res.length;i++){if($B.is_unicode_cn(res.codePointAt(i))){var s=res.codePointAt(i).toString(16) +while(s.length < 4){s='0'+s} +repl+='\\u'+s}else{repl+=res.charAt(i)}} +res=repl if(res.search('"')==-1 && res.search("'")==-1){return "'"+res+"'"}else if(self.search('"')==-1){return '"'+res+'"'} var qesc=new RegExp("'","g") res="'"+res.replace(qesc,"\\'")+"'" @@ -11270,9 +12093,14 @@ for(var $op in $comps){eval("str.__"+$comps[$op]+'__ = '+$comp_func.replace(/>/g $B.make_rmethods(str) var $notimplemented=function(self,other){throw NotImplementedError.$factory( "OPERATOR not implemented for class str")} -var from_unicode=["title","capitalize","casefold","islower","isupper","istitle","isspace","isalpha","isalnum","isdecimal","isdigit","isnumeric","isidentifier","isprintable","lower","swapcase","upper" -] -from_unicode.forEach(function(name){str[name]=unicode[name]}) +str.capitalize=function(self){var $=$B.args("capitalize",1,{self},["self"],arguments,{},null,null) +if(self.length==0){return ""} +return self.charAt(0).toUpperCase()+self.substr(1)} +str.casefold=function(self){var $=$B.args("casefold",1,{self},["self"],arguments,{},null,null),res="",char,cf +for(var i=0,len=self.length;i < len;i++){char=self.charCodeAt(i) +cf=$B.unicode_casefold[char] +if(cf){cf.forEach(function(cp){res+=String.fromCharCode(cp)})}else{res+=self.charAt(i).toLowerCase()}} +return res} str.center=function(){var $=$B.args("center",3,{self:null,width:null,fillchar:null},["self","width","fillchar"],arguments,{fillchar:" "},null,null),self=$.self if($.width <=self.length){return self} var pad=parseInt(($.width-self.length)/2),res=$.fillchar.repeat(pad) @@ -11417,7 +12245,7 @@ if(fmt.conv=="a"){value=_b_.ascii(value)} else if(fmt.conv=="r"){value=_b_.repr(value)} else if(fmt.conv=="s"){value=_b_.str.$factory(value)} if(value.$is_class ||value.$factory){ -res+=value.__class__.__format__(value,fmt.spec)}else{res+=_b_.getattr(value,"__format__")(fmt.spec)}} +res+=value.__class__.__format__(value,fmt.spec)}else{res+=$B.$getattr(value,"__format__")(fmt.spec)}} return res} str.format_map=function(self){throw NotImplementedError.$factory( "function format_map not implemented yet")} @@ -11428,6 +12256,77 @@ return res} str.isascii=function(self){ for(var i=0,len=self.length;i < len;i++){if(self.charCodeAt(i)> 127){return false}} return true} +str.isalnum=function(self){ +var $=$B.args("isalnum",1,{self:null},["self"],arguments,{},null,null),char +for(var i=0,len=self.length;i < len;i++){char=self.charCodeAt(i) +if(unicode_tables.Ll[char]|| +unicode_tables.Lu[char]|| +unicode_tables.Lm[char]|| +unicode_tables.Lt[char]|| +unicode_tables.Lo[char]|| +unicode_tables.Nd[char]|| +unicode_tables.digits[char]|| 
+unicode_tables.numeric[char]){continue} +return false} +return true} +str.isalpha=function(self){ +var $=$B.args("isalpha",1,{self:null},["self"],arguments,{},null,null),char +for(var i=0,len=self.length;i < len;i++){char=self.charCodeAt(i) +if(unicode_tables.Ll[char]|| +unicode_tables.Lu[char]|| +unicode_tables.Lm[char]|| +unicode_tables.Lt[char]|| +unicode_tables.Lo[char]){continue} +return false} +return true} +str.isdecimal=function(self){ +var $=$B.args("isdecimal",1,{self:null},["self"],arguments,{},null,null),char +for(var i=0,len=self.length;i < len;i++){char=self.charCodeAt(i) +if(! unicode_tables.Nd[char]){return false}} +return self.length > 0} +str.isdigit=function(self){ +var $=$B.args("isdigit",1,{self:null},["self"],arguments,{},null,null),char +for(var i=0,len=self.length;i < len;i++){char=self.charCodeAt(i) +if(! unicode_tables.digits[char]){return false}} +return self.length > 0} +str.isidentifier=function(self){ +var $=$B.args("isidentifier",1,{self:null},["self"],arguments,{},null,null),char +if(self.length==0){return false} +else if(unicode_tables.XID_Start[self.charCodeAt(0)]===undefined){return false}else{for(var i=1,len=self.length;i < len;i++){if(unicode_tables.XID_Continue[self.charCodeAt(i)]===undefined){return false}}} +return true} +str.islower=function(self){ +var $=$B.args("islower",1,{self:null},["self"],arguments,{},null,null),has_cased=false,char +for(var i=0,len=self.length;i < len;i++){char=self.charCodeAt(i) +if(unicode_tables.Ll[char]){has_cased=true;continue} +else if(unicode_tables.Lu[char]||unicode_tables.Lt[char]){return false}} +return has_cased} +str.isnumeric=function(self){ +var $=$B.args("isnumeric",1,{self:null},["self"],arguments,{},null,null) +for(var i=0,len=self.length;i < len;i++){if(! unicode_tables.numeric[self.charCodeAt(i)]){return false}} +return self.length > 0} +var unprintable={},unprintable_gc=['Cc','Cf','Co','Cs','Zl','Zp','Zs'] +str.isprintable=function(self){ +if(Object.keys(unprintable).length==0){for(var i=0;i < unprintable_gc.length;i++){var table=unicode_tables[unprintable_gc[i]] +for(var cp in table){unprintable[cp]=true}} +unprintable[32]=true} +var $=$B.args("isprintable",1,{self:null},["self"],arguments,{},null,null) +for(var i=0,len=self.length;i < len;i++){if(unprintable[self.charCodeAt(i)]){return false}} +return true} +str.isspace=function(self){ +var $=$B.args("isspace",1,{self:null},["self"],arguments,{},null,null),char +for(var i=0,len=self.length;i < len;i++){char=self.charCodeAt(i) +if(! 
unicode_tables.Zs[char]&& +$B.unicode_bidi_whitespace.indexOf(char)==-1){return false}} +return self.length > 0} +str.istitle=function(self){ +var $=$B.args("istitle",1,{self:null},["self"],arguments,{},null,null) +return self.length > 0 && str.title(self)==self} +str.isupper=function(self){ +var $=$B.args("islower",1,{self:null},["self"],arguments,{},null,null),has_cased=false,char +for(var i=0,len=self.length;i < len;i++){char=self.charCodeAt(i) +if(unicode_tables.Lu[char]){has_cased=true;continue} +else if(unicode_tables.Ll[char]||unicode_tables.Lt[char]){return false}} +return has_cased} str.join=function(){var $=$B.args("join",2,{self:null,iterable:null},["self","iterable"],arguments,{},null,null) var iterable=_b_.iter($.iterable),res=[],count=0 while(1){try{var obj2=_b_.next(iterable) @@ -11440,12 +12339,14 @@ return res.join($.self)} str.ljust=function(self){var $=$B.args("ljust",3,{self:null,width:null,fillchar:null},["self","width","fillchar"],arguments,{fillchar:" "},null,null) if($.width <=self.length){return self} return self+$.fillchar.repeat($.width-self.length)} +str.lower=function(self){var $=$B.args("lower",1,{self:null},["self"],arguments,{},null,null) +return self.toLowerCase()} str.lstrip=function(self,x){var $=$B.args("lstrip",2,{self:null,chars:null},["self","chars"],arguments,{chars:_b_.None},null,null) if($.chars===_b_.None){return $.self.trimLeft()} for(var i=0;i < $.self.length;i++){if($.chars.indexOf($.self.charAt(i))===-1){return $.self.substring(i)}} return ""} str.maketrans=function(){var $=$B.args("maketrans",3,{x:null,y:null,z:null},["x","y","z"],arguments,{y:null,z:null},null,null) -var _t=_b_.dict.$factory() +var _t=$B.empty_dict() if($.y===null && $.z===null){ if(! _b_.isinstance($.x,_b_.dict)){throw _b_.TypeError.$factory( "maketrans only argument must be a dict")} @@ -11474,6 +12375,16 @@ check_str($.sep) var i=$.self.indexOf($.sep) if(i==-1){return _b_.tuple.$factory([$.self,"",""])} return _b_.tuple.$factory([$.self.substring(0,i),$.sep,$.self.substring(i+$.sep.length)])} +str.removeprefix=function(){var $=$B.args("removeprefix",2,{self:null,prefix:null},["self","prefix"],arguments,{},null,null) +if(!_b_.isinstance($.prefix,str)){throw _b_.ValueError.$factory("prefix should be str, not "+ +`'${$B.class_name($.prefix)}'`)} +if(str.startswith($.self,$.prefix)){return $.self.substr($.prefix.length)} +return $.self.substr(0)} +str.removesuffix=function(){var $=$B.args("removesuffix",2,{self:null,prefix:null},["self","suffix"],arguments,{},null,null) +if(!_b_.isinstance($.suffix,str)){throw _b_.ValueError.$factory("suffix should be str, not "+ +`'${$B.class_name($.prefix)}'`)} +if($.suffix.length > 0 && str.endswith($.self,$.suffix)){return $.self.substr(0,$.self.length-$.suffix.length)} +return $.self.substr(0)} function $re_escape(str){var specials="[.*+?|()$^" for(var i=0,len=specials.length;i < len;i++){var re=new RegExp("\\"+specials.charAt(i),"g") str=str.replace(re,"\\"+specials.charAt(i))} @@ -11560,20 +12471,16 @@ s=""}else{s+=self.charAt(pos) pos++}} res.push(s) return res}} -str.splitlines=function(self){var $=$B.args("splitlines",2,{self:null,keepends:null},["self","keepends"],arguments,{keepends:false},null,null) -if(! 
_b_.isinstance($.keepends,[_b_.bool,_b_.int])){throw _b_.TypeError.$factory("integer argument expected, got "+ +str.splitlines=function(self){var $=$B.args('splitlines',2,{self:null,keepends:null},['self','keepends'],arguments,{keepends:false},null,null) +if(!_b_.isinstance($.keepends,[_b_.bool,_b_.int])){throw _b_.TypeError('integer argument expected, got '+ $B.get_class($.keepends).__name)} -var keepends=_b_.int.$factory($.keepends) -if(keepends){var res=[],start=pos,pos=0,self=$.self -while(pos < self.length){if(self.substr(pos,2)=="\r\n"){res.push(self.substring(start,pos+2)) -start=pos+2 -pos=start}else if(self.charAt(pos)=="\r" ||self.charAt(pos)=="\n"){res.push(self.substring(start,pos+1)) -start=pos+1 -pos=start}else{pos++}} -var rest=self.substr(start) -if(rest){res.push(rest)} -return res}else{var self=$.self.replace(/[\r\n]$/,"") -return self.split(/\n|\r\n|\r/)}} +var keepends=_b_.int.$factory($.keepends),res=[],self=$.self,start=0,pos=0 +if(!self.length){return res} +while(pos < self.length){if(self.substr(pos,2)=='\r\n'){res.push(self.slice(start,keepends ? pos+2 :pos)) +start=pos=pos+2}else if(self[pos]=='\r' ||self[pos]=='\n'){res.push(self.slice(start,keepends ? pos+1 :pos)) +start=pos=pos+1}else{pos++}} +if(start < self.length){res.push(self.slice(start))} +return res} str.startswith=function(){ var $=$B.args("startswith",4,{self:null,prefix:null,start:null,end:null},["self","prefix","start","end"],arguments,{start:0,end:null},null,null) normalize_start_end($) @@ -11590,10 +12497,23 @@ if($.chars===_b_.None){return $.self.trim()} for(var i=0;i < $.self.length;i++){if($.chars.indexOf($.self.charAt(i))==-1){break}} for(var j=$.self.length-1;j >=i;j--){if($.chars.indexOf($.self.charAt(j))==-1){break}} return $.self.substring(i,j+1)} +str.swapcase=function(self){var $=$B.args("swapcase",1,{self},["self"],arguments,{},null,null),res="",char +for(var i=0,len=self.length;i < len;i++){char=self.charCodeAt(i) +if(unicode_tables.Ll[char]){res+=self.charAt(i).toUpperCase()}else if(unicode_tables.Lu[char]){res+=self.charAt(i).toLowerCase()}else{res+=self.charAt(i)}} +return res} +str.title=function(self){var $=$B.args("title",1,{self},["self"],arguments,{},null,null),state,char,res="" +for(var i=0,len=self.length;i < len;i++){char=self.charCodeAt(i) +if(unicode_tables.Ll[char]){if(! state){res+=self.charAt(i).toUpperCase() +state="word"}else{res+=self.charAt(i)}}else if(unicode_tables.Lu[char]||unicode_tables.Lt[char]){res+=state ? 
self.charAt(i).toLowerCase():self.charAt(i) +state="word"}else{state=null +res+=self.charAt(i)}} +return res} str.translate=function(self,table){var res=[],getitem=$B.$getattr(table,"__getitem__") for(var i=0,len=self.length;i < len;i++){try{var repl=getitem(self.charCodeAt(i)) -if(repl !==_b_.None){res.push(String.fromCharCode(repl))}}catch(err){res.push(self.charAt(i))}} +if(repl !==_b_.None){if(typeof repl=="string"){res.push(repl)}else if(typeof repl=="number"){res.push(String.fromCharCode(repl))}}}catch(err){res.push(self.charAt(i))}} return res.join("")} +str.upper=function(self){var $=$B.args("upper",1,{self:null},["self"],arguments,{},null,null) +return self.toUpperCase()} str.zfill=function(self,width){var $=$B.args("zfill",2,{self:null,width:null},["self","width"],arguments,{},null,null) if($.width <=self.length){return self} switch(self.charAt(0)){case "+": @@ -11602,7 +12522,10 @@ return self.charAt(0)+ "0".repeat($.width-self.length)+self.substr(1) default: return "0".repeat(width-self.length)+self}} -str.$factory=function(arg,encoding,errors){if(arg===undefined){return ""} +str.$factory=function(arg,encoding,errors){if(arguments.length==0){return ""} +if(arg===undefined){return $B.UndefinedClass.__str__()} +if(encoding !==undefined){ +var $=$B.args("str",3,{arg:null,encoding:null,errors:null},["arg","encoding","errors"],arguments,{encoding:"utf-8",errors:"strict"},null,null),encoding=$.encoding,errors=$.errors} switch(typeof arg){case "string": return str.__str__(arg) case "number": @@ -11612,19 +12535,20 @@ var func=$B.$getattr(arg.__class__,"__str__") return func(arg)} if(arg.__class__ && arg.__class__===_b_.bytes && encoding !==undefined){ -var $=$B.args("str",3,{arg:null,encoding:null,errors:null},["arg","encoding","errors"],arguments,{encoding:"utf-8",errors:"strict"},null,null) return _b_.bytes.decode(arg,$.encoding,$.errors)} -var f=$B.$getattr(arg,"__str__",null) -if(f===null || +var klass=arg.__class__ ||$B.get_class(arg) +if(klass===undefined){return $B.JSObj.__str__($B.JSObj.$factory(arg))} +var method=$B.$getattr(klass ,"__str__",null) +if(method===null || (arg.__class__ && arg.__class__ !==_b_.object && -f.$infos && f.$infos.__func__===_b_.object.__str__)){var f=$B.$getattr(arg,"__repr__")}} +method.$infos && method.$infos.__func__===_b_.object.__str__)){var method=$B.$getattr(klass,"__repr__")}} catch(err){console.log("no __str__ for",arg) console.log("err ",err) if($B.debug > 1){console.log(err)} console.log("Warning - no method __str__ or __repr__, "+ "default to toString",arg) throw err} -return $B.$call(f)()} +return $B.$call(method)(arg)} str.__new__=function(cls){if(cls===undefined){throw _b_.TypeError.$factory("str.__new__(): not enough arguments")} return{__class__:cls}} $B.set_func_names(str,"builtins") @@ -11728,7 +12652,13 @@ pos=i+1 break}}else if(car=="}"){if(string.charAt(i+1)==car){current+=car i+=2}else{throw Error(" f-string: single '}' is not allowed")}}else{current+=car i++}} -pos=i+1}else{ +pos=i+1}else if(ctype=="debug"){ +while(string.charAt(i)==" "){i++} +if(string.charAt(i)=="}"){ +elts.push(current) +ctype=null +current="" +pos=i+1}}else{ var i=pos,nb_braces=1,nb_paren=0,current=new fstring_expression() while(i < string.length){car=string.charAt(i) if(car=="{" && nb_paren==0){nb_braces++ @@ -11760,44 +12690,119 @@ i=end+3}}else{var end=string.indexOf('"',i+1) if(end==-1){throw Error("f-string: unterminated string")}else{current.expression+=string.substring(i,end+1) i=end+1}}}else if(nb_paren==0 && car==":"){current.fmt=true 
current.expression+=car -i++}else{current.expression+=car +i++}else if(car=="="){ +var ce=current.expression,last_char=ce.charAt(ce.length-1),last_char_re=('()'.indexOf(last_char)>-1 ? "\\" :"")+last_char +if(ce.length==0 || +string.charAt(i+1)=="=" || +"=!<>:".search(last_char_re)>-1){current.expression+=car+string.charAt(i+1) +i+=2}else{ +tail=car +while(string.charAt(i+1).match(/\s/)){tail+=string.charAt(i+1) +i++} +elts.push(current.expression+tail) +while(ce.match(/\s$/)){ce=ce.substr(0,ce.length-1)} +current.expression=ce +ctype="debug" +i++}}else{current.expression+=car i++}} if(nb_braces > 0){throw Error("f-string: expected '}'")}}} if(current.length > 0){elts.push(current)} -return elts}})(__BRYTHON__) +return elts} +var surrogate=str.$surrogate=$B.make_class("surrogate_string",function(s){ +var items=[] +for(var i=0,len=s.length;i < len;i++){var code=s.charCodeAt(i) +if(code >=0xD800 && code <=0xDBFF){i++ +var low=s.charCodeAt(i) +code=((code-0xD800)*0x400)+(low-0xDC00)+0x10000} +items.push(String.fromCodePoint(code))} +return{ +__class__:str.$surrogate,items:items}}) +surrogate.__mro__=[str,object] +surrogate.__contains__=function(self,other){return str.__contains__(self.items.join(''),other)} +surrogate.__getitem__=function(self,arg){if(isinstance(arg,_b_.int)){var pos=arg +if(arg < 0){pos+=self.items.length} +if(pos >=0 && pos < self.items.length){if(self.items[pos].length==2){return surrogate.$factory(self.items[pos])} +return self.items[pos]} +throw _b_.IndexError.$factory("string index out of range")} +if(isinstance(arg,slice)){var s=_b_.slice.$conv_for_seq(arg,self.items.length),start=s.start,stop=s.stop,step=s.step +var res="",i=null +if(step > 0){if(stop <=start){return ""} +for(var i=start;i < stop;i+=step){res+=self.items[i]}}else{if(stop >=start){return ''} +for(var i=start;i > stop;i+=step){res+=self.items[i]}} +return res} +if(isinstance(arg,_b_.bool)){return surrogate.__getitem__(self,_b_.int.$factory(arg))} +throw _b_.TypeError.$factory("string indices must be integers")} +surrogate.__hash__=function(self){return str.__hash__(self.items.join(''))} +surrogate.__iter__=function(self){return str_iterator.$factory(self.items)} +surrogate.__len__=function(self){return self.items.length} +surrogate.__repr__=function(self){return str.__repr__(self.items.join(''))} +surrogate.__str__=function(self){return str.__str__(self.items.join(''))} +$B.set_func_names(surrogate,"builtins")})(__BRYTHON__) ; -;(function($B){var bltns=$B.InjectBuiltins() +;(function($B){ +var bltns=$B.InjectBuiltins() eval(bltns) -var object=_b_.object,str_hash=_b_.str.__hash__,$N=_b_.None -var set_ops=["eq","add","sub","and","or","xor","le","lt","ge","gt"] -$B.make_view=function(name,set_like){var klass=$B.make_class(name,function(items){return{ -__class__:klass,__dict__:_b_.dict.$factory(),counter:-1,items:items,len:items.length}}) -if(set_like){for(var i=0,len=set_ops.length;i < len;i++){var op="__"+set_ops[i]+"__" +var str_hash=_b_.str.__hash__,$N=_b_.None +var set_ops=["eq","le","lt","ge","gt","sub","rsub","and","or","xor"] +function is_sublist(t1,t2){ +for(var i=0,ilen=t1.length;i < ilen;i++){var x=t1[i],flag=false +for(var j=0,jlen=t2.length;j < jlen;j++){if($B.rich_comp("__eq__",x,t2[j])){t2.splice(j,1) +flag=true +break}} +if(! flag){return false}} +return true} +dict_view_op={__eq__:function(t1,t2){return t1.length==t2.length && is_sublist(t1,t2)},__ne__:function(t1,t2){return ! 
dict_view_op.__eq__(t1,t2)},__lt__:function(t1,t2){return t1.length < t2.length && is_sublist(t1,t2)},__gt__:function(t1,t2){return dict_view_op.__lt__(t2,t1)},__le__:function(t1,t2){return t1.length <=t2.length && is_sublist(t1,t2)},__ge__:function(t1,t2){return dict_view_op.__le__(t2,t1)},__and__:function(t1,t2){var items=[] +for(var i=0,ilen=t1.length;i < ilen;i++){var x=t1[i] +flag=false +for(var j=0,jlen=t2.length;j < jlen;j++){if($B.rich_comp("__eq__",x,t2[j])){t2.splice(j,1) +items.push(x) +break}}} +return items},__or__:function(t1,t2){var items=t1 +for(var j=0,jlen=t2.length;j < jlen;j++){var y=t2[j],flag=false +for(var i=0,ilen=t1.length;i < ilen;i++){if($B.rich_comp("__eq__",y,t1[i])){t2.splice(j,1) +flag=true +break}} +if(! flag){items.push(y)}} +return items}} +$B.make_view=function(name){var klass=$B.make_class(name,function(items,set_like){return{ +__class__:klass,__dict__:$B.empty_dict(),counter:-1,items:items,len:items.length,set_like:set_like}}) +for(var i=0,len=set_ops.length;i < len;i++){var op="__"+set_ops[i]+"__" klass[op]=(function(op){return function(self,other){ -return _b_.set[op](_b_.set.$factory(self),_b_.set.$factory(other))}})(op)}} +if(self.set_like){return _b_.set[op](_b_.set.$factory(self),_b_.set.$factory(other))}else{ +if(other.__class__ !==klass){return false} +var other_items=_b_.list.$factory(other) +return dict_view_op[op](self.items,other_items)}}})(op)} klass.__iter__=function(self){var it=klass.$iterator.$factory(self.items) it.len_func=self.len_func return it} +klass.__len__=function(self){return self.len} klass.__repr__=function(self){return klass.$infos.__name__+'('+_b_.repr(self.items)+')'} +$B.set_func_names(klass,"builtins") return klass} -function dict_iterator_next(self){if(self.len_func()!=self.len){throw RuntimeError.$factory("dictionary changed size during iteration")} +function dict_iterator_next(self){if(self.len_func()!=self.len){throw _b_.RuntimeError.$factory("dictionary changed size during iteration")} self.counter++ if(self.counter < self.items.length){return self.items[self.counter]} throw _b_.StopIteration.$factory("StopIteration")} -var dict={__class__:_b_.type,__mro__:[object],$infos:{__module__:"builtins",__name__:"dict"},$is_class:true,$native:true} +var dict={__class__:_b_.type,__mro__:[_b_.object],$infos:{__module__:"builtins",__name__:"dict"},$is_class:true,$native:true} +dict.$to_obj=function(d){ +var res={} +for(var key in d.$string_dict){res[key]=d.$string_dict[key][0]} +return res} function to_list(d,ix){var items=[],item if(d.$jsobj){items=[] for(var attr in d.$jsobj){if(attr.charAt(0)!="$"){var val=d.$jsobj[attr] if(val===undefined){val=_b_.NotImplemented} else if(val===null){val=$N} -items.push([attr,val])}}}else{for(var k in d.$numeric_dict){items.push([parseFloat(k),d.$numeric_dict[k]])} +items.push([attr,val])}}}else if(_b_.isinstance(d,_b_.dict)){for(var k in d.$numeric_dict){items.push([parseFloat(k),d.$numeric_dict[k]])} for(var k in d.$string_dict){items.push([k,d.$string_dict[k]])} -for(var k in d.$object_dict){d.$object_dict[k].forEach(function(item){items.push(item)})}} +for(var k in d.$object_dict){d.$object_dict[k].forEach(function(item){items.push(item)})} +items.sort(function(a,b){return a[1][1]-b[1][1]}) +items=items.map(function(item){return[item[0],item[1][0]]})} if(ix !==undefined){return items.map(function(item){return item[ix]})}else{items.__class__=_b_.tuple return items.map(function(item){item.__class__=_b_.tuple;return item} )}} $B.dict_to_list=to_list -function 
dict_iterator_next(self){if(self.len_func()!=self.len){throw RuntimeError.$factory("dictionary changed size during iteration")} +function dict_iterator_next(self){if(self.len_func()!=self.len){throw _b_.RuntimeError.$factory("dictionary changed size during iteration")} self.counter++ if(self.counter < self.items.length){return self.items[self.counter]} throw _b_.StopIteration.$factory("StopIteration")} @@ -11812,7 +12817,11 @@ if(pairs !==undefined){for(var i=0,len=pairs.length;i < len;i++){if($B.rich_comp return-1} dict.__bool__=function(){var $=$B.args("__bool__",1,{self:null},["self"],arguments,{},null,null) return dict.__len__($.self)> 0} +dict.__class_getitem__=function(cls,item){ +if(! Array.isArray(item)){item=[item]} +return $B.GenericAlias.$factory(cls,item)} dict.__contains__=function(){var $=$B.args("__contains__",2,{self:null,key:null},["self","key"],arguments,{},null,null),self=$.self,key=$.key +if(self.$is_namespace){key=$B.to_alias(key)} if(self.$jsobj){return self.$jsobj[key]!==undefined} switch(typeof key){case "string": return self.$string_dict[key]!==undefined @@ -11825,88 +12834,92 @@ if(self.$numeric_dict[hash]!==undefined && $B.rich_comp("__eq__",key,hash)){return true} return rank(self,hash,key)>-1} dict.__delitem__=function(){var $=$B.args("__eq__",2,{self:null,arg:null},["self","arg"],arguments,{},null,null),self=$.self,arg=$.arg -if(self.$jsobj){if(self.$jsobj[arg]===undefined){throw KeyError.$factory(arg)} +if(self.$jsobj){if(self.$jsobj[arg]===undefined){throw _b_.KeyError.$factory(arg)} delete self.$jsobj[arg] return $N} switch(typeof arg){case "string": -if(self.$string_dict[arg]===undefined){throw KeyError.$factory(_b_.str.$factory(arg))} +if(self.$string_dict[arg]===undefined){throw _b_.KeyError.$factory(_b_.str.$factory(arg))} delete self.$string_dict[arg] delete self.$str_hash[str_hash(arg)] self.$version++ return $N case "number": -if(self.$numeric_dict[arg]===undefined){throw KeyError.$factory(_b_.str.$factory(arg))} +if(self.$numeric_dict[arg]===undefined){throw _b_.KeyError.$factory(_b_.str.$factory(arg))} delete self.$numeric_dict[arg] self.$version++ return $N} var hash=_b_.hash(arg),ix -if((ix=rank(self,hash,arg))>-1){self.$object_dict[hash].splice(ix,1)}else{throw KeyError.$factory(_b_.str.$factory(arg))} +if((ix=rank(self,hash,arg))>-1){self.$object_dict[hash].splice(ix,1)}else{throw _b_.KeyError.$factory(_b_.str.$factory(arg))} self.$version++ return $N} dict.__eq__=function(){var $=$B.args("__eq__",2,{self:null,other:null},["self","other"],arguments,{},null,null),self=$.self,other=$.other -if(! isinstance(other,dict)){return false} +if(! 
_b_.isinstance(other,dict)){return false} if(self.$jsobj){self=jsobj2dict(self.$jsobj)} if(other.$jsobj){other=jsobj2dict(other.$jsobj)} if(dict.__len__(self)!=dict.__len__(other)){return false} if(self.$string_dict.length !=other.$string_dict.length){return false} -for(var k in self.$numeric_dict){if(other.$numeric_dict.hasOwnProperty(k)){if(!$B.rich_comp("__eq__",other.$numeric_dict[k],self.$numeric_dict[k])){return false}}else if(other.$object_dict.hasOwnProperty(k)){var pairs=other.$object_dict[k],flag=false +for(var k in self.$numeric_dict){if(other.$numeric_dict.hasOwnProperty(k)){if(!$B.rich_comp("__eq__",other.$numeric_dict[k][0],self.$numeric_dict[k][0])){return false}}else if(other.$object_dict.hasOwnProperty(k)){var pairs=other.$object_dict[k],flag=false for(var i=0,len=pairs.length;i < len;i++){if($B.rich_comp("__eq__",k,pairs[i][0])&& $B.rich_comp("__eq__",self.$numeric_dict[k],pairs[i][1])){flag=true break}} if(! flag){return false}}else{return false}} for(var k in self.$string_dict){if(!other.$string_dict.hasOwnProperty(k)|| -!$B.rich_comp("__eq__",other.$string_dict[k],self.$string_dict[k])){return false}} +!$B.rich_comp("__eq__",other.$string_dict[k][0],self.$string_dict[k][0])){return false}} for(var hash in self.$object_dict){var pairs=self.$object_dict[hash] var other_pairs=[] if(other.$numeric_dict[hash]!==undefined){other_pairs.push([hash,other.$numeric_dict[hash]])} if(other.$object_dict[hash]!==undefined){other_pairs=other_pairs.concat(other.$object_dict[hash])} if(other_pairs.length==0){return false} for(var i=0,len_i=pairs.length;i < len_i;i++){var flag=false -var key=pairs[i][0],value=pairs[i][1] +var key=pairs[i][0],value=pairs[i][1][0] for(var j=0,len_j=other_pairs.length;j < len_j;j++){if($B.rich_comp("__eq__",key,other_pairs[j][0])&& -$B.rich_comp("__eq__",value,other_pairs[j][1])){flag=true +$B.rich_comp("__eq__",value,other_pairs[j][1][0])){flag=true break}} if(! 
flag){return false}}} return true} dict.__getitem__=function(){var $=$B.args("__getitem__",2,{self:null,arg:null},["self","arg"],arguments,{},null,null),self=$.self,arg=$.arg -if(self.$jsobj){if(!self.$jsobj.hasOwnProperty(arg)){throw _b_.KeyError.$factory(str.$factory(arg))}else if(self.$jsobj[arg]===undefined){return _b_.NotImplemented}else if(self.$jsobj[arg]===null){return $N} +return dict.$getitem(self,arg)} +dict.$getitem=function(self,arg){if(self.$jsobj){if(self.$jsobj[arg]===undefined){if(self.$jsobj.hasOwnProperty(arg)){return $B.Undefined} +throw _b_.KeyError.$factory(arg)} return self.$jsobj[arg]} switch(typeof arg){case "string": -if(self.$string_dict[arg]!==undefined){return self.$string_dict[arg]} +if(self.$string_dict[arg]!==undefined){return self.$string_dict[arg][0]} break case "number": -if(self.$numeric_dict[arg]!==undefined){return self.$numeric_dict[arg]} +if(self.$numeric_dict[arg]!==undefined){return self.$numeric_dict[arg][0]} break} var hash=_b_.hash(arg),_eq=function(other){return $B.rich_comp("__eq__",arg,other)} -arg.$hash=hash +if(typeof arg=="object"){arg.$hash=hash } var sk=self.$str_hash[hash] -if(sk !==undefined && _eq(sk)){return self.$string_dict[sk]} -if(self.$numeric_dict[hash]!==undefined && _eq(hash)){return self.$numeric_dict[hash]} -if(isinstance(arg,_b_.str)){ +if(sk !==undefined && _eq(sk)){return self.$string_dict[sk][0]} +if(self.$numeric_dict[hash]!==undefined && _eq(hash)){return self.$numeric_dict[hash][0]} +if(_b_.isinstance(arg,_b_.str)){ var res=self.$string_dict[arg.valueOf()] -if(res !==undefined){return res}} +if(res !==undefined){return res[0]}} var ix=rank(self,hash,arg) -if(ix >-1){return self.$object_dict[hash][ix][1]} +if(ix >-1){return self.$object_dict[hash][ix][1][0]} if(self.__class__ !==dict){try{var missing_method=getattr(self.__class__,"__missing__",_b_.None)}catch(err){console.log(err)} if(missing_method !==_b_.None){return missing_method(self,arg)}} -throw KeyError.$factory(arg)} +throw _b_.KeyError.$factory(arg)} dict.__hash__=_b_.None function init_from_list(self,args){var i=-1,stop=args.length-1,si=dict.__setitem__ while(i++< stop){var item=args[i] switch(typeof item[0]){case 'string': -self.$string_dict[item[0]]=item[1] +self.$string_dict[item[0]]=[item[1],self.$order++] self.$str_hash[str_hash(item[0])]=item[0] +self.$version++ break case 'number': -self.$numeric_dict[item[0]]=item[1] -break +if(item[0]!=0 && item[0]!=1){self.$numeric_dict[item[0]]=[item[1],self.$order++] +self.$version++ +break} default: si(self,item[0],item[1]) break}}} dict.__init__=function(self,first,second){var $ if(first===undefined){return $N} -if(second===undefined){if(first.__class__===$B.JSObject){self.$jsobj=first.js -return $N}else if(first.$jsobj){self.$jsobj={} +if(second===undefined){if(first.$nat !='kw' && $B.get_class(first)===$B.JSObj){for(var key in first){self.$string_dict[key]=[first[key],self.$order++]} +return _b_.None}else if(first.$jsobj){self.$jsobj={} for(var attr in first.$jsobj){self.$jsobj[attr]=first.$jsobj[attr]} return $N}else if(Array.isArray(first)){init_from_list(self,first) return $N}} @@ -11915,7 +12928,7 @@ var args=$.first if(args.length > 1){throw _b_.TypeError.$factory("dict expected at most 1 argument"+ ", got 2")}else if(args.length==1){args=args[0] if(args.__class__===dict){['$string_dict','$str_hash','$numeric_dict','$object_dict']. 
-forEach(function(d){for(key in args[d]){self[d][key]=args[d][key]}})}else if(isinstance(args,dict)){$copy_dict(self,args)}else{var keys=$B.$getattr(args,"keys",null) +forEach(function(d){for(key in args[d]){self[d][key]=args[d][key]}})}else if(_b_.isinstance(args,dict)){$copy_dict(self,args)}else{var keys=$B.$getattr(args,"keys",null) if(keys !==null){var gi=$B.$getattr(args,"__getitem__",null) if(gi !==null){ gi=$B.$call(gi) @@ -11928,17 +12941,20 @@ if(! Array.isArray(args)){args=_b_.list.$factory(args)} init_from_list(self,args)}} var kw=$.second.$string_dict for(var attr in kw){switch(typeof attr){case "string": -self.$string_dict[attr]=kw[attr] +self.$string_dict[attr]=[kw[attr][0],self.$order++] self.$str_hash[str_hash(attr)]=attr break case "number": -self.$numeric_dict[attr]=kw[attr] +self.$numeric_dict[attr]=[kw[attr][0],self.$order++] break default: -si(self,attr,kw[attr]) +si(self,attr,kw[attr][0]) break}} return $N} dict.__iter__=function(self){return _b_.iter(dict.$$keys(self))} +dict.__ior__=function(self,other){ +dict.update(self,other) +return self} dict.__len__=function(self){var _count=0 if(self.$jsobj){for(var attr in self.$jsobj){if(attr.charAt(0)!="$"){_count++}} return _count} @@ -11948,15 +12964,26 @@ for(var hash in self.$object_dict){_count+=self.$object_dict[hash].length} return _count} dict.__ne__=function(self,other){return ! dict.__eq__(self,other)} dict.__new__=function(cls){if(cls===undefined){throw _b_.TypeError.$factory("int.__new__(): not enough arguments")} -var instance={__class__:cls,$numeric_dict :{},$object_dict :{},$string_dict :{},$str_hash:{},$version:0} -if(cls !==dict){instance.__dict__=_b_.dict.$factory()} +var instance={__class__:cls,$numeric_dict :{},$object_dict :{},$string_dict :{},$str_hash:{},$version:0,$order:0} +if(cls !==dict){instance.__dict__=$B.empty_dict()} return instance} +dict.__or__=function(self,other){ +if(! _b_.isinstance(other,dict)){return _b_.NotImplemented} +var res=dict.copy(self) +dict.update(res,other) +return res} dict.__repr__=function(self){if(self.$jsobj){ return dict.__repr__(jsobj2dict(self.$jsobj))} +if($B.repr.enter(self)){return "{...}"} var res=[],items=to_list(self) -items.forEach(function(item){if((!self.$jsobj && item[1]===self)|| -(self.$jsobj && item[1]===self.$jsobj)){res.push(repr(item[0])+": {...}")}else{res.push(repr(item[0])+": "+repr(item[1]))}}) +items.forEach(function(item){try{res.push(repr(item[0])+": "+repr(item[1]))}catch(err){throw err}}) +$B.repr.leave(self) return "{"+res.join(", ")+"}"} +dict.__ror__=function(self,other){ +if(! 
_b_.isinstance(other,dict)){return _b_.NotImplemented} +var res=dict.copy(other) +dict.update(res,self) +return res} dict.__setitem__=function(self,key,value){var $=$B.args("__setitem__",3,{self:null,key:null,value:null},["self","key","value"],arguments,{},null,null) return dict.$setitem($.self,$.key,$.value)} dict.$setitem=function(self,key,value,$hash){ @@ -11967,29 +12994,45 @@ if(key=="__init__" ||key=="__new__"){ self.$jsobj.$factory=$B.$instance_creator(self.$jsobj)}}else{self.$jsobj[key]=value} return $N} switch(typeof key){case "string": -self.$string_dict[key]=value +if(self.$string_dict===undefined){console.log("pas de string dict",self,key,value)} +if(self.$string_dict[key]!==undefined){self.$string_dict[key][0]=value}else{self.$string_dict[key]=[value,self.$order++] self.$str_hash[str_hash(key)]=key -self.$version++ +self.$version++} return $N case "number": -self.$numeric_dict[key]=value -self.$version++ -return $N} +if(self.$numeric_dict[key]!==undefined){ +self.$numeric_dict[key][0]=value}else{ +var done=false +if((key==0 ||key==1)&& +self.$object_dict[key]!==undefined){for(const item of self.$object_dict[key]){if((key==0 && item[0]===false)|| +(key==1 && item[0]===true)){ +item[1][0]=value +done=true}}} +if(! done){ +self.$numeric_dict[key]=[value,self.$order++]} +self.$version++} +return $N +case "boolean": +var num=key ? 1 :0 +if(self.$numeric_dict[num]!==undefined){var order=self.$numeric_dict[num][1] +self.$numeric_dict[num]=[value,order] +return} +if(self.$object_dict[num]!==undefined){self.$object_dict[num].push([key,[value,self.$order++]])}else{self.$object_dict[num]=[[key,[value,self.$order++]]]}} var hash=$hash===undefined ? _b_.hash(key):$hash,_eq=function(other){return $B.rich_comp("__eq__",key,other)} -if(self.$numeric_dict[hash]!==undefined && _eq(hash)){self.$numeric_dict[hash]=value +if(self.$numeric_dict[hash]!==undefined && _eq(hash)){self.$numeric_dict[hash]=[value,self.$numeric_dict[hash][1]] self.$version++ return $N} var sk=self.$str_hash[hash] -if(sk !==undefined && _eq(sk)){self.$string_dict[sk]=value +if(sk !==undefined && _eq(sk)){self.$string_dict[sk]=[value,self.$string_dict[sk][1]] self.$version++ return $N} -if($hash){if(self.$object_dict[$hash]!==undefined){self.$object_dict[$hash].push([key,value])}else{self.$object_dict[$hash]=[[key,value]]} +if($hash){if(self.$object_dict[$hash]!==undefined){self.$object_dict[$hash].push([key,[value,self.$order++]])}else{self.$object_dict[$hash]=[[key,[value,self.$order++]]]} self.$version++ return $N} var ix=rank(self,hash,key) if(ix >-1){ -self.$object_dict[hash][ix][1]=value -return $N}else if(self.$object_dict.hasOwnProperty(hash)){self.$object_dict[hash].push([key,value])}else{self.$object_dict[hash]=[[key,value]]} +self.$object_dict[hash][ix][1]=[value,self.$object_dict[hash][ix][1][1]] +return $N}else if(self.$object_dict.hasOwnProperty(hash)){self.$object_dict[hash].push([key,[value,self.$order++]])}else{self.$object_dict[hash]=[[key,[value,self.$order++]]]} self.$version++ return $N} dict.__str__=function(){return dict.__repr__.apply(null,arguments)} @@ -12002,9 +13045,10 @@ self.$str_hash={} self.$object_dict={} if(self.$jsobj){for(var attr in self.$jsobj){if(attr.charAt(0)!=="$" && attr !=="__class__"){delete self.$jsobj[attr]}}} self.$version++ +self.$order=0 return $N} dict.copy=function(self){ -var $=$B.args("copy",1,{self:null},["self"],arguments,{},null,null),self=$.self,res=_b_.dict.$factory() +var 
$=$B.args("copy",1,{self:null},["self"],arguments,{},null,null),self=$.self,res=$B.empty_dict() $copy_dict(res,self) return res} dict.fromkeys=function(){var $=$B.args("fromkeys",3,{cls:null,keys:null,value:null},["cls","keys","value"],arguments,{value:_b_.None},null,null),keys=$.keys,value=$.value @@ -12021,14 +13065,17 @@ var dict_items=$B.make_view("dict_items",true) dict_items.$iterator=$B.make_iterator_class("dict_itemiterator") dict.items=function(self){if(arguments.length > 1){var _len=arguments.length-1,_msg="items() takes no arguments ("+_len+" given)" throw _b_.TypeError.$factory(_msg)} -var it=dict_items.$factory(to_list(self)) +var items=to_list(self),set_like=true +for(var i=0,len=items.length;i < len;i++){try{_b_.hash(items[i][1])}catch(err){set_like=false +break}} +var it=dict_items.$factory(to_list(self),set_like) it.len_func=function(){return dict.__len__(self)} return it} -var dict_keys=$B.make_view("dict_keys",true) +var dict_keys=$B.make_view("dict_keys") dict_keys.$iterator=$B.make_iterator_class("dict_keyiterator") dict.$$keys=function(self){if(arguments.length > 1){var _len=arguments.length-1,_msg="keys() takes no arguments ("+_len+" given)" throw _b_.TypeError.$factory(_msg)} -var it=dict_keys.$factory(to_list(self,0)) +var it=dict_keys.$factory(to_list(self,0),true) it.len_func=function(){return dict.__len__(self)} return it} dict.pop=function(){var missing={},$=$B.args("pop",3,{self:null,key:null,_default:null},["self","key","_default"],arguments,{_default:missing},null,null),self=$.self,key=$.key,_default=$._default @@ -12039,7 +13086,7 @@ throw err} throw err}} dict.popitem=function(self){try{var itm=_b_.next(_b_.iter(dict.items(self))) dict.__delitem__(self,itm[0]) -return _b_.tuple.$factory(itm)}catch(err){if(err.__class__==_b_.StopIteration){throw KeyError.$factory("'popitem(): dictionary is empty'")}}} +return _b_.tuple.$factory(itm)}catch(err){if(err.__class__==_b_.StopIteration){throw _b_.KeyError.$factory("'popitem(): dictionary is empty'")}}} dict.setdefault=function(){var $=$B.args("setdefault",3,{self:null,key:null,_default:null},["self","key","_default"],arguments,{_default:$N},null,null),self=$.self,key=$.key,_default=$._default try{return dict.__getitem__(self,key)} catch(err){if(err.__class__ !==_b_.KeyError){throw err} @@ -12050,8 +13097,8 @@ dict.$setitem(self,key,_default,hash) return _default}} dict.update=function(self){var $=$B.args("update",1,{"self":null},["self"],arguments,{},"args","kw"),self=$.self,args=$.args,kw=$.kw if(args.length > 0){var o=args[0] -if(isinstance(o,dict)){if(o.$jsobj){o=jsobj2dict(o.$jsobj)} -$copy_dict(self,o)}else if(hasattr(o,"keys")){var _keys=_b_.list.$factory($B.$call($B.$getattr(o,"keys"))()) +if(_b_.isinstance(o,dict)){if(o.$jsobj){o=jsobj2dict(o.$jsobj)} +$copy_dict(self,o)}else if(_b_.hasattr(o,"keys")){var _keys=_b_.list.$factory($B.$call($B.$getattr(o,"keys"))()) for(var i=0,len=_keys.length;i < len;i++){var _value=getattr(o,"__getitem__")(_keys[i]) dict.$setitem(self,_keys[i],_value)}}else{var it=_b_.iter(o),i=0 while(true){try{var item=_b_.next(it)}catch(err){if(err.__class__===_b_.StopIteration){break} @@ -12070,7 +13117,8 @@ var dict_values=$B.make_view("dict_values") dict_values.$iterator=$B.make_iterator_class("dict_valueiterator") dict.values=function(self){if(arguments.length > 1){var _len=arguments.length-1,_msg="values() takes no arguments ("+_len+" given)" throw _b_.TypeError.$factory(_msg)} -var it=dict_values.$factory(to_list(self,1)) +var values=to_list(self,1) +var 
it=dict_values.$factory(to_list(self,1),false) it.len_func=function(){return dict.__len__(self)} return it} dict.$factory=function(){var res=dict.__new__(dict) @@ -12080,9 +13128,17 @@ dict.__init__.apply(null,args) return res} _b_.dict=dict $B.set_func_names(dict,"builtins") +dict.__class_getitem__=_b_.classmethod.$factory(dict.__class_getitem__) +$B.empty_dict=function(){return{ +__class__:dict,$numeric_dict :{},$object_dict :{},$string_dict :{},$str_hash:{},$version:0,$order:0}} dict.fromkeys=_b_.classmethod.$factory(dict.fromkeys) +$B.getset_descriptor=$B.make_class("getset_descriptor",function(klass,attr){return{ +__class__:$B.getset_descriptor,__doc__:_b_.None,cls:klass,attr:attr}} +) +$B.getset_descriptor.__repr__=$B.getset_descriptor.__str__=function(self){return ``} +$B.set_func_names($B.getset_descriptor,"builtins") var mappingproxy=$B.mappingproxy=$B.make_class("mappingproxy",function(obj){if(_b_.isinstance(obj,dict)){ -var res=$B.obj_dict(obj.$string_dict)}else{var res=$B.obj_dict(obj)} +var res=$B.obj_dict(dict.$to_obj(obj))}else{var res=$B.obj_dict(obj)} res.__class__=mappingproxy return res} ) @@ -12092,27 +13148,24 @@ for(var attr in dict){if(mappingproxy[attr]!==undefined || ["__class__","__mro__","__new__","__init__","__delitem__","clear","fromkeys","pop","popitem","setdefault","update"].indexOf(attr)>-1){continue} if(typeof dict[attr]=="function"){mappingproxy[attr]=(function(key){return function(){return dict[key].apply(null,arguments)}})(attr)}else{mappingproxy[attr]=dict[attr]}} $B.set_func_names(mappingproxy,"builtins") -function jsobj2dict(x){var d=dict.$factory() -for(var attr in x){if(attr.charAt(0)!="$" && attr !=="__class__"){if(x[attr]===undefined){continue}else if(x[attr].$jsobj===x){d.$string_dict[attr]=d}else{d.$string_dict[attr]=x[attr]}}} +function jsobj2dict(x){var d=$B.empty_dict() +for(var attr in x){if(attr.charAt(0)!="$" && attr !=="__class__"){if(x[attr]===null){d.$string_dict[attr]=[_b_.None,d.$order++]}else if(x[attr]===undefined){continue}else if(x[attr].$jsobj===x){d.$string_dict[attr]=[d,d.$order++]}else{d.$string_dict[attr]=[$B.$JS2Py(x[attr]),d.$order++]}}} return d} $B.obj_dict=function(obj,from_js){var klass=obj.__class__ ||$B.get_class(obj) if(klass !==undefined && klass.$native){throw _b_.AttributeError.$factory(klass.__name__+ " has no attribute '__dict__'")} -var res=dict.$factory() +var res=$B.empty_dict() res.$jsobj=obj res.$from_js=from_js return res}})(__BRYTHON__) ; ;(function($B){ -var _b_=$B.builtins; -var object=_b_.object -var JSObject=$B.JSObject -var _window=self; +var _b_=$B.builtins,object=_b_.object,_window=self function $getMouseOffset(target,ev){ev=ev ||_window.event; var docPos=$getPosition(target); var mousePos=$mouseCoords(ev); return{x:mousePos.x-docPos.x,y:mousePos.y-docPos.y};} -function $getPosition(e){var left=0,top=0,width=e.width ||e.offsetWidth,height=e.height ||e.offsetHeight +function $getPosition(e){var left=0,top=0,width=e.width ||e.offsetWidth,height=e.height ||e.offsetHeight,scroll=document.scrollingElement.scrollTop while(e.offsetParent){left+=e.offsetLeft top+=e.offsetTop e=e.offsetParent} @@ -12123,7 +13176,15 @@ var parent_pos=$getPosition(e.parentElement) left+=parent_pos.left top+=parent_pos.top} return{left:left,top:top,width:width,height:height}} -function $mouseCoords(ev){var posx=0,posy=0 +function trace(msg){var elt=document.getElementById("trace") +if(elt){elt.innerText+=msg}} +function $mouseCoords(ev){if(ev.type.startsWith("touch")){var res={} +res.x=_b_.int.$factory(ev.touches[0].screenX) 
+res.y=_b_.int.$factory(ev.touches[0].screenY) +res.__getattr__=function(attr){return this[attr]} +res.__class__="MouseCoords" +return res} +var posx=0,posy=0 if(!ev){var ev=_window.event} if(ev.pageX ||ev.pageY){posx=ev.pageX posy=ev.pageY}else if(ev.clientX ||ev.clientY){posx=ev.clientX+document.body.scrollLeft+ @@ -12190,13 +13251,17 @@ typeof $.self.elt.setAttributeNS=="function"){$.self.elt.setAttributeNS(null,$.k return _b_.None}else if(typeof $.self.elt.setAttribute=="function"){$.self.elt.setAttribute($.key,$.value) return _b_.None} throw _b_.TypeError.$factory("Can't set attributes on element")} +Attributes.__repr__=Attributes.__str__=function(self){var attrs=self.elt.attributes,items=[] +for(var i=0;i < attrs.length;i++){items.push(attrs[i].name+': "'+ +self.elt.getAttributeNS(null,attrs[i].name)+'"')} +return '{'+items.join(", ")+'}'} Attributes.get=function(){var $=$B.args("get",3,{self:null,key:null,deflt:null},["self","key","deflt"],arguments,{deflt:_b_.None},null,null) -try{return Attributes.__getitem__($.self,$.key)}catch(err){if(err.__class__===_b_.KeyError){return $B.deflt}else{throw err}}} +try{return Attributes.__getitem__($.self,$.key)}catch(err){if(err.__class__===_b_.KeyError){return $.deflt}else{throw err}}} Attributes.keys=function(){return Attributes.__iter__.apply(null,arguments)} -Attributes.items=function(){var $=$B.args("values",1,{self:null},["self"],arguments,{},null,null),attrs=$.self.elt.attributes,values=[] +Attributes.items=function(){var $=$B.args("values",1,{self:null},["self"],arguments,{},null,null),attrs=$.self.attributes,values=[] for(var i=0;i < attrs.length;i++){values.push([attrs[i].name,attrs[i].value])} return _b_.list.__iter__(values)} -Attributes.values=function(){var $=$B.args("values",1,{self:null},["self"],arguments,{},null,null),attrs=$.self.elt.attributes,values=[] +Attributes.values=function(){var $=$B.args("values",1,{self:null},["self"],arguments,{},null,null),attrs=$.self.attributes,values=[] for(var i=0;i < attrs.length;i++){values.push(attrs[i].value)} return _b_.list.__iter__(values)} $B.set_func_names(Attributes,"") @@ -12219,8 +13284,8 @@ return $mouseCoords(self).x case 'y': return $mouseCoords(self).y case 'data': -if(self.dataTransfer !==undefined){return Clipboard.$factory(self.dataTransfer)} -return self['data'] +if(self.dataTransfer !==null){return Clipboard.$factory(self.dataTransfer)} +return $B.$JS2Py(self['data']) case 'target': if(self.target !==undefined){return DOMNode.$factory(self.target)} case 'char': @@ -12245,17 +13310,18 @@ attr+"'")} DOMEvent.$factory=function(evt_name){ return DOMEvent.__new__(DOMEvent,evt_name)} var $DOMEvent=$B.$DOMEvent=function(ev){ev.__class__=DOMEvent +ev.$no_dict=true if(ev.preventDefault===undefined){ev.preventDefault=function(){ev.returnValue=false}} if(ev.stopPropagation===undefined){ev.stopPropagation=function(){ev.cancelBubble=true}} return ev} -$B.set_func_names(DOMEvent,"") -var Clipboard={__class__:_b_.type,$infos:{__module__:"",__name__:"Clipboard"}} +$B.set_func_names(DOMEvent,"browser") +var Clipboard={__class__:_b_.type,$infos:{__module__:"browser",__name__:"Clipboard"}} Clipboard.__getitem__=function(self,name){return self.data.getData(name)} Clipboard.__mro__=[object] Clipboard.__setitem__=function(self,name,value){self.data.setData(name,value)} Clipboard.$factory=function(data){ return{ -__class__ :Clipboard,__dict__:_b_.dict.$factory(),data :data}} +__class__ :Clipboard,__dict__:$B.empty_dict(),data :data}} $B.set_func_names(Clipboard,"") function 
$EventsList(elt,evt,arg){ this.elt=elt @@ -12290,20 +13356,20 @@ else{self.parent.options.add(element.elt,index)}},item:function(self,index){retu Options.$factory=function(parent){return{ __class__:Options,parent:parent}} $B.set_func_names(Options,"") -var DOMNode={__class__ :_b_.type,__mro__:[object],$infos:{__module__:"",__name__:"DOMNode"}} +var DOMNode={__class__ :_b_.type,__mro__:[object],$infos:{__module__:"browser",__name__:"DOMNode"}} DOMNode.$factory=function(elt,fromtag){if(elt.__class__===DOMNode){return elt} if(typeof elt=="number" ||typeof elt=="boolean" || typeof elt=="string"){return elt} if(fromtag===undefined){if(DOMNode.tags !==undefined){ var tdict=DOMNode.tags.$string_dict -if(tdict !==undefined){var klass=tdict[elt.tagName] +if(tdict !==undefined && tdict.hasOwnProperty(elt.tagName)){try{var klass=tdict[elt.tagName][0]}catch(err){console.log("tdict",tdict,"tag name",elt.tagName) +throw err} if(klass !==undefined){ klass.$elt_wrap=elt return klass.$factory()}}}} if(elt["$brython_id"]===undefined ||elt.nodeType==9){ elt.$brython_id="DOM-"+$B.UUID()} -return{ -__class__:DOMNode,elt:elt}} +return elt} DOMNode.__add__=function(self,other){ var res=TagSum.$factory() res.children=[self],pos=1 @@ -12315,52 +13381,60 @@ $B.class_name(other)+"' object to DOMNode instance")}} return res} DOMNode.__bool__=function(self){return true} DOMNode.__contains__=function(self,key){ -if(self.elt.nodeType==9 && typeof key=="string"){return document.getElementById(key)!==null} -key=key.elt !==undefined ? key.elt :key -if(self.elt.length !==undefined && typeof self.elt.item=="function"){for(var i=0,len=self.elt.length;i < len;i++){if(self.elt.item(i)===key){return true}}} +if(self.nodeType==9 && typeof key=="string"){return document.getElementById(key)!==null} +if(self.length !==undefined && typeof self.item=="function"){for(var i=0,len=self.length;i < len;i++){if(self.item(i)===key){return true}}} return false} DOMNode.__del__=function(self){ -if(!self.elt.parentNode){throw _b_.ValueError.$factory("can't delete "+_b_.str.$factory(self.elt))} -self.elt.parentNode.removeChild(self.elt)} -DOMNode.__delitem__=function(self,key){if(self.elt.nodeType==9){ -var res=self.elt.getElementById(key) +if(!self.parentNode){throw _b_.ValueError.$factory("can't delete "+_b_.str.$factory(self))} +self.parentNode.removeChild(self)} +DOMNode.__delattr__=function(self,attr){if(self[attr]===undefined){throw _b_.AttributeError.$factory( +`cannot delete DOMNode attribute '${attr}'`)} +delete self[attr] +return _b_.None} +DOMNode.__delitem__=function(self,key){if(self.nodeType==9){ +var res=self.getElementById(key) if(res){res.parentNode.removeChild(res)} else{throw _b_.KeyError.$factory(key)}}else{ -self.elt.parentNode.removeChild(self.elt)}} +self.parentNode.removeChild(self)}} DOMNode.__dir__=function(self){var res=[] -for(var attr in self.elt){if(attr.charAt(0)!="$"){res.push(attr)}} -for(var attr in DOMNode){if(attr.charAt(0)!="$" && res.indexOf(attr)==-1){res.push(attr)}} +for(var attr in self){if(attr.charAt(0)!="$"){res.push(attr)}} +res.sort() return res} -DOMNode.__eq__=function(self,other){return self.elt==other.elt} +DOMNode.__eq__=function(self,other){return self==other} DOMNode.__getattribute__=function(self,attr){if(attr.substr(0,2)=="$$"){attr=attr.substr(2)} switch(attr){case "attrs": -return Attributes.$factory(self.elt) +return Attributes.$factory(self) case "class_name": case "html": case "id": case "parent": -case "query": case "text": return DOMNode[attr](self) case "height": case "left": case 
"top": case "width": -if(self.elt.tagName=="CANVAS" && self.elt[attr]){return self.elt[attr]} -if(self.elt instanceof SVGElement){return self.elt[attr].baseVal.value} -if(self.elt.style[attr]){return parseInt(self.elt.style[attr])}else{throw _b_.AttributeError.$factory("style."+attr+ +if(self.tagName=="CANVAS" && self[attr]){return self[attr]} +if(self instanceof SVGElement){return self[attr].baseVal.value} +if(self.style[attr]){return parseInt(self.style[attr])}else{var computed=window.getComputedStyle(self)[attr] +if(computed !==undefined){return Math.floor(parseFloat(computed)+0.5)} +throw _b_.AttributeError.$factory("style."+attr+ " is not set for "+_b_.str.$factory(self))} +case "x": +case "y": +if(!(self instanceof SVGElement)){var pos=$getPosition(self) +return attr=="x" ? pos.left :pos.top} case "clear": case "closest": return function(){return DOMNode[attr](self,arguments[0])} case "headers": -if(self.elt.nodeType==9){ +if(self.nodeType==9){ var req=new XMLHttpRequest(); req.open("GET",document.location,false) req.send(null); var headers=req.getAllResponseHeaders() headers=headers.split("\r\n") -var res=_b_.dict.$factory() +var res=$B.empty_dict() for(var i=0;i < headers.length;i++){var header=headers[i] if(header.strip().length==0){continue} var pos=header.search(":") @@ -12370,12 +13444,28 @@ break case "$$location": attr="location" break} -if(attr=="select" && self.elt.nodeType==1 && -["INPUT","TEXTAREA"].indexOf(self.elt.tagName.toUpperCase())>-1){return function(selector){if(selector===undefined){self.elt.select();return _b_.None} +if(attr=="select" && self.nodeType==1 && +["INPUT","TEXTAREA"].indexOf(self.tagName.toUpperCase())>-1){return function(selector){if(selector===undefined){self.select();return _b_.None} return DOMNode.select(self,selector)}} -var property=self.elt[attr] -if(property===undefined && $B.aliased_names[attr]){property=self.elt["$$"+attr]} -if(property===undefined){return object.__getattribute__(self,attr)} +if(attr=="query" && self.nodeType==9){ +var res={__class__:Query,_keys :[],_values :{}} +var qs=location.search.substr(1).split('&') +if(location.search !=""){for(var i=0;i < qs.length;i++){var pos=qs[i].search("="),elts=[qs[i].substr(0,pos),qs[i].substr(pos+1)],key=decodeURIComponent(elts[0]),value=decodeURIComponent(elts[1]) +if(res._keys.indexOf(key)>-1){res._values[key].push(value)}else{res._keys.push(key) +res._values[key]=[value]}}} +return res} +var property=self[attr] +if(property===undefined && $B.aliased_names[attr]){property=self["$$"+attr]} +if(property===undefined){ +if(self.tagName){var ce=customElements.get(self.tagName.toLowerCase()) +if(ce !==undefined && ce.$cls !==undefined){ +var save_class=self.__class__ +self.__class__=ce.$cls +try{var res=_b_.object.__getattribute__(self,attr) +self.__class__=save_class +return res}catch(err){self.__class__=save_class +if(! 
$B.is_exc(err,[_b_.AttributeError])){throw err}}}} +return object.__getattribute__(self,attr)} var res=property if(res !==undefined){if(res===null){return _b_.None} if(typeof res==="function"){ @@ -12384,48 +13474,50 @@ for(var i=0;i < arguments.length;i++){var arg=arguments[i] if(typeof arg=="function"){ if(arg.$cache){var f1=arg.$cache}else{var f1=function(dest_fn){return function(){try{return dest_fn.apply(null,arguments)}catch(err){$B.handle_error(err)}}}(arg) arg.$cache=f1} -args[pos++]=f1} -else if(_b_.isinstance(arg,JSObject)){args[pos++]=arg.js}else if(_b_.isinstance(arg,DOMNode)){args[pos++]=arg.elt}else if(arg===_b_.None){args[pos++]=null}else{args[pos++]=arg}} +args[pos++]=f1}else if(_b_.isinstance(arg,DOMNode)){args[pos++]=arg}else if(arg===_b_.None){args[pos++]=null}else if(arg.__class__==_b_.dict){args[pos++]=_b_.dict.$to_obj(arg)}else{args[pos++]=arg}} var result=f.apply(elt,args) -return $B.$JS2Py(result)}})(res,self.elt) -func.$infos={__name__ :attr} +return $B.$JS2Py(result)}})(res,self) +func.$infos={__name__ :attr,__qualname__:attr} func.$is_func=true return func} -if(attr=='options'){return Options.$factory(self.elt)} -if(attr=='style'){return $B.JSObject.$factory(self.elt[attr])} +if(attr=='options'){return Options.$factory(self)} +if(attr=='style'){return $B.JSObj.$factory(self[attr])} if(Array.isArray(res)){return res} return $B.$JS2Py(res)} return object.__getattribute__(self,attr)} -DOMNode.__getitem__=function(self,key){if(self.elt.nodeType==9){ -if(typeof key=="string"){var res=self.elt.getElementById(key) +DOMNode.__getitem__=function(self,key){if(self.nodeType==9){ +if(typeof key=="string"){var res=self.getElementById(key) if(res){return DOMNode.$factory(res)} -throw _b_.KeyError.$factory(key)}else{try{var elts=self.elt.getElementsByTagName(key.$infos.__name__),res=[] +throw _b_.KeyError.$factory(key)}else{try{var elts=self.getElementsByTagName(key.$infos.__name__),res=[] for(var i=0;i < elts.length;i++){res.push(DOMNode.$factory(elts[i]))} return res}catch(err){throw _b_.KeyError.$factory(_b_.str.$factory(key))}}}else{if((typeof key=="number" ||typeof key=="boolean")&& -typeof self.elt.item=="function"){var key_to_int=_b_.int.$factory(key) -if(key_to_int < 0){key_to_int+=self.elt.length} -var res=DOMNode.$factory(self.elt.item(key_to_int)) +typeof self.item=="function"){var key_to_int=_b_.int.$factory(key) +if(key_to_int < 0){key_to_int+=self.length} +var res=DOMNode.$factory(self.item(key_to_int)) if(res===undefined){throw _b_.KeyError.$factory(key)} return res}else if(typeof key=="string" && -self.elt.attributes && -typeof self.elt.attributes.getNamedItem=="function"){var attr=self.elt.attributes.getNamedItem(key) +self.attributes && +typeof self.attributes.getNamedItem=="function"){var attr=self.attributes.getNamedItem(key) if(!!attr){return attr.value} throw _b_.KeyError.$factory(key)}}} +DOMNode.__hash__=function(self){return self.__hashvalue__===undefined ? 
+(self.__hashvalue__=$B.$py_next_hash--): +self.__hashvalue__} DOMNode.__iter__=function(self){ -if(self.elt.length !==undefined && typeof self.elt.item=="function"){var items=[] -for(var i=0,len=self.elt.length;i < len;i++){items.push(DOMNode.$factory(self.elt.item(i)))}}else if(self.elt.childNodes !==undefined){var items=[] -for(var i=0,len=self.elt.childNodes.length;i < len;i++){items.push(DOMNode.$factory(self.elt.childNodes[i]))}} +if(self.length !==undefined && typeof self.item=="function"){var items=[] +for(var i=0,len=self.length;i < len;i++){items.push(DOMNode.$factory(self.item(i)))}}else if(self.childNodes !==undefined){var items=[] +for(var i=0,len=self.childNodes.length;i < len;i++){items.push(DOMNode.$factory(self.childNodes[i]))}} return $B.$iter(items)} DOMNode.__le__=function(self,other){ -var elt=self.elt -if(self.elt.nodeType==9){elt=self.elt.body} -if(_b_.isinstance(other,TagSum)){for(var i=0;i < other.children.length;i++){elt.appendChild(other.children[i].elt)}}else if(typeof other=="string" ||typeof other=="number"){var $txt=document.createTextNode(other.toString()) -elt.appendChild($txt)}else if(_b_.isinstance(other,DOMNode)){ -elt.appendChild(other.elt)}else{try{ +if(self.nodeType==9){self=self.body} +if(_b_.isinstance(other,TagSum)){for(var i=0;i < other.children.length;i++){self.appendChild(other.children[i])}}else if(typeof other=="string" ||typeof other=="number"){var $txt=document.createTextNode(other.toString()) +self.appendChild($txt)}else if(_b_.isinstance(other,DOMNode)){ +self.appendChild(other)}else{try{ var items=_b_.list.$factory(other) items.forEach(function(item){DOMNode.__le__(self,item)})}catch(err){throw _b_.TypeError.$factory("can't add '"+ -$B.class_name(other)+"' object to DOMNode instance")}}} -DOMNode.__len__=function(self){return self.elt.length} +$B.class_name(other)+"' object to DOMNode instance")}} +return true } +DOMNode.__len__=function(self){return self.length} DOMNode.__mul__=function(self,other){if(_b_.isinstance(other,_b_.int)&& other.valueOf()> 0){var res=TagSum.$factory() var pos=res.children.length for(var i=0;i < other.valueOf();i++){res.children[pos++]=DOMNode.clone(self)()} @@ -12434,22 +13526,27 @@ throw _b_.ValueError.$factory("can't multiply "+self.__class__+ "by "+other)} DOMNode.__ne__=function(self,other){return ! 
DOMNode.__eq__(self,other)} DOMNode.__next__=function(self){self.$counter++ -if(self.$counter < self.elt.childNodes.length){return DOMNode.$factory(self.elt.childNodes[self.$counter])} +if(self.$counter < self.childNodes.length){return DOMNode.$factory(self.childNodes[self.$counter])} throw _b_.StopIteration.$factory("StopIteration")} DOMNode.__radd__=function(self,other){ var res=TagSum.$factory() var txt=DOMNode.$factory(document.createTextNode(other)) res.children=[txt,self] return res} -DOMNode.__str__=DOMNode.__repr__=function(self){var proto=Object.getPrototypeOf(self.elt) +DOMNode.__str__=DOMNode.__repr__=function(self){var attrs=self.attributes,attrs_str="" +if(attrs !==undefined){var items=[] +for(var i=0;i < attrs.length;i++){items.push(attrs[i].name+'="'+ +self.getAttributeNS(null,attrs[i].name)+'"')}} +var proto=Object.getPrototypeOf(self) if(proto){var name=proto.constructor.name if(name===undefined){ var proto_str=proto.constructor.toString() name=proto_str.substring(8,proto_str.length-1)} -return "<"+name+" object>"} +items.splice(0,0,name) +return "<"+items.join(" ")+">"} var res=""} +return res+$NodeTypes[self.nodeType]+"' name '"+ +self.nodeName+"'"+attrs_str+">"} DOMNode.__setattr__=function(self,attr,value){ if(attr.substr(0,2)=="on"){ if(!$B.$bool(value)){ @@ -12458,7 +13555,7 @@ DOMNode.bind(self,attr.substr(2),value)}}else{switch(attr){case "left": case "top": case "width": case "height": -if(_b_.isinstance(value,_b_.int)&& self.elt.nodeType==1){self.elt.style[attr]=value+"px" +if(_b_.isinstance(value,_b_.int)&& self.nodeType==1){self.style[attr]=value+"px" return _b_.None}else{throw _b_.ValueError.$factory(attr+" value should be"+ " an integer, not "+$B.class_name(value))} break} @@ -12469,7 +13566,7 @@ if($B.debug > 0){var info=frame[1].$line_info.split(",") console.log("module",info[1],"line",info[0]) if($B.$py_src.hasOwnProperty(info[1])){var src=$B.$py_src[info[1]] console.log(src.split("\n")[parseInt(info[0])-1])}}else{console.log("module",frame[2])}} -var proto=Object.getPrototypeOf(self.elt),nb=0 +var proto=Object.getPrototypeOf(self),nb=0 while(!!proto && proto !==Object.prototype && nb++< 10){var descriptors=Object.getOwnPropertyDescriptors(proto) if(!!descriptors && typeof descriptors.hasOwnProperty=="function"){if(descriptors.hasOwnProperty(attr)){if(!descriptors[attr].writable && @@ -12478,128 +13575,128 @@ descriptors[attr].set===undefined){warn("Warning: property '"+attr+ attr+"'] instead.")} break}}else{break} proto=Object.getPrototypeOf(proto)} -if(self.elt.style && self.elt.style[attr]!==undefined){warn("Warning: '"+attr+"' is a property of element.style")} -self.elt[attr]=value +if(self.style && self.style[attr]!==undefined){warn("Warning: '"+attr+"' is a property of element.style")} +self[attr]=value return _b_.None}} -DOMNode.__setitem__=function(self,key,value){if(typeof key=="number"){self.elt.childNodes[key]=value}else if(typeof key=="string"){if(self.elt.attributes){if(self.elt instanceof SVGElement){self.elt.setAttributeNS(null,key,value)}else if(typeof self.elt.setAttribute=="function"){self.elt.setAttribute(key,value)}}}} -DOMNode.abs_left={__get__:function(self){return $getPosition(self.elt).left},__set__:function(){throw _b_.AttributeError.$factory("'DOMNode' objectattribute "+ +DOMNode.__setitem__=function(self,key,value){if(typeof key=="number"){self.childNodes[key]=value}else if(typeof key=="string"){if(self.attributes){if(self instanceof SVGElement){self.setAttributeNS(null,key,value)}else if(typeof 
self.setAttribute=="function"){self.setAttribute(key,value)}}}} +DOMNode.abs_left={__get__:function(self){return $getPosition(self).left},__set__:function(){throw _b_.AttributeError.$factory("'DOMNode' objectattribute "+ "'abs_left' is read-only")}} -DOMNode.abs_top={__get__:function(self){return $getPosition(self.elt).top},__set__:function(){throw _b_.AttributeError.$factory("'DOMNode' objectattribute "+ +DOMNode.abs_top={__get__:function(self){return $getPosition(self).top},__set__:function(){throw _b_.AttributeError.$factory("'DOMNode' objectattribute "+ "'abs_top' is read-only")}} +DOMNode.attach=DOMNode.__le__ DOMNode.bind=function(self,event){ -var $=$B.args("bind",4,{self:null,event:null,func:null,options:null},["self","event","func","options"],arguments,{options:_b_.None},null,null),self=$.self,event=$.event,func=$.func,options=$.options -var callback=(function(f){return function(ev){try{return f($DOMEvent(ev))}catch(err){if(err.__class__ !==undefined){var msg=$B.$getattr(err,"info")+ -"\n"+$B.class_name(err) -if(err.args){msg+=": "+err.args[0]} -try{$B.$getattr($B.stderr,"write")(msg)} -catch(err){console.log(msg)}}else{try{$B.$getattr($B.stderr,"write")(err)} +var $=$B.args("bind",4,{self:null,event:null,func:null,options:null},["self","event","func","options"],arguments,{func:_b_.None,options:_b_.None},null,null),self=$.self,event=$.event,func=$.func,options=$.options +if(func===_b_.None){ +return function(f){return DOMNode.bind(self,event,f)}} +var callback=(function(f){return function(ev){try{return f($DOMEvent(ev))}catch(err){if(err.__class__ !==undefined){$B.handle_error(err)}else{try{$B.$getattr($B.stderr,"write")(err)} catch(err1){console.log(err)}}}}} )(func) callback.$infos=func.$infos callback.$attrs=func.$attrs ||{} callback.$func=func -if(typeof options=="boolean"){self.elt.addEventListener(event,callback,options)}else if(options.__class__===_b_.dict){self.elt.addEventListener(event,callback,options.$string_dict)}else if(options===_b_.None){self.elt.addEventListener(event,callback,false)} -self.elt.$events=self.elt.$events ||{} -self.elt.$events[event]=self.elt.$events[event]||[] -self.elt.$events[event].push([func,callback]) +if(typeof options=="boolean"){self.addEventListener(event,callback,options)}else if(options.__class__===_b_.dict){self.addEventListener(event,callback,_b_.dict.$to_obj(options))}else if(options===_b_.None){self.addEventListener(event,callback,false)} +self.$events=self.$events ||{} +self.$events[event]=self.$events[event]||[] +self.$events[event].push([func,callback]) return self} -DOMNode.children=function(self){var res=[],elt=self.elt -console.log(elt,elt.childNodes) -if(elt.nodeType==9){elt=elt.body} -elt.childNodes.forEach(function(child){res.push(DOMNode.$factory(child))}) +DOMNode.children=function(self){var res=[] +if(self.nodeType==9){self=self.body} +self.childNodes.forEach(function(child){res.push(DOMNode.$factory(child))}) return res} DOMNode.clear=function(self){ -var elt=self.elt -if(elt.nodeType==9){elt=elt.body} -while(elt.firstChild){elt.removeChild(elt.firstChild)}} -DOMNode.Class=function(self){if(self.elt.className !==undefined){return self.elt.className} +if(self.nodeType==9){self=self.body} +while(self.firstChild){self.removeChild(self.firstChild)}} +DOMNode.Class=function(self){if(self.className !==undefined){return self.className} return _b_.None} DOMNode.class_name=function(self){return DOMNode.Class(self)} -DOMNode.clone=function(self){var res=DOMNode.$factory(self.elt.cloneNode(true)) -var events=self.elt.$events ||{} 
+DOMNode.clone=function(self){var res=DOMNode.$factory(self.cloneNode(true)) +var events=self.$events ||{} for(var event in events){var evt_list=events[event] evt_list.forEach(function(evt){var func=evt[0] DOMNode.bind(res,event,func)})} return res} -DOMNode.closest=function(self,tagName){ -var res=self.elt,tagName=tagName.toLowerCase() -while(res.tagName.toLowerCase()!=tagName){res=res.parentNode -if(res===undefined ||res.tagName===undefined){throw _b_.KeyError.$factory("no parent of type "+tagName)}} +DOMNode.closest=function(self,selector){ +var res=self.closest(selector) +if(res===null){throw _b_.KeyError.$factory("no parent with selector "+selector)} return DOMNode.$factory(res)} -DOMNode.events=function(self,event){self.elt.$events=self.elt.$events ||{} -var evt_list=self.elt.$events[event]=self.elt.$events[event]||[],callbacks=[] +DOMNode.events=function(self,event){self.$events=self.$events ||{} +var evt_list=self.$events[event]=self.$events[event]||[],callbacks=[] evt_list.forEach(function(evt){callbacks.push(evt[1])}) return callbacks} DOMNode.focus=function(self){return(function(obj){return function(){ -setTimeout(function(){obj.focus()},10)}})(self.elt)} +setTimeout(function(){obj.focus()},10)}})(self)} function make_list(node_list){var res=[] for(var i=0;i < node_list.length;i++){res.push(DOMNode.$factory(node_list[i]))} return res} DOMNode.get=function(self){ -var obj=self.elt,args=[] +var args=[] for(var i=1;i < arguments.length;i++){args.push(arguments[i])} var $ns=$B.args("get",0,{},[],args,{},null,"kw"),$dict={},items=_b_.list.$factory(_b_.dict.items($ns["kw"])) items.forEach(function(item){$dict[item[0]]=item[1]}) -if($dict["name"]!==undefined){if(obj.getElementsByName===undefined){throw _b_.TypeError.$factory("DOMNode object doesn't support "+ +if($dict["name"]!==undefined){if(self.getElementsByName===undefined){throw _b_.TypeError.$factory("DOMNode object doesn't support "+ "selection by name")} -return make_list(obj.getElementsByName($dict['name']))} -if($dict["tag"]!==undefined){if(obj.getElementsByTagName===undefined){throw _b_.TypeError.$factory("DOMNode object doesn't support "+ +return make_list(self.getElementsByName($dict['name']))} +if($dict["tag"]!==undefined){if(self.getElementsByTagName===undefined){throw _b_.TypeError.$factory("DOMNode object doesn't support "+ "selection by tag name")} -return make_list(obj.getElementsByTagName($dict["tag"]))} -if($dict["classname"]!==undefined){if(obj.getElementsByClassName===undefined){throw _b_.TypeError.$factory("DOMNode object doesn't support "+ +return make_list(self.getElementsByTagName($dict["tag"]))} +if($dict["classname"]!==undefined){if(self.getElementsByClassName===undefined){throw _b_.TypeError.$factory("DOMNode object doesn't support "+ "selection by class name")} -return make_list(obj.getElementsByClassName($dict['classname']))} -if($dict["id"]!==undefined){if(obj.getElementById===undefined){throw _b_.TypeError.$factory("DOMNode object doesn't support "+ +return make_list(self.getElementsByClassName($dict['classname']))} +if($dict["id"]!==undefined){if(self.getElementById===undefined){throw _b_.TypeError.$factory("DOMNode object doesn't support "+ "selection by id")} var id_res=document.getElementById($dict['id']) if(! 
id_res){return[]} return[DOMNode.$factory(id_res)]} -if($dict["selector"]!==undefined){if(obj.querySelectorAll===undefined){throw _b_.TypeError.$factory("DOMNode object doesn't support "+ +if($dict["selector"]!==undefined){if(self.querySelectorAll===undefined){throw _b_.TypeError.$factory("DOMNode object doesn't support "+ "selection by selector")} -return make_list(obj.querySelectorAll($dict['selector']))} +return make_list(self.querySelectorAll($dict['selector']))} return res} DOMNode.getContext=function(self){ -if(!("getContext" in self.elt)){throw _b_.AttributeError.$factory("object has no attribute 'getContext'")} -var obj=self.elt -return function(ctx){return JSObject.$factory(obj.getContext(ctx))}} +if(!("getContext" in self)){throw _b_.AttributeError.$factory("object has no attribute 'getContext'")} +return function(ctx){return $B.JSObj.$factory(self.getContext(ctx))}} DOMNode.getSelectionRange=function(self){ -if(self.elt["getSelectionRange"]!==undefined){return self.elt.getSelectionRange.apply(null,arguments)}} -DOMNode.html=function(self){var res=self.elt.innerHTML -if(res===undefined){if(self.elt.nodeType==9){res=self.elt.body.innerHTML} +if(self["getSelectionRange"]!==undefined){return self.getSelectionRange.apply(null,arguments)}} +DOMNode.html=function(self){var res=self.innerHTML +if(res===undefined){if(self.nodeType==9){res=self.body.innerHTML} else{res=_b_.None}} return res} -DOMNode.id=function(self){if(self.elt.id !==undefined){return self.elt.id} +DOMNode.id=function(self){if(self.id !==undefined){return self.id} return _b_.None} DOMNode.index=function(self,selector){var items -if(selector===undefined){items=self.elt.parentElement.childNodes}else{items=self.elt.parentElement.querySelectorAll(selector)} +if(selector===undefined){items=self.parentElement.childNodes}else{items=self.parentElement.querySelectorAll(selector)} var rank=-1 -for(var i=0;i < items.length;i++){if(items[i]===self.elt){rank=i;break}} +for(var i=0;i < items.length;i++){if(items[i]===self){rank=i;break}} return rank} DOMNode.inside=function(self,other){ -other=other.elt -var elt=self.elt +var elt=self while(true){if(other===elt){return true} elt=elt.parentElement if(! 
elt){return false}}} DOMNode.options=function(self){ -return new $OptionsClass(self.elt)} -DOMNode.parent=function(self){if(self.elt.parentElement){return DOMNode.$factory(self.elt.parentElement)} +return new $OptionsClass(self)} +DOMNode.parent=function(self){if(self.parentElement){return DOMNode.$factory(self.parentElement)} return _b_.None} DOMNode.reset=function(self){ -return function(){self.elt.reset()}} +return function(){self.reset()}} +DOMNode.scrolled_left={__get__:function(self){return $getPosition(self).left- +document.scrollingElement.scrollLeft},__set__:function(){throw _b_.AttributeError.$factory("'DOMNode' objectattribute "+ +"'scrolled_left' is read-only")}} +DOMNode.scrolled_top={__get__:function(self){return $getPosition(self).top- +document.scrollingElement.scrollTop},__set__:function(){throw _b_.AttributeError.$factory("'DOMNode' objectattribute "+ +"'scrolled_top' is read-only")}} DOMNode.select=function(self,selector){ -if(self.elt.querySelectorAll===undefined){throw _b_.TypeError.$factory("DOMNode object doesn't support "+ +if(self.querySelectorAll===undefined){throw _b_.TypeError.$factory("DOMNode object doesn't support "+ "selection by selector")} -return make_list(self.elt.querySelectorAll(selector))} +return make_list(self.querySelectorAll(selector))} DOMNode.select_one=function(self,selector){ -if(self.elt.querySelector===undefined){throw _b_.TypeError.$factory("DOMNode object doesn't support "+ +if(self.querySelector===undefined){throw _b_.TypeError.$factory("DOMNode object doesn't support "+ "selection by selector")} -var res=self.elt.querySelector(selector) +var res=self.querySelector(selector) if(res===null){return _b_.None} return DOMNode.$factory(res)} DOMNode.style=function(self){ -self.elt.style.float=self.elt.style.cssFloat ||self.style.styleFloat -return $B.JSObject.$factory(self.elt.style)} +self.style.float=self.style.cssFloat ||self.styleFloat +return $B.JSObj.$factory(self.style)} DOMNode.setSelectionRange=function(self){ if(this["setSelectionRange"]!==undefined){return(function(obj){return function(){return obj.setSelectionRange.apply(obj,arguments)}})(this)}else if(this["createTextRange"]!==undefined){return(function(obj){return function(start_pos,end_pos){if(end_pos==undefined){end_pos=start_pos} var range=obj.createTextRange() @@ -12607,52 +13704,49 @@ range.collapse(true) range.moveEnd("character",start_pos) range.moveStart("character",end_pos) range.select()}})(this)}} -DOMNode.set_class_name=function(self,arg){self.elt.setAttribute("class",arg)} -DOMNode.set_html=function(self,value){var elt=self.elt -if(elt.nodeType==9){elt=elt.body} -elt.innerHTML=_b_.str.$factory(value)} +DOMNode.set_class_name=function(self,arg){self.setAttribute("class",arg)} +DOMNode.set_html=function(self,value){if(self.nodeType==9){self=self.body} +self.innerHTML=_b_.str.$factory(value)} DOMNode.set_style=function(self,style){ if(!_b_.isinstance(style,_b_.dict)){throw _b_.TypeError.$factory("style must be dict, not "+ $B.class_name(style))} var items=_b_.list.$factory(_b_.dict.items(style)) for(var i=0;i < items.length;i++){var key=items[i][0],value=items[i][1] -if(key.toLowerCase()=="float"){self.elt.style.cssFloat=value -self.elt.style.styleFloat=value}else{switch(key){case "top": +if(key.toLowerCase()=="float"){self.style.cssFloat=value +self.style.styleFloat=value}else{switch(key){case "top": case "left": case "width": case "borderWidth": if(_b_.isinstance(value,_b_.int)){value=value+"px"}} -self.elt.style[key]=value}}} -DOMNode.set_text=function(self,value){var 
elt=self.elt -if(elt.nodeType==9){elt=elt.body} -elt.innerText=_b_.str.$factory(value) -elt.textContent=_b_.str.$factory(value)} -DOMNode.set_value=function(self,value){self.elt.value=_b_.str.$factory(value)} +self.style[key]=value}}} +DOMNode.set_text=function(self,value){if(self.nodeType==9){self=self.body} +self.innerText=_b_.str.$factory(value) +self.textContent=_b_.str.$factory(value)} +DOMNode.set_value=function(self,value){self.value=_b_.str.$factory(value)} DOMNode.submit=function(self){ -return function(){self.elt.submit()}} -DOMNode.text=function(self){var elt=self.elt -if(elt.nodeType==9){elt=elt.body} -var res=elt.innerText ||elt.textContent +return function(){self.submit()}} +DOMNode.text=function(self){if(self.nodeType==9){self=self.body} +var res=self.innerText ||self.textContent if(res===null){res=_b_.None} return res} DOMNode.toString=function(self){if(self===undefined){return 'DOMNode'} -return self.elt.nodeName} +return self.nodeName} DOMNode.trigger=function(self,etype){ -if(self.elt.fireEvent){self.elt.fireEvent("on"+etype)}else{var evObj=document.createEvent("Events") +if(self.fireEvent){self.fireEvent("on"+etype)}else{var evObj=document.createEvent("Events") evObj.initEvent(etype,true,false) -self.elt.dispatchEvent(evObj)}} +self.dispatchEvent(evObj)}} DOMNode.unbind=function(self,event){ -self.elt.$events=self.elt.$events ||{} -if(self.elt.$events==={}){return _b_.None} -if(event===undefined){for(var event in self.elt.$events){DOMNode.unbind(self,event)} +self.$events=self.$events ||{} +if(self.$events==={}){return _b_.None} +if(event===undefined){for(var event in self.$events){DOMNode.unbind(self,event)} return _b_.None} -if(self.elt.$events[event]===undefined || -self.elt.$events[event].length==0){return _b_.None} -var events=self.elt.$events[event] +if(self.$events[event]===undefined || +self.$events[event].length==0){return _b_.None} +var events=self.$events[event] if(arguments.length==2){ for(var i=0;i < events.length;i++){var callback=events[i][1] -self.elt.removeEventListener(event,callback,false)} -self.elt.$events[event]=[] +self.removeEventListener(event,callback,false)} +self.$events[event]=[] return _b_.None} for(var i=2;i < arguments.length;i++){var callback=arguments[i],flag=false,func=callback.$func if(func===undefined){ @@ -12661,22 +13755,26 @@ for(var j=0;j < events.length;j++){if(events[j][0]===callback){var func=callback break}} if(!found){throw _b_.TypeError.$factory("function is not an event callback")}} for(var j=0;j < events.length;j++){if($B.$getattr(func,'__eq__')(events[j][0])){var callback=events[j][1] -self.elt.removeEventListener(event,callback,false) +self.removeEventListener(event,callback,false) events.splice(j,1) flag=true break}} if(!flag){throw _b_.KeyError.$factory('missing callback for event '+event)}}} -$B.set_func_names(DOMNode,"") -var Query={__class__:_b_.type,$infos:{__name__:"query"}} +$B.set_func_names(DOMNode,"browser") +var Query={__class__:_b_.type,__mro__:[_b_.object],$infos:{__name__:"query"}} Query.__contains__=function(self,key){return self._keys.indexOf(key)>-1} Query.__getitem__=function(self,key){ var result=self._values[key] -if(result===undefined){throw _b_.KeyError.$factory(key)} -if(result.length==1){return result[0]} +if(result===undefined){throw _b_.KeyError.$factory(key)}else if(result.length==1){return result[0]} return result} var Query_iterator=$B.make_iterator_class("query string iterator") Query.__iter__=function(self){return Query_iterator.$factory(self._keys)} -Query.__mro__=[object] 
+Query.__setitem__=function(self,key,value){self._values[key]=[value] +return _b_.None} +Query.__str__=Query.__repr__=function(self){ +var elts=[] +for(var key in self._values){for(const val of self._values[key]){elts.push(encodeURIComponent(key)+"="+encodeURIComponent(val))}} +if(elts.length==0){return ""}else{return "?"+elts.join("&")}} Query.getfirst=function(self,key,_default){ var result=self._values[key] if(result===undefined){if(_default===undefined){return _b_.None} @@ -12690,13 +13788,7 @@ Query.getvalue=function(self,key,_default){try{return Query.__getitem__(self,key catch(err){if(_default===undefined){return _b_.None} return _default}} Query.keys=function(self){return self._keys} -DOMNode.query=function(self){var res={__class__:Query,_keys :[],_values :{}} -var qs=location.search.substr(1).split('&') -for(var i=0;i < qs.length;i++){var pos=qs[i].search("="),elts=[qs[i].substr(0,pos),qs[i].substr(pos+1)],key=decodeURIComponent(elts[0]),value=decodeURIComponent(elts[1]) -if(res._keys.indexOf(key)>-1){res._values[key].push(value)} -else{res._keys.push(key) -res._values[key]=[value]}} -return res} +$B.set_func_names(Query,"") var TagSum={__class__ :_b_.type,__mro__:[object],$infos:{__module__:"",__name__:"TagSum"}} TagSum.appendChild=function(self,child){self.children.push(child)} TagSum.__add__=function(self,other){if($B.get_class(other)===TagSum){self.children=self.children.concat(other.children)}else if(_b_.isinstance(other,[_b_.str,_b_.int,_b_.float,_b_.dict,_b_.set,_b_.list])){self.children=self.children.concat( @@ -12718,7 +13810,7 @@ TagSum.$factory=function(){return{ __class__:TagSum,children:[],toString:function(){return "(TagSum)"}}} $B.set_func_names(TagSum,"") $B.TagSum=TagSum -var win=JSObject.$factory(_window) +var win=$B.JSObj.$factory(_window) win.get_postMessage=function(msg,targetOrigin){if(_b_.isinstance(msg,dict)){var temp={__class__:"dict"},items=_b_.list.$factory(_b_.dict.items(msg)) items.forEach(function(item){temp[item[0]]=item[1]}) msg=temp} @@ -12730,309 +13822,74 @@ $B.win=win})(__BRYTHON__) var _b_=$B.builtins var bltns=$B.InjectBuiltins() eval(bltns) -function rstrip(s,strip_chars){var _chars=strip_chars ||" \t\n"; -var nstrip=0,len=s.length; -while(nstrip < len && _chars.indexOf(s.charAt(len-1-nstrip))>-1)nstrip++; -return s.substr(0,len-nstrip)} -function jscode_namespace(iter_name,action,parent_id){var _clean=''; -if(action==='store'){_clean=' = {}'} -var res='for(var attr in this.blocks){'+ -'eval("var " + attr + " = this.blocks[attr]")'+ -'};'+ -'\nvar $locals_'+iter_name+' = this.env'+_clean+', '+ -'\n $local_name = "'+iter_name+'", '+ -'\n $locals = $locals_'+iter_name+','+ -'\n $yield;' -if(parent_id){res+='$locals.$parent = $locals_'+parent_id.replace(/\./g,"_")+ -';'} -return res} -function make_node(top_node,node){ -if(node.type==="marker"){return} -var is_cond=false,is_except=false,is_else=false,is_continue,ctx_js -if(node.C.$genjs){ctx_js=node.C.$genjs}else{ctx_js=node.C.$genjs=node.C.to_js()} -if(node.locals_def){var parent_id=node.func_node.parent_block.id -if(node.func_node.ntype=="generator"){ -var iter_name=top_node.iter_id -ctx_js=jscode_namespace(iter_name,'store',parent_id)}else{ctx_js+="$locals.$parent = $locals_"+parent_id+";"}} -if(node.is_catch){is_except=true;is_cond=true} -if(node.is_except){is_except=true} -if(node.C.type=="node"){var ctx=node.C.tree[0] -var ctype=ctx.type -switch(ctx.type){case "except": -is_except=true -is_cond=true -break -case "single_kw": -is_cond=true -if(ctx.token=="else"){is_else=true} 
-if(ctx.token=="finally"){is_except=true} -break -case "condition": -if(ctx.token=="elif"){is_else=true;is_cond=true} -if(ctx.token=="if"){is_cond=true}}} -if(ctx_js){ -var new_node=new $B.genNode(ctx_js) -new_node.line_num=node.line_num -if(ctype=="yield"){ -var ctx_manager=in_ctx_manager(node) -var yield_node_id=top_node.yields.length -while(ctx_js.endsWith(";")){ctx_js=ctx_js.substr(0,ctx_js.length-1)} -var res="return ["+ctx_js+", "+yield_node_id+"]" -if(ctx_manager !==undefined){res="$yield = true;"+res} -new_node.data=res -top_node.yields.push(new_node)}else if(node.is_set_yield_value){ -var ctx_manager -if(node.after_yield){ctx_manager=in_ctx_manager(node)} -var js="var sent_value = this.sent_value === undefined ? "+ -"None : this.sent_value;",h="\n"+' '.repeat(node.indent) -js+=h+"this.sent_value = None" -js+=h+"if(sent_value.__class__ === $B.$GeneratorSendError)"+ -"{throw sent_value.err};" -if(typeof ctx_js=="number"){js+=h+"var $yield_value"+ctx_js+" = sent_value;"} -if(ctx_manager !==undefined){js+=h+"$yield = true;" } -new_node.data=js}else if(ctype=="break" ||ctype=="continue"){ -new_node["is_"+ctype]=true -new_node.loop_num=node.C.tree[0].loop_ctx.loop_num} -new_node.is_yield=(ctype=="yield" ||ctype=="return") -new_node.is_cond=is_cond -new_node.is_except=is_except -new_node.is_if=ctype=="condition" && ctx.token=="if" -new_node.is_try=node.is_try -new_node.is_else=is_else -new_node.loop_start=node.loop_start -new_node.is_set_yield_value=node.is_set_yield_value -for(var i=0,len=node.children.length;i < len;i++){var nd=make_node(top_node,node.children[i]) -if(nd !==undefined){new_node.addChild(nd)}}} -return new_node} -$B.genNode=function(data,parent){this.data=data -this.parent=parent -this.children=[] -this.has_child=false -if(parent===undefined){this.nodes={} -this.num=0} -this.addChild=function(child){if(child===undefined){console.log("child of "+this+" undefined")} -this.children[this.children.length]=child -this.has_child=true -child.parent=this -child.rank=this.children.length-1} -this.insert=function(pos,child){if(child===undefined){console.log("child of "+this+" undefined")} -this.children.splice(pos,0,child) -this.has_child=true -child.parent=this -child.rank=pos } -this.clone=function(){var res=new $B.genNode(this.data) -res.has_child=this.has_child -res.is_cond=this.is_cond -res.is_except=this.is_except -res.is_if=this.is_if -res.is_try=this.is_try -res.is_else=this.is_else -res.loop_num=this.loop_num -res.loop_start=this.loop_start -res.is_yield=this.is_yield -res.line_num=this.line_num -return res} -this.clone_tree=function(exit_node,head){ -var res=new $B.genNode(this.data) -if(this.replaced && ! in_loop(this)){ -console.log("already replaced",this) -res.data="void(0)"} -if(this===exit_node &&(this.parent.is_cond ||! in_loop(this))){ -if(! 
exit_node.replaced){ -console.log("replace by void(0)",this) -res=new $B.genNode("void(0)")}else{res=new $B.genNode(exit_node.data)} -exit_node.replaced=true} -if(head &&(this.is_break ||this.is_continue)){var loop=in_loop(this) -if(loop.has("yield")){res.data="" -if(this.is_break){res.data+='$locals["$no_break'+this.loop_num+ -'"] = false;'} -res.data+='var err = new Error("break"); '+ -"err.__class__ = $B.GeneratorBreak; throw err;" -res.is_break=this.is_break}else{res.is_break=this.is_break}} -res.is_continue=this.is_continue -res.has_child=this.has_child -res.is_cond=this.is_cond -res.is_except=this.is_except -res.is_try=this.is_try -res.is_else=this.is_else -res.loop_num=this.loop_num -res.loop_start=this.loop_start -res.no_break=true -res.is_yield=this.is_yield -res.line_num=this.line_num -for(var i=0,len=this.children.length;i < len;i++){res.addChild(this.children[i].clone_tree(exit_node,head)) -if(this.children[i].is_break){res.no_break=false}} -return res} -this.has=function(keyword){ -if(this["is_"+keyword]){return true} -else{for(var i=0,len=this.children.length;i < len;i++){if(this.children[i].loop_start !==undefined){ -continue} -if(this.children[i].has(keyword)){return true}}} -return false} -this.indent_src=function(indent){return " ".repeat(indent*indent)} -this.src=function(indent){ -indent=indent ||0 -var res=[this.indent_src(indent)+this.data],pos=1 -if(this.has_child){res[pos++]="{"} -res[pos++]="\n" -for(var i=0,len=this.children.length;i < len;i++){res[pos++]=this.children[i].src(indent+1) -if(this.children[i].is_yield){break}} -if(this.has_child){res[pos++]="\n"+this.indent_src(indent)+"}\n"} -return res.join("")} -this.toString=function(){return ""}} -$B.GeneratorBreak=$B.make_class("GeneratorBreak") -$B.$GeneratorSendError={} var $GeneratorReturn={} $B.generator_return=function(value){return{__class__:$GeneratorReturn,value:value}} -function in_ctx_manager(node){ -var ctx_manager,parent=node.parent -while(parent && parent.ntype !=="generator"){ctx_manager=parent.ctx_manager_num -if(ctx_manager !==undefined){return ctx_manager} -parent=parent.parent}} -function in_loop(node){ -while(node){if(node.loop_start !==undefined){return node} -node=node.parent} -return false} -function in_try(node){ -var tries=[],pnode=node.parent,pos=0 -while(pnode){if(pnode.is_try){tries[pos++]=pnode} -pnode=pnode.parent} -return tries} -var $BRGeneratorDict={__class__:_b_.type,$infos:{__name__:"generator",__module__:"builtins"},$is_class:true} -$B.gen_counter=0 -function remove_line_nums(node){ -for(var i=0;i < node.children.length;i++){if(node.children[i].is_line_num){node.children.splice(i,1)}else{remove_line_nums(node.children[i])}}} -$B.$BRgenerator=function(func_name,blocks,def_id,def_node){ -var def_ctx=def_node.C.tree[0] -var module=def_node.module, -iter_id=def_id -if($B.debug > 0){ -$B.$add_line_num(def_node,def_ctx.rank)} -var func_root=new $B.genNode(def_ctx.to_js()) -remove_line_nums(def_node.parent) -func_root.module=module -func_root.yields=[] -func_root.loop_ends={} -func_root.def_id=def_id -func_root.iter_id=iter_id -for(var i=0,len=def_node.children.length;i < len;i++){var nd=make_node(func_root,def_node.children[i]) -if(nd===undefined){continue} -func_root.addChild(nd)} -var obj={__class__ :$BRGeneratorDict,blocks:blocks,def_ctx:def_ctx,def_id:def_id,func_name:func_name,func_root:func_root,module:module,gi_running:false,iter_id:iter_id,id:iter_id,num:0} -var src=func_root.src(),raw_src=src.substr(src.search("function")) -raw_src+="return "+def_ctx.name+def_ctx.num+"}" 
-var funcs=[raw_src] -obj.parent_block=def_node -for(var i=0;i < func_root.yields.length;i++){funcs.push(make_next(obj,i))} -return funcs} -function make_next(self,yield_node_id){ -var exit_node=self.func_root.yields[yield_node_id] -exit_node.replaced=false -var root=new $B.genNode(self.def_ctx.to_js()) -var fnode=self.func_root.clone() -root.addChild(fnode) -var parent_scope=self.func_root -var js=jscode_namespace(self.iter_id,'restore') -fnode.addChild(new $B.genNode(js)) -js='var $top_frame = ["'+self.iter_id+'",$locals,"'+self.module+ -'",$locals_'+self.module.replace(/\./g,'_')+'];'+ -'$B.frames_stack.push($top_frame); var $stack_length = '+ -'$B.frames_stack.length;' -fnode.addChild(new $B.genNode(js)) -while(1){ -var exit_parent=exit_node.parent,rest=[],pos=0,has_break,has_continue -var start=exit_node.rank+1 -if(exit_node.loop_start !==undefined){ -start=exit_node.rank}else if(exit_node.is_cond){ -while(start < exit_parent.children.length && -(exit_parent.children[start].is_except || -exit_parent.children[start].is_else)){start++}}else if(exit_node.is_try ||exit_node.is_except){ -while(start < exit_parent.children.length && -(exit_parent.children[start].is_except || -exit_parent.children[start].is_else)){start++}} -for(var i=start,len=exit_parent.children.length;i < len;i++){var clone=exit_parent.children[i].clone_tree(null,true) -if(clone.is_continue){ -break} -if(clone.has("continue")){has_continue=true;} -rest[pos++]=clone -if(clone.has("break")){has_break=true}} -if((has_break ||has_continue)&& rest.length > 0){ -var rest_try=new $B.genNode("try") -for(var i=0,len=rest.length;i < len;i++){rest_try.addChild(rest[i])} -var catch_test="catch(err)"+ -"{if(err.__class__ !== $B.GeneratorBreak){throw err}}" -catch_test=new $B.genNode(catch_test) -rest=[rest_try,catch_test] -if(exit_parent.loop_start !==undefined){var test='if($locals["$no_break'+exit_parent.loop_start+ -'"])',test_node=new $B.genNode(test) -test_node.addChild(rest_try) -test_node.addChild(catch_test) -rest=[test_node]}} -var tries=in_try(exit_node) -if(tries.length==0){ -for(var i=0;i < rest.length;i++){fnode.addChild(rest[i])}}else{ -var tree=[],pos=0 -for(var i=0;i < tries.length;i++){var try_node=tries[i],try_clone=try_node.clone() -if(i==0){for(var j=0;j < rest.length;j++){try_clone.addChild(rest[j])}} -var children=[try_clone],cpos=1 -for(var j=try_node.rank+1; -j < try_node.parent.children.length;j++){if(try_node.parent.children[j].is_except){children[cpos++]= -try_node.parent.children[j].clone_tree(null,true)}else{break}} -tree[pos++]=children} -var parent=fnode -while(tree.length){children=tree.pop() -children.forEach(function(child){parent.addChild(child)}) -parent=children[0]}} -exit_node=exit_parent -if(exit_node===self.func_root){break}} -var src=root.children[0].src(),next_src=src.substr(src.search("function")) -next_src=next_src.substr(10) -next_src=next_src.substr(next_src.search("function")) -return next_src} -var generator={__class__:_b_.type,__mro__:[_b_.object],$infos:{__module__:"builtins",__name__:"generator"}} -generator.__enter__=function(self){console.log("generator.__enter__ called")} -generator.__exit__=function(self){console.log("generator.__exit__ called")} -generator.__str__=function(self){return ""} -generator.__iter__=function(self){return self} -generator.__next__=function(self){if(self.$finished){throw _b_.StopIteration.$factory(_b_.None)} -if(self.gi_running===true){throw ValueError.$factory("generator already executing")} +$B.generator=$B.make_class("generator",function(func){var 
res=function(){var gen=func.apply(null,arguments) +gen.$name=func.name +gen.$func=func +gen.$has_run=false +gen.__class__=$B.generator +if(func.$has_yield_in_cm){var locals=$B.last($B.frames_stack)[1] +locals.$close_generators=locals.$close_generators ||[] +locals.$close_generators.push(gen)} +return gen} +res.$infos=func.$infos +res.$is_genfunc=true +return res} +) +$B.generator.__iter__=function(self){return self} +$B.generator.__next__=function(self){return $B.generator.send(self,_b_.None)} +$B.generator.close=function(self){try{$B.generator.$$throw(self,_b_.GeneratorExit.$factory())}catch(err){if(! $B.is_exc(err,[_b_.GeneratorExit,_b_.StopIteration])){throw _b_.RuntimeError.$factory("generator ignored GeneratorExit")}}} +$B.generator.send=function(self,value){ +self.$has_run=true +if(self.$finished){throw _b_.StopIteration.$factory(value)} +if(self.gi_running===true){throw _b_.ValueError.$factory("generator already executing")} self.gi_running=true -if(self.next===undefined){self.$finished=true -throw _b_.StopIteration.$factory(_b_.None)} -try{var res=self.next.apply(self,self.args)}catch(err){ -self.$finished=true -err.$stack=$B.frames_stack.slice() -throw err}finally{ -self.gi_running=false -$B.leave_frame(self.iter_id)} -if(res===undefined){throw _b_.StopIteration.$factory(_b_.None)} -else if(res[0].__class__===$GeneratorReturn){ -self.$finished=true -throw StopIteration.$factory(res[0].value)} -self.next=self.nexts[res[1]] +try{var res=self.next(value)}catch(err){self.$finished=true +throw err} +if(res.value && res.value.__class__===$GeneratorReturn){self.$finished=true +throw _b_.StopIteration.$factory(res.value.value)} self.gi_running=false -return res[0]} -generator.close=function(self,value){self.sent_value=_b_.GeneratorExit.$factory() -try{var res=generator.__next__(self) -if(res !==_b_.None){throw _b_.RuntimeError.$factory("closed generator returned a value")}}catch(err){if($B.is_exc(err,[_b_.StopIteration,_b_.GeneratorExit])){return _b_.None} -throw err}} -generator.send=function(self,value){self.sent_value=value -return generator.__next__(self)} -generator.$$throw=function(self,type,value,traceback){var exc=type -if(value !==undefined){exc=$B.$call(exc)(value)} +if(res.done){throw _b_.StopIteration.$factory(res.value)} +return res.value} +$B.generator.$$throw=function(self,type,value,traceback){var exc=type +if(exc.$is_class){if(! 
_b_.issubclass(type,_b_.BaseException)){throw _b_.TypeError.$factory("exception value must be an "+ +"instance of BaseException")}else if(value===undefined){exc=$B.$call(exc)()}else if(_b_.isinstance(value,type)){exc=value}}else{if(value===undefined){value=exc}else{exc=$B.$call(exc)(value)}} if(traceback !==undefined){exc.$traceback=traceback} -self.sent_value={__class__:$B.$GeneratorSendError,err:exc} -return generator.__next__(self)} -generator.$factory=$B.genfunc=function(name,blocks,funcs,$defaults){ -if(name.startsWith("__ge")){ -for(var block_id in blocks){if(block_id=="$locals_"+name){continue} -for(var attr in blocks[block_id]){blocks["$locals_"+name][attr]=blocks[block_id][attr]}}} -return function(){var iter_id="$gen"+$B.gen_counter++,gfuncs=[] -gfuncs.push(funcs[0]($defaults)) -for(var i=1;i < funcs.length;i++){gfuncs.push(funcs[i])} -var res={__class__:generator,__name__:name,args:Array.prototype.slice.call(arguments),blocks:blocks,env:{},name:name,nexts:gfuncs.slice(1),next:gfuncs[0],iter_id:iter_id,gi_running:false,$started:false,$defaults:$defaults,$is_generator_obj:true} -return res}} -$B.set_func_names(generator,"builtins")})(__BRYTHON__) +var res=self.throw(exc) +if(res.done){throw _b_.StopIteration.$factory("StopIteration")} +return res.value} +$B.set_func_names($B.generator,"builtins") +$B.async_generator=$B.make_class("async_generator",function(func){var f=function(){var res=func.apply(null,arguments) +res.__class__=$B.async_generator +return res} +return f} +) +var ag_closed={} +$B.async_generator.__aiter__=function(self){return self} +$B.async_generator.__anext__=function(self){return $B.async_generator.asend(self,_b_.None)} +$B.async_generator.aclose=function(self){self.$finished=true +return _b_.None} +$B.async_generator.asend=async function(self,value){if(self.$finished){throw _b_.StopAsyncIteration.$factory(value)} +if(self.ag_running===true){throw _b_.ValueError.$factory("generator already executing")} +self.ag_running=true +try{var res=await self.next(value)}catch(err){self.$finished=true +throw err} +if(res.done){throw _b_.StopAsyncIteration.$factory(value)} +if(res.value.__class__===$GeneratorReturn){self.$finished=true +throw _b_.StopAsyncIteration.$factory(res.value.value)} +self.ag_running=false +return res.value} +$B.async_generator.athrow=async function(self,type,value,traceback){var exc=type +if(exc.$is_class){if(! 
_b_.issubclass(type,_b_.BaseException)){throw _b_.TypeError.$factory("exception value must be an "+ +"instance of BaseException")}else if(value===undefined){value=$B.$call(exc)()}}else{if(value===undefined){value=exc}else{exc=$B.$call(exc)(value)}} +if(traceback !==undefined){exc.$traceback=traceback} +await self.throw(value)} +$B.set_func_names($B.async_generator,"builtins") +function rstrip(s,strip_chars){var _chars=strip_chars ||" \t\n"; +var nstrip=0,len=s.length; +while(nstrip < len && _chars.indexOf(s.charAt(len-1-nstrip))>-1)nstrip++; +return s.substr(0,len-nstrip)}})(__BRYTHON__) ; ;(function($B){var _b_=$B.builtins var update=function(mod,data){for(attr in data){mod[attr]=data[attr]}} @@ -13044,10 +13901,9 @@ var $=$B.args("bind",3,{elt:null,evt:null,options:null},["elt","evt","options"], var options=$.options if(typeof options=="boolean"){} else if(options.__class__===_b_.dict){options=options.$string_dict}else{options==false} -return function(callback){if($.elt.__class__ && -_b_.issubclass($.elt.__class__,$B.JSObject)){ -function f(ev){try{return callback($B.JSObject.$factory(ev))}catch(err){$B.handle_error(err)}} -$.elt.js.addEventListener($.evt,f,options) +return function(callback){if($B.get_class($.elt)===$B.JSObj){ +function f(ev){try{return callback($B.JSObj.$factory(ev))}catch(err){$B.handle_error(err)}} +$.elt.addEventListener($.evt,f,options) return callback}else if(_b_.isinstance($.elt,$B.DOMNode)){ $B.DOMNode.bind($.elt,$.evt,callback,options) return callback}else if(_b_.isinstance($.elt,_b_.str)){ @@ -13059,17 +13915,19 @@ while(true){try{var elt=_b_.next(it) $B.DOMNode.bind(elt,$.evt,callback)}catch(err){if(_b_.isinstance(err,_b_.StopIteration)){break} throw err}}}catch(err){if(_b_.isinstance(err,_b_.AttributeError)){$B.DOMNode.bind($.elt,$.evt,callback)} throw err} -return callback}},console:$B.JSObject.$factory(self.console),self:$B.win,win:$B.win,$$window:$B.win,} +return callback}},console:self.console && $B.JSObj.$factory(self.console),self:$B.win,win:$B.win,$$window:$B.win,} browser.__path__=browser.__file__ -if($B.isWebWorker){browser.is_webworker=true +if($B.isNode){delete browser.$$window +delete browser.win}else if($B.isWebWorker){browser.is_webworker=true delete browser.$$window delete browser.win -browser.self.js.send=self.postMessage}else{browser.is_webworker=false -update(browser,{$$alert:function(message){window.alert($B.builtins.str.$factory(message))},confirm:$B.JSObject.$factory(window.confirm),$$document:$B.DOMNode.$factory(document),doc:$B.DOMNode.$factory(document), -DOMEvent:$B.DOMEvent,DOMNode:$B.DOMNode.$factory,load:function(script_url){ +browser.self.send=self.postMessage}else{ +browser.is_webworker=false +update(browser,{$$alert:function(message){window.alert($B.builtins.str.$factory(message ||""))},confirm:$B.JSObj.$factory(window.confirm),$$document:$B.DOMNode.$factory(document),doc:$B.DOMNode.$factory(document), +DOMEvent:$B.DOMEvent,DOMNode:$B.DOMNode,load:function(script_url){ var file_obj=$B.builtins.open(script_url) -var content=$B.builtins.getattr(file_obj,'read')() -eval(content)},mouseCoords:function(ev){return $B.JSObject.$factory($mouseCoords(ev))},prompt:function(message,default_value){return $B.JSObject.$factory(window.prompt(message,default_value||''))},reload:function(){ +var content=$B.$getattr(file_obj,'read')() +eval(content)},mouseCoords:function(ev){return $B.JSObj.$factory($mouseCoords(ev))},prompt:function(message,default_value){return $B.JSObj.$factory(window.prompt(message,default_value||''))},reload:function(){ var 
scripts=document.getElementsByTagName('script'),js_scripts=[] scripts.forEach(function(script){if(script.type===undefined || script.type=='text/javascript'){js_scripts.push(script) @@ -13081,13 +13939,6 @@ var regex=new RegExp("[\\?&]"+name+"=([^&#]*)"),results=regex.exec(location.sear results=results===null ? "" : decodeURIComponent(results[1].replace(/\+/g," ")); return $B.builtins.str.$factory(results);}}) -$B.createWebComponent=function(cls){class WebComp extends HTMLElement{ -constructor(){ -super(); -if(this.__init__){this.__init__()}}} -for(key in cls){if(typeof cls[key]=="function"){WebComp.prototype[key]=(function(attr){return function(){return __BRYTHON__.pyobj2jsobj(cls[attr]).call(null,this,...arguments)}})(key)}} -customElements.define(cls.tag_name,WebComp) -return WebComp} modules['browser.html']=(function($B){var _b_=$B.builtins var TagSum=$B.TagSum function makeTagDict(tagName){ @@ -13095,13 +13946,12 @@ var dict={__class__:_b_.type,$infos:{__name__:tagName,__module__:"browser.html"} dict.__init__=function(){var $ns=$B.args('pow',1,{self:null},['self'],arguments,{},'args','kw'),self=$ns['self'],args=$ns['args'] if(args.length==1){var first=args[0] if(_b_.isinstance(first,[_b_.str,_b_.int,_b_.float])){ -self.elt.innerHTML=_b_.str.$factory(first)}else if(first.__class__===TagSum){for(var i=0,len=first.children.length;i < len;i++){self.elt.appendChild(first.children[i].elt)}}else{if(_b_.isinstance(first,$B.DOMNode)){self.elt.appendChild(first.elt)}else{try{ +self.innerHTML=_b_.str.$factory(first)}else if(first.__class__===TagSum){for(var i=0,len=first.children.length;i < len;i++){self.appendChild(first.children[i])}}else{if(_b_.isinstance(first,$B.DOMNode)){self.appendChild(first)}else{try{ var items=_b_.list.$factory(first) -items.forEach(function(item){$B.DOMNode.__le__(self,item)})}catch(err){console.log(err) +items.forEach(function(item){$B.DOMNode.__le__(self,item)})}catch(err){if($B.debug > 1){console.log(err,err.__class__,err.args) console.log("first",first) -console.log(arguments) -throw _b_.ValueError.$factory( -'wrong element '+_b_.str.$factory(first))}}}} +console.log(arguments)} +$B.handle_error(err)}}}} var items=_b_.list.$factory(_b_.dict.items($ns['kw'])) for(var i=0,len=items.length;i < len;i++){ var arg=items[i][0],value=items[i][1] @@ -13109,8 +13959,10 @@ if(arg.toLowerCase().substr(0,2)=="on"){ var js='$B.DOMNode.bind(self,"'+ arg.toLowerCase().substr(2) eval(js+'",function(){'+value+'})')}else if(arg.toLowerCase()=="style"){$B.DOMNode.set_style(self,value)}else{if(value !==false){ -try{arg=arg.replace('_','-') -self.elt.setAttribute(arg,value)}catch(err){throw _b_.ValueError.$factory( +try{ +arg=$B.imported["browser.html"]. 
+attribute_mapper(arg) +self.setAttribute(arg,value)}catch(err){throw _b_.ValueError.$factory( "can't set attribute "+arg)}}}}} dict.__mro__=[$B.DOMNode,$B.builtins.object] dict.__new__=function(cls){ @@ -13121,7 +13973,7 @@ var res=$B.DOMNode.$factory(elt,true) res._wrapped=true }else{var res=$B.DOMNode.$factory(document.createElement(tagName),true) res._wrapped=false } res.__class__=cls -res.__dict__=_b_.dict.$factory() +res.__dict__=$B.empty_dict() return res} $B.set_func_names(dict,"browser.html") return dict} @@ -13129,7 +13981,7 @@ function makeFactory(klass){var factory=function(){if(klass.$elt_wrap !==undefin var elt=klass.$elt_wrap klass.$elt_wrap=undefined var res=$B.DOMNode.$factory(elt,true) -res._wrapped=true }else{if(klass.$infos.__name__=='SVG'){var res=$B.DOMNode.$factory(document.createElementNS("http://www.w3.org/2000/svg","svg"),true)}else{var res=$B.DOMNode.$factory(document.createElement(klass.$infos.__name__),true)} +res._wrapped=true }else{if(klass.$infos.__name__=='SVG'){var res=$B.DOMNode.$factory(document.createElementNS("http://www.w3.org/2000/svg","svg"),true)}else{var elt=document.createElement(klass.$infos.__name__),res=$B.DOMNode.$factory(elt,true)} res._wrapped=false } res.__class__=klass klass.__init__(res,...arguments) @@ -13138,44 +13990,105 @@ return factory} var tags=['A','ABBR','ACRONYM','ADDRESS','APPLET','AREA','B','BASE','BASEFONT','BDO','BIG','BLOCKQUOTE','BODY','BR','BUTTON','CAPTION','CENTER','CITE','CODE','COL','COLGROUP','DD','DEL','DFN','DIR','DIV','DL','DT','EM','FIELDSET','FONT','FORM','FRAME','FRAMESET','H1','H2','H3','H4','H5','H6','HEAD','HR','HTML','I','IFRAME','IMG','INPUT','INS','ISINDEX','KBD','LABEL','LEGEND','LI','LINK','MAP','MENU','META','NOFRAMES','NOSCRIPT','OBJECT','OL','OPTGROUP','OPTION','P','PARAM','PRE','Q','S','SAMP','SCRIPT','SELECT','SMALL','SPAN','STRIKE','STRONG','STYLE','SUB','SUP','SVG','TABLE','TBODY','TD','TEXTAREA','TFOOT','TH','THEAD','TITLE','TR','TT','U','UL','VAR', 'ARTICLE','ASIDE','AUDIO','BDI','CANVAS','COMMAND','DATA','DATALIST','EMBED','FIGCAPTION','FIGURE','FOOTER','HEADER','KEYGEN','MAIN','MARK','MATH','METER','NAV','OUTPUT','PROGRESS','RB','RP','RT','RTC','RUBY','SECTION','SOURCE','TEMPLATE','TIME','TRACK','VIDEO','WBR', 'DETAILS','DIALOG','MENUITEM','PICTURE','SUMMARY'] -var obj={tags:_b_.dict.$factory()},dicts={} +var obj={tags:$B.empty_dict()},dicts={} $B.DOMNode.tags=obj.tags function maketag(tag){if(!(typeof tag=='string')){throw _b_.TypeError.$factory("html.maketag expects a string as argument")} var klass=dicts[tag]=makeTagDict(tag) klass.$factory=makeFactory(klass) -obj.tags.$string_dict[tag]=klass +_b_.dict.$setitem(obj.tags,tag,klass) return klass} tags.forEach(function(tag){obj[tag]=maketag(tag)}) obj.maketag=maketag +obj.attribute_mapper=function(attr){return attr.replace(/_/g,'-')} return obj})(__BRYTHON__)} modules['browser']=browser +$B.UndefinedClass=$B.make_class("Undefined",function(){return $B.Undefined} +) +$B.UndefinedClass.__mro__=[_b_.object] +$B.UndefinedClass.__bool__=function(self){return false} +$B.UndefinedClass.__repr__=$B.UndefinedClass.__str__=function(self){return ""} +$B.Undefined={__class__:$B.UndefinedClass} +$B.set_func_names($B.UndefinedClass,"javascript") modules['javascript']={$$this:function(){ if($B.js_this===undefined){return $B.builtins.None} -return $B.JSObject.$factory($B.js_this)},$$Date:$B.JSObject.$factory(self.Date),JSConstructor:{__get__:function(){console.warn('"javascript.JSConstructor" is deprecrated. 
'+ -'Use window..new() instead.') -return $B.JSConstructor},__set__:function(){throw _b_.AttributeError.$factory("read only")}},JSObject:{__get__:function(){console.warn('"javascript.JSObject" is deprecrated. To use '+ -'a Javascript object, use window. instead.') -return $B.JSObject},__set__:function(){throw _b_.AttributeError.$factory("read only")}},JSON:{__class__:$B.make_class("JSON"),parse:function(s){return $B.structuredclone2pyobj(JSON.parse(s))},stringify:function(obj){return JSON.stringify($B.pyobj2structuredclone(obj))}},jsobj2pyobj:function(obj){return $B.jsobj2pyobj(obj)},load:function(script_url){console.log('"javascript.load" is deprecrated. '+ +return $B.JSObj.$factory($B.js_this)},$$Date:self.Date && $B.JSObj.$factory(self.Date),$$extends:function(js_constr){return function(obj){if(typeof obj=="function"){var res=function(){js_constr.call(this,...arguments) +obj.apply(this,arguments)} +res.prototype=Object.create(js_constr.prototype) +res.prototype.constructor=res +res.$is_js_func=true +return res}else if(obj.$is_class){console.log("obj",obj) +if(js_constr.$js_func.name=="Named"){console.log("-- Named")} +var res=function(){console.log("call parent class",obj.$parent_class) +obj.$parent_class.call(this,...arguments) +if(obj.$$constructor){var args=[this] +for(var i=0,len=arguments.length;i < len;i++){args.push(arguments[i])} +obj.$$constructor.apply(this,args)}} +res.prototype=Object.create(js_constr.prototype) +res.prototype.constructor=res +res.$is_js_func=true +res.$class=obj +obj.$parent_class=js_constr +for(var attr in obj.__dict__.$string_dict){var value=obj.__dict__.$string_dict[attr][0] +if(typeof value=="function"){res.prototype[attr]=(function(x){return function(){var args=[this] +for(var i=0,len=arguments.length;i < len;i++){args.push($B.pyobj2jsobj(arguments[i]))} +return x.apply(this,args)}})(value)}else{res.prototype[attr]=$B.pyobj2jsobj(value)}} +return res}}},JSON:{__class__:$B.make_class("JSON"),parse:function(){return $B.structuredclone2pyobj( +JSON.parse.apply(this,arguments))},stringify:function(obj,replacer,space){return JSON.stringify($B.pyobj2structuredclone(obj,false),$B.JSObj.$factory(replacer),space)}},jsobj2pyobj:function(obj){return $B.jsobj2pyobj(obj)},load:function(script_url){console.log('"javascript.load" is deprecrated. 
'+ 'Use browser.load instead.') var file_obj=$B.builtins.open(script_url) -var content=$B.builtins.getattr(file_obj,'read')() -eval(content)},$$Math:$B.JSObject.$factory(self.Math),NULL:null,$$Number:$B.JSObject.$factory(self.Number),py2js:function(src,module_name){if(module_name===undefined){module_name='__main__'+$B.UUID()} -return $B.py2js(src,module_name,module_name,$B.builtins_scope).to_js()},pyobj2jsobj:function(obj){return $B.pyobj2jsobj(obj)},$$RegExp:$B.JSObject.$factory(self.RegExp),$$String:$B.JSObject.$factory(self.String),UNDEFINED:undefined} +var content=$B.$getattr(file_obj,'read')() +eval(content)},$$Math:self.Math && $B.JSObj.$factory(self.Math),NULL:null,$$Number:self.Number && $B.JSObj.$factory(self.Number),py2js:function(src,module_name){if(module_name===undefined){module_name='__main__'+$B.UUID()} +return $B.py2js(src,module_name,module_name,$B.builtins_scope).to_js()},pyobj2jsobj:function(obj){return $B.pyobj2jsobj(obj)},$$RegExp:self.RegExp && $B.JSObj.$factory(self.RegExp),$$String:self.String && $B.JSObj.$factory(self.String),UNDEFINED:$B.Undefined,UndefinedType:$B.UndefinedClass} +var arraybuffers=["Int8Array","Uint8Array","Uint8ClampedArray","Int16Array","Uint16Array","Int32Array","Uint32Array","Float32Array","Float64Array","BigInt64Array","BigUint64Array"] +arraybuffers.forEach(function(ab){if(self[ab]!==undefined){modules['javascript'][ab]=$B.JSObj.$factory(self[ab])}}) var _b_=$B.builtins modules['_sys']={ -Getframe :function(depth){return $B._frame.$factory($B.frames_stack,depth)},exc_info:function(){for(var i=$B.frames_stack.length-1;i >=0;i--){var frame=$B.frames_stack[i],exc=frame[1].$current_exception -if(exc){return _b_.tuple.$factory([exc.__class__,exc,$B.$getattr(exc,"traceback")])}} -return _b_.tuple.$factory([_b_.None,_b_.None,_b_.None])},modules:{__get__:function(){return $B.obj_dict($B.imported)},__set__:function(self,obj,value){throw _b_.TypeError.$factory("Read only property 'sys.modules'")}},path:{__get__:function(){return $B.path},__set__:function(self,obj,value){$B.path=value;}},meta_path:{__get__:function(){return $B.meta_path},__set__:function(self,obj,value){$B.meta_path=value }},path_hooks:{__get__:function(){return $B.path_hooks},__set__:function(self,obj,value){$B.path_hooks=value }},path_importer_cache:{__get__:function(){return _b_.dict.$factory($B.JSObject.$factory($B.path_importer_cache))},__set__:function(self,obj,value){throw _b_.TypeError.$factory("Read only property"+ -" 'sys.path_importer_cache'")}},stderr:{__get__:function(){return $B.stderr},__set__:function(self,obj,value){$B.stderr=value},write:function(data){_b_.getattr($B.stderr,"write")(data)}},stdout:{__get__:function(){return $B.stdout},__set__:function(self,obj,value){$B.stdout=value},write:function(data){_b_.getattr($B.stdout,"write")(data)}},stdin:{__get__:function(){return $B.stdin},__set__:function(){throw _b_.TypeError.$factory("sys.stdin is read-only")}},vfs:{__get__:function(){if($B.hasOwnProperty("VFS")){return $B.obj_dict($B.VFS)} -else{return _b_.None}},__set__:function(){throw _b_.TypeError.$factory("Read only property 'sys.vfs'")}}} +Getframe :function(){var $=$B.args("_getframe",1,{depth:null},['depth'],arguments,{depth:0},null,null),depth=$.depth +return $B._frame.$factory($B.frames_stack,$B.frames_stack.length-depth-1)},breakpointhook:function(){var hookname=$B.$options.breakpoint,modname,dot,funcname,hook +if(hookname===undefined){hookname="pdb.set_trace"} +[modname,dot,funcname]=_b_.str.rpartition(hookname,'.') +if(dot==""){modname="builtins"} 
+try{$B.$import(modname) +hook=$B.$getattr($B.imported[modname],funcname)}catch(err){console.warn("cannot import breakpoint",hookname) +return _b_.None} +return $B.$call(hook).apply(null,arguments)},exc_info:function(){for(var i=$B.frames_stack.length-1;i >=0;i--){var frame=$B.frames_stack[i],exc=frame[1].$current_exception +if(exc){return _b_.tuple.$factory([exc.__class__,exc,$B.$getattr(exc,"__traceback__")])}} +return _b_.tuple.$factory([_b_.None,_b_.None,_b_.None])},excepthook:function(exc_class,exc_value,traceback){$B.handle_error(exc_value)},gettrace:function(){return $B.tracefunc ||_b_.None},modules:_b_.property.$factory( +function(){return $B.obj_dict($B.imported)},function(self,obj,value){throw _b_.TypeError.$factory("Read only property 'sys.modules'")} +),path:_b_.property.$factory( +function(){return $B.path},function(self,obj,value){$B.path=value;} +),meta_path:_b_.property.$factory( +function(){return $B.meta_path},function(self,obj,value){$B.meta_path=value } +),path_hooks:_b_.property.$factory( +function(){return $B.path_hooks},function(self,obj,value){$B.path_hooks=value } +),path_importer_cache:_b_.property.$factory( +function(){return _b_.dict.$factory($B.JSObj.$factory($B.path_importer_cache))},function(self,obj,value){throw _b_.TypeError.$factory("Read only property"+ +" 'sys.path_importer_cache'")} +),settrace:function(){var $=$B.args("settrace",1,{tracefunc:null},['tracefunc'],arguments,{},null,null) +$B.tracefunc=$.tracefunc +$B.last($B.frames_stack)[1].$f_trace=$B.tracefunc +$B.tracefunc.$current_frame_id=$B.last($B.frames_stack)[0] +return _b_.None},stderr:_b_.property.$factory( +function(){return $B.stderr},function(self,value){$B.stderr=value} +),stdout:_b_.property.$factory( +function(){return $B.stdout},function(self,value){$B.stdout=value} +),stdin:_b_.property.$factory( +function(){return $B.stdin},function(self,value){$B.stdin=value} +),vfs:_b_.property.$factory( +function(){if($B.hasOwnProperty("VFS")){return $B.obj_dict($B.VFS)} +else{return _b_.None}},function(){throw _b_.TypeError.$factory("Read only property 'sys.vfs'")} +)} +modules._sys.__breakpointhook__=modules._sys.breakpointhook +modules._sys.stderr.write=function(data){return $B.$getattr(_sys.stderr.__get__(),"write")(data)} +modules._sys.stdout.write=function(data){return $B.$getattr(_sys.stdout.__get__(),"write")(data)} function load(name,module_obj){ module_obj.__class__=$B.module module_obj.__name__=name $B.imported[name]=module_obj for(var attr in module_obj){if(typeof module_obj[attr]=='function'){var attr1=$B.from_alias(attr) -module_obj[attr].$infos={__name__:attr1,__qualname__:name+'.'+attr1}}}} +module_obj[attr].$infos={__module__:name,__name__:attr1,__qualname__:name+'.'+attr1}}}} for(var attr in modules){load(attr,modules[attr])} -if(! 
$B.isWebWorker){modules['browser'].html=modules['browser.html']} +if(!($B.isWebWorker ||$B.isNode)){modules['browser'].html=modules['browser.html']} var _b_=$B.builtins _b_.__builtins__=$B.module.$factory('__builtins__','Python builtins') for(var attr in _b_){_b_.__builtins__[attr]=_b_[attr] @@ -13205,6 +14118,34 @@ if(self.$cell_contents===null){if(other.$cell_contents===null){return op=="__eq_ return $B.rich_comp(op,self.$cell_contents,other.$cell_contents)}})(op)}) $B.set_func_names($B.cell,"builtins")})(__BRYTHON__) ; +var docs={ArithmeticError:"Base class for arithmetic errors.",AssertionError:"Assertion failed.",AttributeError:"Attribute not found.",BaseException:"Common base class for all exceptions",BlockingIOError:"I/O operation would block.",BrokenPipeError:"Broken pipe.",BufferError:"Buffer error.",BytesWarning:"Base class for warnings about bytes and buffer related problems, mostly\nrelated to conversion from str or comparing to str.",ChildProcessError:"Child process error.",ConnectionAbortedError:"Connection aborted.",ConnectionError:"Connection error.",ConnectionRefusedError:"Connection refused.",ConnectionResetError:"Connection reset.",DeprecationWarning:"Base class for warnings about deprecated features.",EOFError:"Read beyond end of file.",Ellipsis:"",EnvironmentError:"Base class for I/O related errors.",Exception:"Common base class for all non-exit exceptions.",False:"bool(x) -> bool\n\nReturns True when the argument x is true, False otherwise.\nThe builtins True and False are the only two instances of the class bool.\nThe class bool is a subclass of the class int, and cannot be subclassed.",FileExistsError:"File already exists.",FileNotFoundError:"File not found.",FloatingPointError:"Floating point operation failed.",FutureWarning:"Base class for warnings about constructs that will change semantically\nin the future.",GeneratorExit:"Request that a generator exit.",IOError:"Base class for I/O related errors.",ImportError:"Import can't find module, or can't find name in module.",ImportWarning:"Base class for warnings about probable mistakes in module imports",IndentationError:"Improper indentation.",IndexError:"Sequence index out of range.",InterruptedError:"Interrupted by signal.",IsADirectoryError:"Operation doesn't work on directories.",KeyError:"Mapping key not found.",KeyboardInterrupt:"Program interrupted by user.",LookupError:"Base class for lookup errors.",MemoryError:"Out of memory.",NameError:"Name not found globally.",None:"",NotADirectoryError:"Operation only works on directories.",NotImplemented:"",NotImplementedError:"Method or function hasn't been implemented yet.",OSError:"Base class for I/O related errors.",OverflowError:"Result too large to be represented.",PendingDeprecationWarning:"Base class for warnings about features which will be deprecated\nin the future.",PermissionError:"Not enough permissions.",ProcessLookupError:"Process not found.",ReferenceError:"Weak ref proxy used after referent went away.",ResourceWarning:"Base class for warnings about resource usage.",RuntimeError:"Unspecified run-time error.",RuntimeWarning:"Base class for warnings about dubious runtime behavior.",StopIteration:"Signal the end from iterator.__next__().",SyntaxError:"Invalid syntax.",SyntaxWarning:"Base class for warnings about dubious syntax.",SystemError:"Internal error in the Python interpreter.\n\nPlease report this to the Python maintainer, along with the traceback,\nthe Python version, and the hardware/OS platform and version.",SystemExit:"Request to exit from the 
interpreter.",TabError:"Improper mixture of spaces and tabs.",TimeoutError:"Timeout expired.",True:"bool(x) -> bool\n\nReturns True when the argument x is true, False otherwise.\nThe builtins True and False are the only two instances of the class bool.\nThe class bool is a subclass of the class int, and cannot be subclassed.",TypeError:"Inappropriate argument type.",UnboundLocalError:"Local name referenced but not bound to a value.",UnicodeDecodeError:"Unicode decoding error.",UnicodeEncodeError:"Unicode encoding error.",UnicodeError:"Unicode related error.",UnicodeTranslateError:"Unicode translation error.",UnicodeWarning:"Base class for warnings about Unicode related problems, mostly\nrelated to conversion problems.",UserWarning:"Base class for warnings generated by user code.",ValueError:"Inappropriate argument value (of correct type).",Warning:"Base class for warning categories.",WindowsError:"Base class for I/O related errors.",ZeroDivisionError:"Second argument to a division or modulo operation was zero.",__debug__:"bool(x) -> bool\n\nReturns True when the argument x is true, False otherwise.\nThe builtins True and False are the only two instances of the class bool.\nThe class bool is a subclass of the class int, and cannot be subclassed.",abs:"abs(number) -> number\n\nReturn the absolute value of the argument.",all:"all(iterable) -> bool\n\nReturn True if bool(x) is True for all values x in the iterable.\nIf the iterable is empty, return True.",any:"any(iterable) -> bool\n\nReturn True if bool(x) is True for any x in the iterable.\nIf the iterable is empty, return False.",ascii:"ascii(object) -> string\n\nAs repr(), return a string containing a printable representation of an\nobject, but escape the non-ASCII characters in the string returned by\nrepr() using \\x, \\u or \\U escapes. 
This generates a string similar\nto that returned by repr() in Python 2.",bin:"bin(number) -> string\n\nReturn the binary representation of an integer.\n\n >>> bin(2796202)\n '0b1010101010101010101010'\n",bool:"bool(x) -> bool\n\nReturns True when the argument x is true, False otherwise.\nThe builtins True and False are the only two instances of the class bool.\nThe class bool is a subclass of the class int, and cannot be subclassed.",bytearray:"bytearray(iterable_of_ints) -> bytearray\nbytearray(string, encoding[, errors]) -> bytearray\nbytearray(bytes_or_buffer) -> mutable copy of bytes_or_buffer\nbytearray(int) -> bytes array of size given by the parameter initialized with null bytes\nbytearray() -> empty bytes array\n\nConstruct an mutable bytearray object from:\n - an iterable yielding integers in range(256)\n - a text string encoded using the specified encoding\n - a bytes or a buffer object\n - any object implementing the buffer API.\n - an integer",bytes:"bytes(iterable_of_ints) -> bytes\nbytes(string, encoding[, errors]) -> bytes\nbytes(bytes_or_buffer) -> immutable copy of bytes_or_buffer\nbytes(int) -> bytes object of size given by the parameter initialized with null bytes\nbytes() -> empty bytes object\n\nConstruct an immutable array of bytes from:\n - an iterable yielding integers in range(256)\n - a text string encoded using the specified encoding\n - any object implementing the buffer API.\n - an integer",callable:"callable(object) -> bool\n\nReturn whether the object is callable (i.e., some kind of function).\nNote that classes are callable, as are instances of classes with a\n__call__() method.",chr:"chr(i) -> Unicode character\n\nReturn a Unicode string of one character with ordinal i; 0 <= i <= 0x10ffff.",classmethod:"classmethod(function) -> method\n\nConvert a function to be a class method.\n\nA class method receives the class as implicit first argument,\njust like an instance method receives the instance.\nTo declare a class method, use this idiom:\n\n class C:\n def f(cls, arg1, arg2, ...): ...\n f = classmethod(f)\n\nIt can be called either on the class (e.g. C.f()) or on an instance\n(e.g. C().f()). 
The instance is ignored except for its class.\nIf a class method is called for a derived class, the derived class\nobject is passed as the implied first argument.\n\nClass methods are different than C++ or Java static methods.\nIf you want those, see the staticmethod builtin.",compile:"compile(source, filename, mode[, flags[, dont_inherit]]) -> code object\n\nCompile the source (a Python module, statement or expression)\ninto a code object that can be executed by exec() or eval().\nThe filename will be used for run-time error messages.\nThe mode must be 'exec' to compile a module, 'single' to compile a\nsingle (interactive) statement, or 'eval' to compile an expression.\nThe flags argument, if present, controls which future statements influence\nthe compilation of the code.\nThe dont_inherit argument, if non-zero, stops the compilation inheriting\nthe effects of any future statements in effect in the code calling\ncompile; if absent or zero these statements do influence the compilation,\nin addition to any features explicitly specified.",complex:"complex(real[, imag]) -> complex number\n\nCreate a complex number from a real part and an optional imaginary part.\nThis is equivalent to (real + imag*1j) where imag defaults to 0.",copyright:"interactive prompt objects for printing the license text, a list of\n contributors and the copyright notice.",credits:"interactive prompt objects for printing the license text, a list of\n contributors and the copyright notice.",delattr:"delattr(object, name)\n\nDelete a named attribute on an object; delattr(x, 'y') is equivalent to\n``del x.y''.",dict:"dict() -> new empty dictionary\ndict(mapping) -> new dictionary initialized from a mapping object's\n (key, value) pairs\ndict(iterable) -> new dictionary initialized as if via:\n d = {}\n for k, v in iterable:\n d[k] = v\ndict(**kwargs) -> new dictionary initialized with the name=value pairs\n in the keyword argument list. For example: dict(one=1, two=2)",dir:"dir([object]) -> list of strings\n\nIf called without an argument, return the names in the current scope.\nElse, return an alphabetized list of names comprising (some of) the attributes\nof the given object, and of attributes reachable from it.\nIf the object supplies a method named __dir__, it will be used; otherwise\nthe default dir() logic is used and returns:\n for a module object: the module's attributes.\n for a class object: its attributes, and recursively the attributes\n of its bases.\n for any other object: its attributes, its class's attributes, and\n recursively the attributes of its class's base classes.",divmod:"divmod(x, y) -> (div, mod)\n\nReturn the tuple ((x-x%y)/y, x%y). Invariant: div*y + mod == x.",enumerate:"enumerate(iterable[, start]) -> iterator for index, value of iterable\n\nReturn an enumerate object. iterable must be another object that supports\niteration. 
The enumerate object yields pairs containing a count (from\nstart, which defaults to zero) and a value yielded by the iterable argument.\nenumerate is useful for obtaining an indexed list:\n (0, seq[0]), (1, seq[1]), (2, seq[2]), ...",eval:"eval(source[, globals[, locals]]) -> value\n\nEvaluate the source in the C of globals and locals.\nThe source may be a string representing a Python expression\nor a code object as returned by compile().\nThe globals must be a dictionary and locals can be any mapping,\ndefaulting to the current globals and locals.\nIf only globals is given, locals defaults to it.\n",exec:"exec(object[, globals[, locals]])\n\nRead and execute code from an object, which can be a string or a code\nobject.\nThe globals and locals are dictionaries, defaulting to the current\nglobals and locals. If only globals is given, locals defaults to it.",exit:"",filter:"filter(function or None, iterable) --> filter object\n\nReturn an iterator yielding those items of iterable for which function(item)\nis true. If function is None, return the items that are true.",float:"float(x) -> floating point number\n\nConvert a string or number to a floating point number, if possible.",format:"format(value[, format_spec]) -> string\n\nReturns value.__format__(format_spec)\nformat_spec defaults to \"\"", +frozenset:"frozenset()-> empty frozenset object\nfrozenset(iterable)-> frozenset object\n\nBuild an immutable unordered collection of unique elements.", +getattr:"getattr(object,name[,default])-> value\n\nGet a named attribute from an object;getattr(x,'y')is equivalent to x.y.\nWhen a default argument is given,it is returned when the attribute doesn't\nexist; without it, an exception is raised in that case.", +globals:"globals() -> dictionary\n\nReturn the dictionary containing the current scope's global variables.", +hasattr:"hasattr(object,name)-> bool\n\nReturn whether the object has an attribute with the given name.\n(This is done by calling getattr(object,name)and catching AttributeError.)", +hash:"hash(object)-> integer\n\nReturn a hash value for the object. Two objects with the same value have\nthe same hash value. The reverse is not necessarily true,but likely.", +help:"Define the builtin 'help'.\n\n This is a wrapper around pydoc.help that provides a helpful message\n when 'help' is typed at the Python interactive prompt.\n\n Calling help()at the Python prompt starts an interactive help session.\n Calling help(thing)prints help for the python object 'thing'.\n ", +hex:"hex(number)-> string\n\nReturn the hexadecimal representation of an integer.\n\n >>> hex(3735928559)\n '0xdeadbeef'\n", +id:"id(object)-> integer\n\nReturn the identity of an object. This is guaranteed to be unique among\nsimultaneously existing objects.(Hint:it's the object's memory address.)", +input:"input([prompt])-> string\n\nRead a string from standard input. The trailing newline is stripped.\nIf the user hits EOF(Unix:Ctl-D,Windows:Ctl-Z+Return),raise EOFError.\nOn Unix,GNU readline is used if enabled. The prompt string,if given,\nis printed without a trailing newline before reading.", +int:"int(x=0)-> integer\nint(x,base=10)-> integer\n\nConvert a number or string to an integer,or return 0 if no arguments\nare given. If x is a number,return x.__int__(). For floating point\nnumbers,this truncates towards zero.\n\nIf x is not a number or if base is given,then x must be a string,\nbytes,or bytearray instance representing an integer literal in the\ngiven base. 
The literal can be preceded by '+' or '-' and be surrounded\nby whitespace. The base defaults to 10. Valid bases are 0 and 2-36.\nBase 0 means to interpret the base from the string as an integer literal.\n>>> int('0b100',base=0)\n4", +isinstance:"isinstance(object,class-or-type-or-tuple)-> bool\n\nReturn whether an object is an instance of a class or of a subclass thereof.\nWith a type as second argument,return whether that is the object's type.\nThe form using a tuple, isinstance(x, (A, B, ...)), is a shortcut for\nisinstance(x, A) or isinstance(x, B) or ... (etc.).", +issubclass:"issubclass(C, B) -> bool\n\nReturn whether class C is a subclass (i.e., a derived class) of class B.\nWhen using a tuple as the second argument issubclass(X, (A, B, ...)),\nis a shortcut for issubclass(X, A) or issubclass(X, B) or ... (etc.).", +iter:"iter(iterable) -> iterator\niter(callable, sentinel) -> iterator\n\nGet an iterator from an object. In the first form, the argument must\nsupply its own iterator, or be a sequence.\nIn the second form, the callable is called until it returns the sentinel.", +len:"len(object)\n\nReturn the number of items of a sequence or collection.", +license:"interactive prompt objects for printing the license text, a list of\n contributors and the copyright notice.", +list:"list() -> new empty list\nlist(iterable) -> new list initialized from iterable's items", +locals:"locals()-> dictionary\n\nUpdate and return a dictionary containing the current scope's local variables.", +map:"map(func, *iterables) --> map object\n\nMake an iterator that computes the function using arguments from\neach of the iterables. Stops when the shortest iterable is exhausted.", +max:"max(iterable, *[, default=obj, key=func]) -> value\nmax(arg1, arg2, *args, *[, key=func]) -> value\n\nWith a single iterable argument, return its biggest item. The\ndefault keyword-only argument specifies an object to return if\nthe provided iterable is empty.\nWith two or more arguments, return the largest argument.", +memoryview:"memoryview(object)\n\nCreate a new memoryview object which references the given object.", +min:"min(iterable, *[, default=obj, key=func]) -> value\nmin(arg1, arg2, *args, *[, key=func]) -> value\n\nWith a single iterable argument, return its smallest item. The\ndefault keyword-only argument specifies an object to return if\nthe provided iterable is empty.\nWith two or more arguments, return the smallest argument.", +next:"next(iterator[, default])\n\nReturn the next item from the iterator. If default is given and the iterator\nis exhausted, it is returned instead of raising StopIteration.", +object:"The most base type", +oct:"oct(number) -> string\n\nReturn the octal representation of an integer.\n\n >>> oct(342391)\n '0o1234567'\n", +open:"open(file, mode='r', buffering=-1, encoding=None,\n errors=None, newline=None, closefd=True, opener=None) -> file object\n\nOpen file and return a stream. Raise IOError upon failure.\n\nfile is either a text or byte string giving the name (and the path\nif the file isn't in the current working directory)of the file to\nbe opened or an integer file descriptor of the file to be\nwrapped.(If a file descriptor is given,it is closed when the\nreturned I/O object is closed,unless closefd is set to False.)\n\nmode is an optional string that specifies the mode in which the file\nis opened. It defaults to 'r' which means open for reading in text\nmode. 
Other common values are 'w' for writing(truncating the file if\nit already exists),'x' for creating and writing to a new file,and\n'a' for appending(which on some Unix systems,means that all writes\nappend to the end of the file regardless of the current seek position).\nIn text mode,if encoding is not specified the encoding used is platform\ndependent:locale.getpreferredencoding(False)is called to get the\ncurrent locale encoding.(For reading and writing raw bytes use binary\nmode and leave encoding unspecified.)The available modes are:\n\n========================================================================\nCharacter Meaning\n------------------------------------------------------------------------\n'r' open for reading(default)\n'w' open for writing,truncating the file first\n'x' create a new file and open it for writing\n'a' open for writing,appending to the end of the file if it exists\n'b' binary mode\n't' text mode(default)\n'+' open a disk file for updating(reading and writing)\n'U' universal newline mode(deprecated)\n========================================================================\n\nThe default mode is 'rt'(open for reading text). For binary random\naccess,the mode 'w+b' opens and truncates the file to 0 bytes,while\n'r+b' opens the file without truncation. The 'x' mode implies 'w' and\nraises an `FileExistsError` if the file already exists.\n\nPython distinguishes between files opened in binary and text modes,\neven when the underlying operating system doesn't. Files opened in\nbinary mode (appending 'b' to the mode argument) return contents as\nbytes objects without any decoding. In text mode (the default, or when\n't' is appended to the mode argument), the contents of the file are\nreturned as strings, the bytes having been first decoded using a\nplatform-dependent encoding or using the specified encoding if given.\n\n'U' mode is deprecated and will raise an exception in future versions\nof Python. It has no effect in Python 3. Use newline to control\nuniversal newlines mode.\n\nbuffering is an optional integer used to set the buffering policy.\nPass 0 to switch buffering off (only allowed in binary mode), 1 to select\nline buffering (only usable in text mode), and an integer > 1 to indicate\nthe size of a fixed-size chunk buffer. When no buffering argument is\ngiven, the default buffering policy works as follows:\n\n* Binary files are buffered in fixed-size chunks; the size of the buffer\n is chosen using a heuristic trying to determine the underlying device's\n \"block size\" and falling back on `io.DEFAULT_BUFFER_SIZE`.\n On many systems, the buffer will typically be 4096 or 8192 bytes long.\n\n* \"Interactive\" text files (files for which isatty() returns True)\n use line buffering. Other text files use the policy described above\n for binary files.\n\nencoding is the name of the encoding used to decode or encode the\nfile. This should only be used in text mode. The default encoding is\nplatform dependent, but any encoding supported by Python can be\npassed. See the codecs module for the list of supported encodings.\n\nerrors is an optional string that specifies how encoding errors are to\nbe handled---this argument should not be used in binary mode. Pass\n'strict' to raise a ValueError exception if there is an encoding error\n(the default of None has the same effect), or pass 'ignore' to ignore\nerrors. 
(Note that ignoring encoding errors can lead to data loss.)\nSee the documentation for codecs.register or run 'help(codecs.Codec)'\nfor a list of the permitted encoding error strings.\n\nnewline controls how universal newlines works (it only applies to text\nmode). It can be None, '', '\\n', '\\r', and '\\r\\n'. It works as\nfollows:\n\n* On input, if newline is None, universal newlines mode is\n enabled. Lines in the input can end in '\\n', '\\r', or '\\r\\n', and\n these are translated into '\\n' before being returned to the\n caller. If it is '', universal newline mode is enabled, but line\n endings are returned to the caller untranslated. If it has any of\n the other legal values, input lines are only terminated by the given\n string, and the line ending is returned to the caller untranslated.\n\n* On output, if newline is None, any '\\n' characters written are\n translated to the system default line separator, os.linesep. If\n newline is '' or '\\n', no translation takes place. If newline is any\n of the other legal values, any '\\n' characters written are translated\n to the given string.\n\nIf closefd is False, the underlying file descriptor will be kept open\nwhen the file is closed. This does not work when a file name is given\nand must be True in that case.\n\nA custom opener can be used by passing a callable as *opener*. The\nunderlying file descriptor for the file object is then obtained by\ncalling *opener* with (*file*, *flags*). *opener* must return an open\nfile descriptor (passing os.open as *opener* results in functionality\nsimilar to passing None).\n\nopen() returns a file object whose type depends on the mode, and\nthrough which the standard file operations such as reading and writing\nare performed. When open() is used to open a file in a text mode ('w',\n'r', 'wt', 'rt', etc.), it returns a TextIOWrapper. When used to open\na file in a binary mode, the returned class varies: in read binary\nmode, it returns a BufferedReader; in write binary and append binary\nmodes, it returns a BufferedWriter, and in read/write mode, it returns\na BufferedRandom.\n\nIt is also possible to use a string or bytearray as a file for both\nreading and writing. For strings StringIO can be used like a file\nopened in a text mode, and for bytes a BytesIO can be used like a file\nopened in a binary mode.\n",ord:"ord(c) -> integer\n\nReturn the integer ordinal of a one-character string.",pow:"pow(x, y[, z]) -> number\n\nWith two arguments, equivalent to x**y. With three arguments,\nequivalent to (x**y) % z, but may be more efficient (e.g. for ints).",print:"print(value, ..., sep=' ', end='\\n', file=sys.stdout, flush=False)\n\nPrints the values to a stream, or to sys.stdout by default.\nOptional keyword arguments:\nfile: a file-like object (stream); defaults to the current sys.stdout.\nsep: string inserted between values, default a space.\nend: string appended after the last value, default a newline.\nflush: whether to forcibly flush the stream.",property:"property(fget=None, fset=None, fdel=None, doc=None) -> property attribute\n\nfget is a function to be used for getting an attribute value, and likewise\nfset is a function for setting, and fdel a function for del'ing, an\nattribute. 
Typical use is to define a managed attribute x:\n\nclass C(object):\n def getx(self): return self._x\n def setx(self, value): self._x = value\n def delx(self): del self._x\n x = property(getx, setx, delx, \"I'm the 'x' property.\")\n\nDecorators make defining new properties or modifying existing ones easy:\n\nclass C(object):\n @property\n def x(self):\n \"I am the 'x' property.\"\n return self._x\n @x.setter\n def x(self, value):\n self._x = value\n @x.deleter\n def x(self):\n del self._x\n",quit:"",range:"range(stop) -> range object\nrange(start, stop[, step]) -> range object\n\nReturn a virtual sequence of numbers from start to stop by step.",repr:"repr(object) -> string\n\nReturn the canonical string representation of the object.\nFor most object types, eval(repr(object)) == object.",reversed:"reversed(sequence) -> reverse iterator over values of the sequence\n\nReturn a reverse iterator",round:"round(number[, ndigits]) -> number\n\nRound a number to a given precision in decimal digits (default 0 digits).\nThis returns an int when called with one argument, otherwise the\nsame type as the number. ndigits may be negative.",set:"set() -> new empty set object\nset(iterable) -> new set object\n\nBuild an unordered collection of unique elements.",setattr:"setattr(object, name, value)\n\nSet a named attribute on an object; setattr(x, 'y', v) is equivalent to\n``x.y = v''.",slice:"slice(stop)\nslice(start, stop[, step])\n\nCreate a slice object. This is used for extended slicing (e.g. a[0:10:2]).",sorted:"sorted(iterable, key=None, reverse=False) --> new sorted list",staticmethod:"staticmethod(function) -> method\n\nConvert a function to be a static method.\n\nA static method does not receive an implicit first argument.\nTo declare a static method, use this idiom:\n\n class C:\n def f(arg1, arg2, ...): ...\n f = staticmethod(f)\n\nIt can be called either on the class (e.g. C.f()) or on an instance\n(e.g. C().f()). The instance is ignored except for its class.\n\nStatic methods in Python are similar to those found in Java or C++.\nFor a more advanced concept, see the classmethod builtin.",str:"str(object='') -> str\nstr(bytes_or_buffer[, encoding[, errors]]) -> str\n\nCreate a new string object from the given object. If encoding or\nerrors is specified, then the object must expose a data buffer\nthat will be decoded using the given encoding and error handler.\nOtherwise, returns the result of object.__str__() (if defined)\nor repr(object).\nencoding defaults to sys.getdefaultencoding().\nerrors defaults to 'strict'.",sum:"sum(iterable[, start]) -> value\n\nReturn the sum of an iterable of numbers (NOT strings) plus the value\nof parameter 'start' (which defaults to 0). 
When the iterable is\nempty, return start.",super:"super() -> same as super(__class__, )\nsuper(type) -> unbound super object\nsuper(type, obj) -> bound super object; requires isinstance(obj, type)\nsuper(type, type2) -> bound super object; requires issubclass(type2, type)\nTypical use to call a cooperative superclass method:\nclass C(B):\n def meth(self, arg):\n super().meth(arg)\nThis works for class methods too:\nclass C(B):\n @classmethod\n def cmeth(cls, arg):\n super().cmeth(arg)\n",tuple:"tuple() -> empty tuple\ntuple(iterable) -> tuple initialized from iterable's items\n\nIf the argument is a tuple, the return value is the same object.",type:"type(object_or_name, bases, dict)\ntype(object) -> the object's type\ntype(name, bases, dict) -> a new type",vars:"vars([object]) -> dictionary\n\nWithout arguments, equivalent to locals().\nWith an argument, equivalent to object.__dict__.",zip:"zip(iter1 [,iter2 [...]]) --> zip object\n\nReturn a zip object whose .__next__() method returns a tuple where\nthe i-th element comes from the i-th iterable argument. The .__next__()\nmethod continues until the shortest iterable in the argument sequence\nis exhausted and then it raises StopIteration.",} +__BRYTHON__.builtins_doc=docs +; ;(function($B){var _b_=$B.builtins function import_hooks(mod_name,_path,from_stdlib){var meta_path=$B.meta_path.slice(),_sys_modules=$B.imported,_loader,spec if(from_stdlib){ @@ -13214,20 +14155,23 @@ for(var i=0,len=meta_path.length;i < len;i++){var _finder=meta_path[i],find_spec if(find_spec==_b_.None){ var find_module=$B.$getattr(_finder,"find_module",_b_.None) if(find_module !==_b_.None){_loader=find_module(mod_name,_path) -var load_module=$B.$getattr(_loader,"load_module") -module=$B.$call(load_module)(mod_name) +if(_loader !==_b_.None){ +var load_module=$B.$getattr(_loader,"load_module"),module=$B.$call(load_module)(mod_name) _sys_modules[mod_name]=module -return module}}else{spec=find_spec(mod_name,_path,undefined) +return module}}}else{spec=find_spec(mod_name,_path) if(!$B.is_none(spec)){module=$B.imported[spec.name] if(module !==undefined){ return _sys_modules[spec.name]=module} _loader=_b_.getattr(spec,"loader",_b_.None) break}}} if(_loader===undefined){ -var exc=_b_.ImportError.$factory("No module named "+mod_name) +message=mod_name +if($B.protocol=="file"){message+=" (warning: cannot import local files with protocol 'file')"} +var exc=_b_.ModuleNotFoundError.$factory(message) exc.name=mod_name throw exc} -if($B.is_none(module)){var _spec_name=_b_.getattr(spec,"name") +if($B.is_none(module)){if(spec===_b_.None){throw _b_.ModuleNotFoundError.$factory(mod_name)} +var _spec_name=_b_.getattr(spec,"name") if(!$B.is_none(_loader)){var create_module=_b_.getattr(_loader,"create_module",_b_.None) if(!$B.is_none(create_module)){module=$B.$call(create_module)(spec)}} if(module===undefined){throw _b_.ImportError.$factory(mod_name)} @@ -13257,14 +14201,16 @@ $B.import_hooks=import_hooks})(__BRYTHON__) var coroutine=$B.coroutine=$B.make_class("coroutine") coroutine.close=function(self){} coroutine.send=function(self){return self.$func.apply(null,self.$args)} +coroutine.__repr__=coroutine.__str__=function(self){if(self.$func.$infos){return ""}else{return ""}} $B.set_func_names(coroutine,"builtins") $B.make_async=func=>{ -var f=function(){var args=arguments +if(func.$is_genfunc){return func} +var f=function(){var args=arguments,stack=$B.deep_copy($B.frames_stack) return{ -__class__:coroutine,$args:args,$func:func}} +__class__:coroutine,$args:args,$func:func,$stack:stack}} 
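Aside: the patched import_hooks above now raises ModuleNotFoundError (instead of ImportError) and tags the exception with the module name. A minimal sketch of what that looks like from Python code running under Brython (the module name below is hypothetical):

try:
    import some_missing_module          # hypothetical name that cannot be resolved
except ModuleNotFoundError as exc:
    print(exc.name)                     # 'some_missing_module'
    # When the page is served via the file:// protocol, the message also carries
    # the "cannot import local files with protocol 'file'" hint added above.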
f.$infos=func.$infos return f} -$B.promise=function(obj){if(obj.__class__===$B.JSObject){return obj.js}else if(obj.__class__===coroutine){return coroutine.send(obj)} +$B.promise=function(obj){if(obj.__class__===coroutine){return coroutine.send(obj)} if(typeof obj=="function"){return obj()} return obj}})(__BRYTHON__) ; diff --git a/static/brython/brython_stdlib.js b/static/brython/brython_stdlib.js index c0f140db..d804265b 100644 --- a/static/brython/brython_stdlib.js +++ b/static/brython/brython_stdlib.js @@ -1,3 +1,3 @@ __BRYTHON__.use_VFS = true; -__BRYTHON__.VFS={"array": [".js", "var $module = (function($B){\n\nvar _b_ = $B.builtins,\n $s = [],\n i\nfor(var $b in _b_){$s.push('var ' + $b +' = _b_[\"'+$b+'\"]')}\neval($s.join(';'))\n\nvar typecodes = {\n 'b': Int8Array, // signed char, 1 byte\n 'B': Uint8Array, // unsigned char, 1\n 'u': null, // Py_UNICODE Unicode character, 2 (deprecated)\n 'h': Int16Array, // signed short, 2\n 'H': Uint16Array, // unsigned short, 2\n 'i': Int16Array, // signed int, 2\n 'I': Uint16Array, // unsigned int, 2\n 'l': Int32Array, // signed long, 4\n 'L': Uint32Array, // unsigned long, 4\n 'q': null, // signed long, 8 (not implemented)\n 'Q': null, // unsigned long, 8 (not implemented)\n 'f': Float32Array, // float, 4\n 'd': Float64Array // double float, 8\n}\n\nvar array = $B.make_class(\"array\",\n function(){\n var missing = {},\n $ = $B.args(\"array\", 2, {typecode: null, initializer: null},\n [\"typecode\", \"initializer\"], arguments, {initializer: missing},\n null, null),\n typecode = $.typecode,\n initializer = $.initializer\n if(! typecodes.hasOwnProperty(typecode)){\n throw _b_.ValueError.$factory(\"bad typecode (must be b, \" +\n \"B, u, h, H, i, I, l, L, q, Q, f or d)\")\n }\n if(typecodes[typecode] === null){\n throw _b_.NotImplementedError.$factory(\"type code \" +\n typecode + \"is not implemented\")\n }\n var res = {\n __class__: array,\n typecode: typecode,\n obj: null\n }\n if(initializer !== missing){\n if(Array.isArray(initializer)){\n array.fromlist(res, initializer)\n }else if(_b_.isinstance(initializer, _b_.bytes)){\n array.frombytes(res, initializer)\n }else{\n array.extend(res, initializer)\n }\n }\n return res\n }\n)\n\narray.$buffer_protocol = true\n\narray.__getitem__ = function(self, key){\n if(self.obj && self.obj[key] !== undefined){\n return self.obj[key]\n }\n throw _b_.IndexError(\"array index out of range\")\n}\n\nvar array_iterator = $B.make_iterator_class(\"array_iterator\")\narray.__iter__ = function(self){\n return array_iterator.$factory(self.obj)\n}\n\narray.__len__ = function(self){\n return self.obj.length\n}\n\narray.__str__ = function(self){\n $B.args(\"__str__\", 1, {self: null},\n [\"self\"], arguments, {}, null, null)\n var res = \"array('\" + self.typecode + \"'\"\n if(self.obj !== null){\n res += \", [\" + self.obj + \"]\"\n }\n return res + \")\"\n}\n\nfunction normalize_index(self, i){\n // return an index i between 0 and self.obj.length - 1\n if(i < 0){\n i = self.obj.length + i\n }\n if(i < 0){i = 0}\n else if(i > self.obj.length - 1){\n i = self.obj.length\n }\n return i\n}\n\narray.append = function(self, value){\n $B.args(\"append\", 2, {self: null, value: null},\n [\"self\", \"value\"], arguments, {}, null, null)\n var pos = self.obj === null ? 
0 : self.obj.length\n return array.insert(self, pos, value)\n}\n\narray.count = function(self, x){\n $B.args(\"count\", 2, {self: null, x: null},\n [\"self\", \"x\"], arguments, {}, null, null)\n if(self.obj === null){return 0}\n return self.obj.filter(function(item){return item == x}).length\n}\n\narray.extend = function(self, iterable){\n $B.args(\"extend\", 2, {self: null, iterable: null},\n [\"self\", \"iterable\"], arguments, {}, null, null)\n if(iterable.__class__ === array){\n if(iterable.typecode !== self.typecode){\n throw _b_.TypeError.$factory(\"can only extend with array \" +\n \"of same kind\")\n }\n if(iterable.obj === null){return _b_.None}\n // create new object with length = sum of lengths\n var newobj = new typecodes[self.typecode](self.obj.length +\n iterable.obj.length)\n // copy self.obj\n newobj.set(self.obj)\n // copy iterable.obj\n newobj.set(iterable.obj, self.obj.length)\n self.obj = newobj\n }else{\n var it = _b_.iter(iterable)\n while(true){\n try{\n var item = _b_.next(it)\n array.append(self, item)\n }catch(err){\n if(err.__class__ !== _b_.StopIteration){\n throw err\n }\n break\n }\n }\n }\n return _b_.None\n}\n\narray.frombytes = function(self, s){\n $B.args(\"frombytes\", 2, {self: null, s: null},\n [\"self\", \"s\"], arguments, {}, null, null)\n if(! _b_.isinstance(s, _b_.bytes)){\n throw _b_.TypeError.$factory(\"a bytes-like object is required, \" +\n \"not '\" + $B.class_name(s) + \"'\")\n }\n self.obj = new typecodes[self.typecode](s.source)\n return None\n}\n\narray.fromlist = function(self, list){\n $B.args(\"fromlist\", 2, {self: null, list: null},\n [\"self\", \"list\"], arguments, {}, null, null)\n var it = _b_.iter(list)\n while(true){\n try{\n var item = _b_.next(it)\n try{\n array.append(self, item)\n }catch(err){\n console.log(err)\n return _b_.None\n }\n }catch(err){\n if(err.__class__ === _b_.StopIteration){\n return _b_.None\n }\n throw err\n }\n }\n}\n\narray.fromstring = array.frombytes\n\narray.index = function(self, x){\n $B.args(\"index\", 2, {self: null, x: null},\n [\"self\", \"x\"], arguments, {}, null, null)\n var res = self.obj.findIndex(function(item){return x == item})\n if(res == -1){\n throw _b_.ValueError.$factory(\"array.index(x): x not in array\")\n }\n return res\n}\n\narray.insert = function(self, i, value){\n $B.args(\"insert\", 3, {self: null, i: null, value: null},\n [\"self\", \"i\", \"value\"], arguments, {}, null, null)\n if(self.obj === null){\n self.obj = [value]\n }else{\n self.obj.splice(i, 0, value)\n }\n return _b_.None\n}\n\narray.itemsize = function(self){\n return typecodes[self.typecode].BYTES_PER_ELEMENT\n}\n\narray.pop = function(self, i){\n var $ = $B.args(\"count\", 2, {self: null, i: null},\n [\"self\", \"i\"], arguments, {i: -1}, null, null)\n i = $.i\n if(self.obj === null){\n throw _b_.IndexError.$factory(\"pop from empty array\")\n }else if(self.obj.length == 1){\n var res = self.obj[0]\n self.obj = null\n return res\n }\n i = normalize_index(self, i)\n // store value to return\n var res = self.obj[i]\n // create new array, size = previous size - 1\n var newobj = new typecodes[self.typecode](self.obj.length - 1)\n // fill new array with values until i excluded\n newobj.set(self.obj.slice(0, i))\n // fill with values after i\n newobj.set(self.obj.slice(i + 1), i)\n // set self.obj to new array\n self.obj = newobj\n // return stored value\n return res\n}\n\narray.remove = function(self, x){\n $B.args(\"remove\", 2, {self: null, x: null},\n [\"self\", \"x\"], arguments, {}, null, null)\n var res = 
self.obj.findIndex(function(item){return x == item})\n if(res == -1){\n throw _b_.ValueError.$factory(\"array.remove(x): x not in array\")\n }\n array.pop(self, res)\n return _b_.None\n}\n\narray.reverse = function(self){\n $B.args(\"reverse\", 1, {self: null},\n [\"self\"], arguments, {}, null, null)\n if(self.obj === null){return _b_.None}\n self.obj.reverse()\n return _b_.None\n}\n\narray.tobytes = function(self){\n $B.args(\"tobytes\", 1, {self: null},\n [\"self\"], arguments, {}, null, null)\n var items = Array.slice.call(null, self.obj),\n res = []\n items.forEach(function(item){\n while(item > 256){\n res.push(item % 256)\n item = Math.floor(item / 256)\n }\n res.push(item)\n })\n return _b_.bytes.$factory(res)\n}\n\narray.tolist = function(self){\n $B.args(\"tolist\", 1, {self: null},\n [\"self\"], arguments, {}, null, null)\n return Array.slice.call(null, self.obj)\n}\n\narray.tostring = array.tobytes\n\narray.typecode = function(self){\n return self.typecode\n}\n\n$B.set_func_names(array, \"array\")\n\nreturn {\n array: array,\n typecodes: Object.keys(typecodes).join('')\n}\n\n})(__BRYTHON__)\n"], "builtins": [".js", "var $module = (function(){\n var obj = {\n __class__: __BRYTHON__.module,\n __name__: 'builtins'\n },\n builtin_names = ['ArithmeticError', 'AssertionError', 'AttributeError',\n 'BaseException', 'BlockingIOError', 'BrokenPipeError', 'BufferError',\n 'BytesWarning', 'ChildProcessError', 'ConnectionAbortedError',\n 'ConnectionError', 'ConnectionRefusedError', 'ConnectionResetError',\n 'DeprecationWarning', 'EOFError', 'Ellipsis', 'EnvironmentError', 'Exception',\n 'False', 'FileExistsError', 'FileNotFoundError', 'FloatingPointError',\n 'FutureWarning', 'GeneratorExit', 'IOError', 'ImportError', 'ImportWarning',\n 'IndentationError', 'IndexError', 'InterruptedError', 'IsADirectoryError',\n 'KeyError', 'KeyboardInterrupt', 'LookupError', 'MemoryError', 'NameError',\n 'None', 'NotADirectoryError', 'NotImplemented', 'NotImplementedError',\n 'OSError', 'OverflowError', 'PendingDeprecationWarning', 'PermissionError',\n 'ProcessLookupError', 'ReferenceError', 'ResourceWarning', 'RuntimeError',\n 'RuntimeWarning', 'StopIteration', 'SyntaxError', 'SyntaxWarning',\n 'SystemError', 'SystemExit', 'TabError', 'TimeoutError', 'True', 'TypeError',\n 'UnboundLocalError', 'UnicodeDecodeError', 'UnicodeEncodeError',\n 'UnicodeError', 'UnicodeTranslateError', 'UnicodeWarning', 'UserWarning',\n 'ValueError', 'Warning', 'WindowsError', 'ZeroDivisionError', '_',\n '__build_class__', '__debug__', '__doc__', '__import__', '__name__',\n '__package__', 'abs', 'all', 'any', 'ascii', 'bin', 'bool', 'bytearray',\n 'bytes','callable', 'chr', 'classmethod', 'compile', 'complex', 'copyright',\n 'credits','delattr', 'dict', 'dir', 'divmod', 'enumerate', 'eval', 'exec',\n 'exit', 'filter', 'float', 'format', 'frozenset', 'getattr', 'globals',\n 'hasattr', 'hash', 'help', 'hex', 'id', 'input', 'int', 'isinstance',\n 'issubclass', 'iter', 'len', 'license', 'list', 'locals', 'map', 'max',\n 'memoryview', 'min', 'next', 'object', 'oct', 'open', 'ord', 'pow', 'print',\n 'property', 'quit', 'range', 'repr', 'reversed', 'round', 'set', 'setattr',\n 'slice', 'sorted', 'staticmethod', 'str', 'sum', 'super', 'tuple', 'type',\n 'vars', 'zip',\n '__newobj__' // defined in py_objects.js ; required for pickle\n ]\n for(var i = 0, len = builtin_names.length; i < len; i++){\n try{eval(\"obj['\" + builtin_names[i] + \"'] = __BRYTHON__.builtins.\" +\n builtin_names[i])}\n catch(err){if (__BRYTHON__.$debug) 
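Aside: a small usage sketch for the array module implemented above (typecode 'i' maps to a 16-bit typed array in this port); the values are illustrative, not from the file.

from array import array

a = array('i', [1, 2, 3])    # a list initializer goes through array.fromlist above
a.append(4)
len(a)                       # 4
a.index(3)                   # 2
a.count(2)                   # 1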
{console.log(err)}}\n }\n return obj\n})()\n"], "dis": [".js", "var $module=(function($B){\n\nvar dict = $B.builtins.dict\nvar mod = {\n dis:function(src){\n $B.$py_module_path['__main__'] = $B.brython_path\n return __BRYTHON__.py2js(src,'__main__','__main__',\n $B.builtins_scope).to_js()\n },\n OPTIMIZED: 1,\n NEWLOCALS: 2,\n VARARGS: 4,\n VARKEYWORDS: 8,\n NESTED: 16,\n GENERATOR: 32,\n NOFREE: 64,\n COROUTINE: 128,\n ITERABLE_COROUTINE: 256,\n ASYNC_GENERATOR: 512,\n COMPILER_FLAG_NAMES: $B.builtins.dict.$factory()\n}\nmod.COMPILER_FLAG_NAMES = dict.$factory([\n [1, \"OPTIMIZED\"],\n [2, \"NEWLOCALS\"],\n [4, \"VARARGS\"],\n [8, \"VARKEYWORDS\"],\n [16, \"NESTED\"],\n [32, \"GENERATOR\"],\n [64, \"NOFREE\"],\n [128, \"COROUTINE\"],\n [256, \"ITERABLE_COROUTINE\"],\n [512, \"ASYNC_GENERATOR\"]\n])\n\nreturn mod\n\n})(__BRYTHON__)"], "hashlib": [".js", "var $module=(function($B){\n\nvar _b_ = $B.builtins\n\nvar $s = []\nfor(var $b in _b_){$s.push('var ' + $b +' = _b_[\"'+$b+'\"]')}\neval($s.join(';'))\n\nvar $mod = {\n\n __getattr__ : function(attr){\n if(attr == 'new'){return hash.$factory}\n return this[attr]\n },\n md5: function(obj){return hash.$factory('md5', obj)},\n sha1: function(obj){return hash.$factory('sha1', obj)},\n sha224: function(obj){return hash.$factory('sha224', obj)},\n sha256: function(obj){return hash.$factory('sha256', obj)},\n sha384: function(obj){return hash.$factory('sha384', obj)},\n sha512: function(obj){return hash.$factory('sha512', obj)},\n\n algorithms_guaranteed: ['md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512'],\n algorithms_available: ['md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512']\n}\n\n//todo: eventually move this function to a \"utility\" file or use ajax module?\nfunction $get_CryptoJS_lib(alg){\n if($B.VFS !== undefined){\n // use file in brython_stdlib.js\n var lib = $B.VFS[\"crypto_js.rollups.\" + alg]\n if (lib===undefined){\n throw _b_.ImportError.$factory(\"can't import hashlib.\" + alg)\n }\n var res = lib[1]\n try{\n eval(res + \"; $B.CryptoJS = CryptoJS;\")\n return\n }catch(err){\n throw Error(\"JS Eval Error\",\n \"Cannot eval CryptoJS algorithm '\" + alg + \"' : error:\" + err)\n }\n }\n\n var module = {__name__: 'CryptoJS', $is_package: false}\n var res = $B.$download_module(module, $B.brython_path + 'libs/crypto_js/rollups/' + alg + '.js');\n\n try{\n eval(res + \"; $B.CryptoJS = CryptoJS;\")\n }catch(err){\n throw Error(\"JS Eval Error\",\n \"Cannot eval CryptoJS algorithm '\" + alg + \"' : error:\" + err)\n }\n}\n\nfunction bytes2WordArray(obj){\n // Transform a bytes object into an instance of class WordArray\n // defined in CryptoJS\n if(!_b_.isinstance(obj, _b_.bytes)){\n throw _b_.TypeError(\"expected bytes, got \" + $B.class_name(obj))\n }\n\n var words = []\n for(var i = 0; i < obj.source.length; i += 4){\n var word = obj.source.slice(i, i + 4)\n while(word.length < 4){word.push(0)}\n var w = word[3] + (word[2] << 8) + (word[1] << 16) + (word[0] << 24)\n words.push(w)\n }\n return {words: words, sigBytes: obj.source.length}\n}\n\nvar hash = {\n __class__: _b_.type,\n __mro__: [_b_.object],\n $infos:{\n __name__: 'hash'\n }\n}\n\nhash.update = function(self, msg){\n self.hash.update(bytes2WordArray(msg))\n}\n\nhash.copy = function(self){\n return self.hash.clone()\n}\n\nhash.digest = function(self){\n var obj = self.hash.clone().finalize().toString(),\n res = []\n for(var i = 0; i < obj.length; i += 2){\n res.push(parseInt(obj.substr(i, 2), 16))\n }\n return _b_.bytes.$factory(res)\n}\n\nhash.hexdigest = 
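Aside: the hashlib module above delegates digest computation to CryptoJS; usage from Python stays the familiar one (the input bytes are illustrative).

import hashlib

h = hashlib.sha256(b"hello")
h.hexdigest()                # hex string of the SHA-256 digest, as in CPython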
function(self) {\n return self.hash.clone().finalize().toString()\n}\n\nhash.$factory = function(alg, obj) {\n var res = {\n __class__: hash\n }\n\n switch(alg) {\n case 'md5':\n case 'sha1':\n case 'sha224':\n case 'sha256':\n case 'sha384':\n case 'sha512':\n var ALG = alg.toUpperCase()\n if($B.Crypto === undefined ||\n $B.CryptoJS.algo[ALG] === undefined){$get_CryptoJS_lib(alg)}\n\n res.hash = $B.CryptoJS.algo[ALG].create()\n if(obj !== undefined){\n res.hash.update(bytes2WordArray(obj))\n }\n break\n default:\n throw $B.builtins.AttributeError.$factory('Invalid hash algorithm: ' + alg)\n }\n\n return res\n}\n\nreturn $mod\n\n})(__BRYTHON__)\n"], "long_int": [".js", "/*\nModule to manipulate long integers\n*/\n\nvar $module=(function($B){\n\neval($B.InjectBuiltins())\n\nvar $LongIntDict = {__class__:$B.$type,__name__:'LongInt'}\n\nfunction add_pos(v1, v2){\n // Add two positive numbers\n // v1, v2 : strings\n // Return an instance of LongInt\n\n var res = '', carry = 0, iself=v1.length, sv=0\n for(var i=v2.length-1;i>=0;i--){\n iself--\n if(iself<0){sv=0}else{sv=parseInt(v1.charAt(iself))}\n x = (carry+sv+parseInt(v2.charAt(i))).toString()\n if(x.length==2){res=x.charAt(1)+res;carry=parseInt(x.charAt(0))}\n else{res=x+res;carry=0}\n }\n while(iself>0){\n iself--\n x = (carry+parseInt(v1.charAt(iself))).toString()\n if(x.length==2){res=x.charAt(1)+res;carry=parseInt(x.charAt(0))}\n else{res=x+res;carry=0}\n }\n if(carry){res=carry+res} \n return {__class__:$LongIntDict, value:res, pos:true}\n}\n\nfunction check_shift(shift){\n // Check the argument of >> and <<\n if(!isinstance(shift, LongInt)){\n throw TypeError(\"shift must be LongInt, not '\"+\n $B.get_class(shift).__name__+\"'\")\n }\n if(!shift.pos){throw ValueError(\"negative shift count\")}\n}\n\nfunction clone(obj){\n // Used for traces\n var obj1 = {}\n for(var attr in obj){obj1[attr]=obj[attr]}\n return obj1\n}\n\nfunction comp_pos(v1, v2){\n // Compare two positive numbers\n if(v1.length>v2.length){return 1}\n else if(v1.lengthv2){return 1}\n else if(v11){\n // If the value in cols[i] has more than one digit, only keep the\n // last one and report the others at the right index\n // For instance if cols[i] = 123, keep 3 in cols[i], add 2 to\n // cols[i-1] and 1 to cols[i-2]\n cols[i] = parseInt(col.charAt(col.length-1))\n j = col.length\n while(j-->1){\n var report = parseInt(col.charAt(j-1))\n var pos = i-col.length+j\n if(cols[pos]===undefined){cols[pos]=report}\n else{cols[pos] += report}\n }\n }\n }\n\n // Find minimum index in cols\n // The previous loop may have introduced negative indices\n var imin\n for(var attr in cols){\n i = parseInt(attr)\n if(imin===undefined){imin=i}\n else if(i=v2\n\n var res = '', carry = 0, i1=v1.length, sv=0\n \n // For all digits in v2, starting by the rightmost, substract it from\n // the matching digit in v1\n // This is the equivalent of the manual operation :\n // 12345678\n // - 98765\n //\n // We begin by the rightmost operation : 8-5 (3, no carry),\n // then 7-6 (1, no carry)\n // then 6-7 (9, carry 1) and so on\n for(var i=v2.length-1;i>=0;i--){\n i1--\n sv = parseInt(v1.charAt(i1))\n x = (sv-carry-parseInt(v2.charAt(i)))\n if(x<0){res=(10+x)+res;carry=1}\n else{res=x+res;carry=0}\n }\n \n // If there are remaining digits in v1, substract the carry, if any\n while(i1>0){\n i1--\n x = (parseInt(v1.charAt(i1))-carry)\n if(x<0){res=(10+x)+res;carry=1}\n else{res=x+res;carry=0}\n }\n\n // Remove leading zeros and return the result\n while(res.charAt(0)=='0' && 
res.length>1){res=res.substr(1)}\n return {__class__:$LongIntDict, value:res, pos:true}\n}\n\n// Special methods to implement operations on instances of LongInt\n\n$LongIntDict.__abs__ = function(self){\n return {__class__:$LongIntDict, value: self.value, pos:true}\n}\n\n$LongIntDict.__add__ = function(self, other){\n if (typeof other == 'number') other=LongInt(_b_.str.$factory(other))\n // Addition of \"self\" and \"other\"\n // If both have the same sign (+ or -) we add their absolute values\n // If they have different sign we use the substraction of their\n // absolute values\n var res\n if(self.pos&&other.pos){ // self > 0, other > 0\n return add_pos(self.value, other.value)\n }else if(!self.pos&&!other.pos){ // self < 0, other < 0\n res = add_pos(self.value, other.value)\n res.pos = false\n return res\n }else if(self.pos && !other.pos){ // self > 0, other < 0\n switch (comp_pos(self.value, other.value)){\n case 1:\n res = sub_pos(self.value, other.value)\n break\n case 0:\n res = {__class__:$LongIntDict, value:0, pos:true}\n break\n case -1:\n res = sub_pos(other.value, self.value)\n res.pos = false\n break\n }\n return res\n }else{ // self < 0, other > 0\n switch(comp_pos(self.value, other.value)){\n case 1:\n res = sub_pos(self.value, other.value)\n res.pos = false\n break\n case 0:\n res = {__class__:$LongIntDict, value:0, pos:true}\n break\n case -1:\n res = sub_pos(other.value, self.value)\n break\n }\n return res\n }\n}\n\n$LongIntDict.__and__ = function(self, other){\n if (typeof other == 'number') other=LongInt(_b_.str.$factory(other))\n // Bitwise \"and\" : build the binary representation of self and other\n var v1 = $LongIntDict.__index__(self)\n var v2 = $LongIntDict.__index__(other)\n // apply \"and\" on zeros and ones\n if(v1.lengthother.value.length){return true}\n else if(self.value.length= other.value}\n}\n\n$LongIntDict.__gt__ = function(self, other){\n return !$LongIntDict.__le__(self, other)\n}\n\n$LongIntDict.__index__ = function(self){\n // Used by bin()\n // returns a string with the binary value of self\n // The algorithm computes the result of the floor division of self by 2\n \n // XXX to do : negative integers\n \n var res = '', pos=self.value.length,\n temp = self.value, d\n while(true){\n d = divmod_pos(temp, '2')\n res = d[1].value + res\n temp = d[0].value\n if(temp=='0'){break}\n }\n return res\n}\n\n$LongIntDict.__invert__ = function(self){\n var bin = $LongIntDict.__index__(self)\n var res = ''\n for(var i=0;iother.value.length){return false}\n else if(self.value.length=0;i--){\n x = (carry+parseInt(res.charAt(i))*2).toString()\n if(x.length==2){res1=x.charAt(1)+res1;carry=parseInt(x.charAt(0))}\n else{res1=x+res1;carry=0}\n }\n if(carry){res1=carry+res1}\n res=res1\n shift = sub_pos(shift.value, '1')\n if(shift.value=='0'){break}\n }\n return {__class__:$LongIntDict, value:res, pos:self.pos}\n}\n\n$LongIntDict.__mod__ = function(self, other){\n return $LongIntDict.__divmod__(self, other)[1]\n}\n\n$LongIntDict.__mro__ = [_b_.object]\n\n$LongIntDict.__mul__ = function(self, other){\n if (typeof other == 'number') other=LongInt(_b_.str.$factory(other))\n var res = mul_pos(self.value, other.value)\n if(self.pos==other.pos){return res}\n res.pos = false\n return res\n}\n\n$LongIntDict.__neg__ = function(obj){\n return {__class__:$LongIntDict, value:obj.value, pos:!obj.pos}\n}\n\n$LongIntDict.__or__ = function(self, other){\n var v1 = $LongIntDict.__index__(self)\n var v2 = $LongIntDict.__index__(other)\n if(v1.length0){\n var dm = divmod_pos(v, 
base.toString())\n res = parseInt(dm[1].value).toString(base)+res\n v = dm[0].value\n if(v==0){break}\n }\n return res\n}\n\nfunction digits(base){\n // Return an object where keys are all the digits valid in specified base\n // and value is \"true\"\n // Used to test if the string passed as first argument to LongInt is valid\n var is_digits = {}\n // Number from 0 to base, or from 0 to 9 if base > 10\n for(var i=0;i10){\n // Additional letters\n // For instance in base 16, add \"abcdefABCDEF\" as keys\n for(var i=0;i2){\n throw _b_.TypeError(\"LongInt takes at most 2 arguments (\"+\n arguments.length+\" given)\")\n }\n // base defaults to 10\n if(base===undefined){base = 10}\n else if(!isinstance(base, int)){\n throw TypeError(\"'\"+$B.get_class(base).__name__+\"' object cannot be interpreted as an integer\")\n }\n if(base<0 || base==1 || base>36){\n throw ValueError(\"LongInt() base must be >= 2 and <= 36\")\n }\n if(isinstance(value, float)){\n if(value>=0){value=Math.round(value.value)}\n else{value=Math.ceil(value.value)}\n }\n if(typeof value=='number'){\n if(isSafeInteger(value)){value = value.toString()}\n else{throw ValueError(\"argument of long_int is not a safe integer\")}\n }else if(typeof value!='string'){\n throw ValueError(\"argument of long_int must be a string, not \"+\n $B.get_class(value).__name__)\n }\n var has_prefix = false, pos = true, start = 0\n // Strip leading and trailing whitespaces\n while(value.charAt(0)==' ' && value.length){value = value.substr(1)}\n while(value.charAt(value.length-1)==' ' && value.length){\n value = value.substr(0, value.length-1)\n }\n // Check if string starts with + or -\n if(value.charAt(0)=='+'){has_prefix=true}\n else if(value.charAt(0)=='-'){has_prefix=true;pos=false}\n if(has_prefix){\n // Remove prefix\n if(value.length==1){\n // \"+\" or \"-\" alone are not valid arguments\n throw ValueError('LongInt argument is not a valid number: \"'+value+'\"')\n }else{value=value.substr(1)}\n }\n // Ignore leading zeros\n while(start Math.pow(2, 32)}\n\n// Big number Library from jsfromhell.com\n// This library helps with producing \"correct\" results from\n// mathematic operations\n\n//+ Jonas Raoni Soares Silva\n//@ http://jsfromhell.com/classes/bignumber [rev. #4]\n\n\nvar BigNumber = function(n, p, r){\n var o = this, i\n if(n instanceof BigNumber){\n for(i in {precision: 0, roundType: 0, _s: 0, _f: 0}){o[i] = n[i]}\n o._d = n._d.slice()\n return\n }\n o.precision = isNaN(p = Math.abs(p)) ? BigNumber.defaultPrecision : p\n o.roundType = isNaN(r = Math.abs(r)) ? BigNumber.defaultRoundType : r\n o._s = (n += \"\").charAt(0) == \"-\"\n o._f = ((n = n.replace(/[^\\d.]/g, \"\").split(\".\", 2))[0] =\n n[0].replace(/^0+/, \"\") || \"0\").length\n for(i = (n = o._d = (n.join(\"\") || \"0\").split(\"\")).length; i;\n n[--i] = +n[i]){}\n o.round()\n}\nwith({$: BigNumber, o: BigNumber.prototype}){\n $.ROUND_HALF_EVEN = ($.ROUND_HALF_DOWN = ($.ROUND_HALF_UP =\n ($.ROUND_FLOOR = ($.ROUND_CEIL = ($.ROUND_DOWN = ($.ROUND_UP = 0) + 1) +\n 1) + 1) + 1) + 1) + 1\n $.defaultPrecision = 40\n $.defaultRoundType = $.ROUND_HALF_UP\n o.add = function(n){\n if(this._s != (n = new BigNumber(n))._s){\n return n._s ^= 1, this.subtract(n)\n }\n var o = new BigNumber(this),\n a = o._d,\n b = n._d,\n la = o._f,\n lb = n._f,\n n = Math.max(la, lb),\n i,\n r\n la != lb && ((lb = la - lb) > 0 ? o._zeroes(b, lb, 1) :\n o._zeroes(a, -lb, 1))\n i = (la = a.length) == (lb = b.length) ? a.length :\n ((lb = la - lb) > 0 ? 
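Aside: the base-conversion loop above repeatedly divides by the base and prepends the remainder digit. A minimal pure-Python sketch of the same idea (the helper name is hypothetical):

def to_base(n, base):
    # collect remainders, least-significant digit first
    digits = "0123456789abcdefghijklmnopqrstuvwxyz"
    out = ""
    while True:
        n, r = divmod(n, base)
        out = digits[r] + out
        if n == 0:
            return out

assert to_base(255, 16) == "ff"
assert to_base(10, 2) == "1010"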
o._zeroes(b, lb) : o._zeroes(a, -lb)).length\n for(r = 0; i; r = (a[--i] = a[i] + b[i] + r) / 10 >>> 0, a[i] %= 10){}\n return r && ++n && a.unshift(r), o._f = n, o.round()\n };\n o.subtract = function(n){\n if(this._s != (n = new BigNumber(n))._s)\n return n._s ^= 1, this.add(n);\n var o = new BigNumber(this),\n c = o.abs().compare(n.abs()) + 1,\n a = c ? o : n,\n b = c ? n : o,\n la = a._f,\n lb = b._f,\n d = la,\n i,\n j;\n a = a._d, b = b._d, la != lb && ((lb = la - lb) > 0 ? o._zeroes(b, lb, 1) : o._zeroes(a, -lb, 1));\n for(i = (la = a.length) == (lb = b.length) ? a.length : ((lb = la - lb) > 0 ? o._zeroes(b, lb) : o._zeroes(a, -lb)).length; i;){\n if(a[--i] < b[i]){\n for(j = i; j && !a[--j]; a[j] = 9);\n --a[j], a[i] += 10;\n }\n b[i] = a[i] - b[i];\n }\n return c || (o._s ^= 1), o._f = d, o._d = b, o.round();\n };\n o.multiply = function(n){\n var o = new BigNumber(this), r = o._d.length >= (n = new BigNumber(n))._d.length, a = (r ? o : n)._d,\n b = (r ? n : o)._d, la = a.length, lb = b.length, x = new BigNumber, i, j, s;\n for(i = lb; i; r && s.unshift(r), x.set(x.add(new BigNumber(s.join(\"\")))))\n for(s = (new Array(lb - --i)).join(\"0\").split(\"\"), r = 0, j = la; j; r += a[--j] * b[i], s.unshift(r % 10), r = (r / 10) >>> 0);\n return o._s = o._s != n._s, o._f = ((r = la + lb - o._f - n._f) >= (j = (o._d = x._d).length) ? this._zeroes(o._d, r - j + 1, 1).length : j) - r, o.round();\n };\n o.divide = function(n){\n if((n = new BigNumber(n)) == \"0\")\n throw new Error(\"Division by 0\");\n else if(this == \"0\")\n return new BigNumber;\n var o = new BigNumber(this), a = o._d, b = n._d, la = a.length - o._f,\n lb = b.length - n._f, r = new BigNumber, i = 0, j, s, l, f = 1, c = 0, e = 0;\n r._s = o._s != n._s, r.precision = Math.max(o.precision, n.precision),\n r._f = +r._d.pop(), la != lb && o._zeroes(la > lb ? b : a, Math.abs(la - lb));\n n._f = b.length, b = n, b._s = false, b = b.round();\n for(n = new BigNumber; a[0] == \"0\"; a.shift());\n out:\n do{\n for(l = c = 0, n == \"0\" && (n._d = [], n._f = 0); i < a.length && n.compare(b) == -1; ++i){\n (l = i + 1 == a.length, (!f && ++c > 1 || (e = l && n == \"0\" && a[i] == \"0\")))\n && (r._f == r._d.length && ++r._f, r._d.push(0));\n (a[i] == \"0\" && n == \"0\") || (n._d.push(a[i]), ++n._f);\n if(e)\n break out;\n if((l && n.compare(b) == -1 && (r._f == r._d.length && ++r._f, 1)) || (l = 0))\n while(r._d.push(0), n._d.push(0), ++n._f, n.compare(b) == -1);\n }\n if(f = 0, n.compare(b) == -1 && !(l = 0))\n while(l ? r._d.push(0) : l = 1, n._d.push(0), ++n._f, n.compare(b) == -1);\n for(s = new BigNumber, j = 0; n.compare(y = s.add(b)) + 1 && ++j; s.set(y));\n n.set(n.subtract(s)), !l && r._f == r._d.length && ++r._f, r._d.push(j);\n }\n while((i < a.length || n != \"0\") && (r._d.length - r._f) <= r.precision);\n return r.round();\n };\n o.mod = function(n){\n return this.subtract(this.divide(n).intPart().multiply(n));\n };\n o.pow = function(n){\n var o = new BigNumber(this), i;\n if((n = (new BigNumber(n)).intPart()) == 0) return o.set(1);\n for(i = Math.abs(n); --i; o.set(o.multiply(this)));\n return n < 0 ? o.set((new BigNumber(1)).divide(o)) : o;\n };\n o.set = function(n){\n return this.constructor(n), this;\n };\n o.compare = function(n){\n var a = this, la = this._f, b = new BigNumber(n), lb = b._f, r = [-1, 1], i, l;\n if(a._s != b._s)\n return a._s ? 
-1 : 1;\n if(la != lb)\n return r[(la > lb) ^ a._s];\n for(la = (a = a._d).length, lb = (b = b._d).length, i = -1, l = Math.min(la, lb); ++i < l;)\n if(a[i] != b[i])\n return r[(a[i] > b[i]) ^ a._s];\n return la != lb ? r[(la > lb) ^ a._s] : 0;\n };\n o.negate = function(){\n var n = new BigNumber(this); return n._s ^= 1, n;\n };\n o.abs = function(){\n var n = new BigNumber(this); return n._s = 0, n;\n };\n o.intPart = function(){\n return new BigNumber((this._s ? \"-\" : \"\") + (this._d.slice(0, this._f).join(\"\") || \"0\"));\n };\n o.valueOf = o.toString = function(){\n var o = this;\n return (o._s ? \"-\" : \"\") + (o._d.slice(0, o._f).join(\"\") || \"0\") + (o._f != o._d.length ? \".\" + o._d.slice(o._f).join(\"\") : \"\");\n };\n o._zeroes = function(n, l, t){\n var s = [\"push\", \"unshift\"][t || 0];\n for(++l; --l; n[s](0));\n return n;\n };\n o.round = function(){\n if(\"_rounding\" in this) return this;\n var $ = BigNumber, r = this.roundType, b = this._d, d, p, n, x;\n for(this._rounding = true; this._f > 1 && !b[0]; --this._f, b.shift());\n for(d = this._f, p = this.precision + d, n = b[p]; b.length > d && !b[b.length -1]; b.pop());\n x = (this._s ? \"-\" : \"\") + (p - d ? \"0.\" + this._zeroes([], p - d - 1).join(\"\") : \"\") + 1;\n if(b.length > p){\n n && (r == $.DOWN ? false : r == $.UP ? true : r == $.CEIL ? !this._s\n : r == $.FLOOR ? this._s : r == $.HALF_UP ? n >= 5 : r == $.HALF_DOWN ? n > 5\n : r == $.HALF_EVEN ? n >= 5 && b[p - 1] & 1 : false) && this.add(x);\n b.splice(p, b.length - p);\n }\n return delete this._rounding, this;\n };\n}\n\nvar isNegZero = function(x) {return x === 0 && Math.atan2(x,x) < 0}\n\nvar _mod = {\n __getattr__ : function(attr){\n var res = this[attr]\n if(res === undefined){$raise('AttributeError',\n 'module math has no attribute ' + attr)}\n return res\n },\n acos: function(x) {return float.$factory(Math.acos(float_check(x)))},\n acosh: function(x) {\n if(_b_.$isinf(x)){return float.$factory('inf')}\n var y = float_check(x)\n return float.$factory(Math.log(y + Math.sqrt(y * y - 1)))\n },\n asin: function(x) {return float.$factory(Math.asin(float_check(x)))},\n asinh: function(x) {\n if(_b_.$isninf(x)){return float.$factory('-inf')}\n if(_b_.$isinf(x)){return float.$factory('inf')}\n var y = float_check(x)\n return float.$factory(Math.log(y + Math.sqrt(y * y + 1)))\n },\n atan: function(x) {\n if(_b_.$isninf(x)){return float.$factory(-Math.PI / 2)}\n if(_b_.$isinf(x)){return float.$factory(Math.PI / 2)}\n return float.$factory(Math.atan(float_check(x)))},\n atan2: function(y, x) {\n return float.$factory(Math.atan2(float_check(y), float_check(x)))\n },\n atanh: function(x) {\n var y = float_check(x)\n if(y == 0){return 0}\n return float.$factory(0.5 * Math.log((1 / y + 1)/(1 / y - 1)));\n },\n ceil: function(x) {\n try{return getattr(x, '__ceil__')()}catch(err){}\n\n if(_b_.$isninf(x)){return float.$factory('-inf')}\n if(_b_.$isinf(x)){return float.$factory('inf')}\n if(isNaN(x)){return float.$factory('nan')}\n\n var y = float_check(x)\n if(! isNaN(parseFloat(y)) && isFinite(y)){\n return int.$factory(Math.ceil(y))\n }\n\n $raise('ValueError',\n 'object is not a number and does not contain __ceil__')\n },\n copysign: function(x,y) {\n var x1 = Math.abs(float_check(x))\n var y1 = float_check(y)\n var sign = Math.sign(y1)\n sign = (sign == 1 || Object.is(sign, +0)) ? 
1 : - 1\n return float.$factory(x1 * sign)\n },\n cos : function(x){return float.$factory(Math.cos(float_check(x)))},\n cosh: function(x){\n if(_b_.$isinf(x)) {return float.$factory('inf')}\n var y = float_check(x)\n if(Math.cosh !== undefined){return float.$factory(Math.cosh(y))}\n return float.$factory((Math.pow(Math.E, y) + Math.pow(Math.E, -y)) / 2)\n },\n degrees: function(x){return float.$factory(float_check(x) * 180 / Math.PI)},\n e: float.$factory(Math.E),\n erf: function(x) {\n // inspired from\n // http://stackoverflow.com/questions/457408/is-there-an-easily-available-implementation-of-erf-for-python\n var y = float_check(x)\n var t = 1.0 / (1.0 + 0.5 * Math.abs(y))\n var ans = 1 - t * Math.exp( -y * y - 1.26551223 +\n t * ( 1.00002368 +\n t * ( 0.37409196 +\n t * ( 0.09678418 +\n t * (-0.18628806 +\n t * ( 0.27886807 +\n t * (-1.13520398 +\n t * ( 1.48851587 +\n t * (-0.82215223 +\n t * 0.17087277)))))))))\n if(y >= 0.0){return ans}\n return -ans\n },\n\n erfc: function(x) {\n // inspired from\n // http://stackoverflow.com/questions/457408/is-there-an-easily-available-implementation-of-erf-for-python\n var y = float_check(x)\n var t = 1.0 / (1.0 + 0.5 * Math.abs(y))\n var ans = 1 - t * Math.exp( -y * y - 1.26551223 +\n t * ( 1.00002368 +\n t * ( 0.37409196 +\n t * ( 0.09678418 +\n t * (-0.18628806 +\n t * ( 0.27886807 +\n t * (-1.13520398 +\n t * ( 1.48851587 +\n t * (-0.82215223 +\n t * 0.17087277)))))))))\n if(y >= 0.0){return 1 - ans}\n return 1 + ans\n },\n exp: function(x){\n if(_b_.$isninf(x)){return float.$factory(0)}\n if(_b_.$isinf(x)){return float.$factory('inf')}\n var _r = Math.exp(float_check(x))\n if(_b_.$isinf(_r)){throw OverflowError(\"math range error\")}\n return float.$factory(_r)\n },\n expm1: function(x){\n if(_b_.$isninf(x)){return float.$factory(0)}\n if(_b_.$isinf(x)){return float.$factory('inf')}\n var _r = Math.expm1(float_check(x))\n if(_b_.$isinf(_r)){throw OverflowError(\"math range error\")}\n return float.$factory(_r)\n },\n //fabs: function(x){ return x>0?float.$factory(x):float.$factory(-x)},\n fabs: function(x){return _b_.$fabs(x)}, //located in py_float.js\n factorial: function(x) {\n //using code from http://stackoverflow.com/questions/3959211/fast-factorial-function-in-javascript\n var y = float_check(x),\n r = 1\n for(var i = 2; i <= y; i++){r *= i}\n return r\n },\n floor: function(x){return Math.floor(float_check(x))},\n fmod: function(x,y){return float.$factory(float_check(x) % float_check(y))},\n frexp: function(x){\n var _l = _b_.$frexp(x)\n return _b_.tuple.$factory([float.$factory(_l[0]), _l[1]])\n },\n fsum: function(x){\n /* Translation into Javascript of the function msum in an Active\n State Cookbook recipe : https://code.activestate.com/recipes/393090/\n by Raymond Hettinger\n */\n var partials = [],\n res = new Number(),\n _it = _b_.iter(x)\n while(true){\n try{\n var x = _b_.next(_it),\n i = 0\n for(var j = 0, len = partials.length; j < len; j++){\n var y = partials[j]\n if(Math.abs(x) < Math.abs(y)){\n var z = x\n x = y\n y = z\n }\n var hi = x + y,\n lo = y - (hi - x)\n if(lo){\n partials[i] = lo\n i++\n }\n x = hi\n }\n partials = partials.slice(0, i).concat([x])\n }catch(err){\n if(_b_.isinstance(err, _b_.StopIteration)){break}\n throw err\n }\n }\n var res = new Number(0)\n for(var i = 0; i < partials.length; i++){\n res += new Number(partials[i])\n }\n return new Number(res)\n },\n gamma: function(x){\n if(_b_.isinstance(x, int)){\n if(i < 1){\n throw _b_.ValueError.$factory(\"math domain error\")\n }\n var res = 1\n 
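Aside: fsum above is a translation of Raymond Hettinger's msum recipe, which keeps exact partial sums to avoid accumulated rounding error. An illustrative comparison (values as in CPython):

import math

vals = [0.1] * 10
sum(vals)         # slightly less than 1.0 because of binary rounding
math.fsum(vals)   # 1.0, thanks to the partial-sums technique above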
for(var i = 1; i < x; i++){res *= i}\n return new Number(res)\n }\n // Adapted from https://en.wikipedia.org/wiki/Lanczos_approximation\n var p = [676.5203681218851,\n -1259.1392167224028,\n 771.32342877765313,\n -176.61502916214059,\n 12.507343278686905,\n -0.13857109526572012,\n 9.9843695780195716e-6,\n 1.5056327351493116e-7\n ]\n\n var EPSILON = 1e-07\n function drop_imag(z){\n if(Math.abs(z.imag) <= EPSILON){\n z = z.real\n }\n return z\n }\n var z = x\n if(z < 0.5){\n var y = Math.PI / (Math.sin(Math.PI * z) * _mod.gamma(1-z)) // Reflection formula\n }else{\n z -= 1\n var x = 0.99999999999980993,\n i = 0\n for(var i = 0, len = p.length; i < len; i++){\n var pval = p[i]\n x += pval / (z + i + 1)\n }\n var t = z + p.length - 0.5,\n sq = Math.sqrt(2 * Math.PI),\n y = sq * Math.pow(t, (z + 0.5)) * Math.exp(-t) * x\n }\n return drop_imag(y)\n },\n gcd: function(){\n var $ = $B.args(\"gcd\", 2, {a: null, b: null}, ['a', 'b'],\n arguments, {}, null, null),\n a = $B.PyNumber_Index($.a),\n b = $B.PyNumber_Index($.b)\n if(a == 0 && b == 0){return 0}\n // https://stackoverflow.com/questions/17445231/js-how-to-find-the-greatest-common-divisor\n a = Math.abs(a);\n b = Math.abs(b);\n if(b > a){var temp = a; a = b; b = temp;}\n while(true){\n if(b == 0){return a}\n a %= b\n if(a == 0){return b}\n b %= a\n }\n },\n hypot: function(x,y){\n if(_b_.$isinf(x) || _b_.$isinf(y)){return float.$factory('inf')}\n var x1 = float_check(x),\n y1 = float_check(y)\n return float.$factory(Math.sqrt(x1 * x1 + y1 * y1))},\n inf: float.$factory('inf'),\n isclose:function(){\n var $ns = $B.args(\"isclose\",\n 4,\n {a: null, b: null, rel_tol: null, abs_tol: null},\n ['a', 'b', 'rel_tol', 'abs_tol'],\n arguments,\n {rel_tol: 1e-09, abs_tol: 0.0},\n null,\n null)\n var a = $ns['a'],\n b = $ns['b'],\n rel_tol = $ns['rel_tol'],\n abs_tol = $ns['abs_tol']\n if(rel_tol < 0.0 || abs_tol < 0.0){\n throw ValueError('tolerances must be non-negative')\n }\n if(a == b){return True}\n if(_b_.$isinf(a) || _b_.$isinf(b)){return false}\n var diff = _b_.$fabs(b - a)\n var result = (\n (diff <= _b_.$fabs(rel_tol * b)) ||\n (diff <= _b_.$fabs(rel_tol * a))\n ) || (diff <= _b_.$fabs(abs_tol)\n )\n return result\n },\n isfinite: function(x){return isFinite(float_check(x))},\n isinf: function(x){return _b_.$isinf(float_check(x))},\n isnan: function(x){return isNaN(float_check(x))},\n ldexp: function(x, i){return _b_.$ldexp(x, i)}, //located in py_float.js\n lgamma: function(x){\n return new Number(Math.log(Math.abs(_mod.gamma(x))))\n },\n log: function(x, base){\n var x1 = float_check(x)\n if(base === undefined){return float.$factory(Math.log(x1))}\n return float.$factory(Math.log(x1) / Math.log(float_check(base)))\n },\n log1p: function(x){return float.$factory(Math.log1p(float_check(x)))},\n log2: function(x){\n if(isNaN(x)){return float.$factory('nan')}\n if(_b_.$isninf(x)) {throw ValueError('')}\n var x1 = float_check(x)\n if(x1 < 0.0){throw ValueError('')}\n return float.$factory(Math.log(x1) / Math.LN2)\n },\n log10: function(x) {\n return float.$factory(Math.log10(float_check(x)))\n },\n modf: function(x) {\n if(_b_.$isninf(x)){\n return _b_.tuple.$factory([0.0, float.$factory('-inf')])\n }\n if(_b_.$isinf(x)){\n return _b_.tuple.$factory([0.0, float.$factory('inf')])\n }\n if(isNaN(x)){\n return _b_.tuple.$factory([float.$factory('nan'),\n float.$factory('nan')])\n }\n\n var x1 = float_check(x)\n if(x1 > 0){\n var i = float.$factory(x1 - Math.floor(x1))\n return _b_.tuple.$factory([i, float.$factory(x1 - i)])\n }\n\n var x2 = 
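Aside: quick checks of the gcd() (Euclidean loop) and isclose() (rel_tol/abs_tol test) implementations above; the numbers are illustrative.

import math

math.gcd(12, 18)                 # 6
math.isclose(1.0, 1.0 + 1e-10)   # True with the default rel_tol=1e-09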
Math.ceil(x1)\n var i = float.$factory(x1 - x2)\n return _b_.tuple.$factory([i, float.$factory(x2)])\n },\n nan: float.$factory('nan'),\n pi : float.$factory(Math.PI),\n pow: function(x,y) {\n var x1 = float_check(x)\n var y1 = float_check(y)\n if(y1 == 0){return float.$factory(1)}\n if(x1 == 0 && y1 < 0){throw _b_.ValueError('')}\n\n if(isNaN(y1)){\n if(x1 == 1){return float.$factory(1)}\n return float.$factory('nan')\n }\n if(x1 == 0){return float.$factory(0)}\n\n if(_b_.$isninf(y)){\n if(x1 == 1 || x1 == -1){return float.$factory(1)}\n if(x1 < 1 && x1 > -1){return float.$factory('inf')}\n return float.$factory(0)\n }\n if(_b_.$isinf(y)){\n if(x1 == 1 || x1 == -1){return float.$factory(1)}\n if(x1 < 1 && x1 > -1){return float.$factory(0)}\n return float.$factory('inf')\n }\n\n if(isNaN(x1)){return float.$factory('nan')}\n if(_b_.$isninf(x)){\n if(y1 > 0 && isOdd(y1)){return float.$factory('-inf')}\n if(y1 > 0){return float.$factory('inf')} // this is even or a float\n if(y1 < 0){return float.$factory(0)}\n return float.$factory(1)\n }\n\n if(_b_.$isinf(x)){\n if(y1 > 0){return float.$factory('inf')}\n if(y1 < 0){return float.$factory(0)}\n return float.$factory(1)\n }\n\n var r\n if(isLargeNumber(x1) || isLargeNumber(y1)){\n var x = new BigNumber(x1),\n y = new BigNumber(y1)\n r = x.pow(y)\n }else{\n r = Math.pow(x1,y1)\n }\n\n if(isNaN(r)){return float.$factory('nan')}\n if(_b_.$isninf(r)){return float.$factory('-inf')}\n if(_b_.$isinf(r)){return float.$factory('inf')}\n\n return r\n },\n radians: function(x){\n return float.$factory(float_check(x) * Math.PI / 180)\n },\n sin : function(x){return float.$factory(Math.sin(float_check(x)))},\n sinh: function(x) {\n //if (_b_.$isinf(x)) return float.$factory('inf');\n var y = float_check(x)\n if(Math.sinh !== undefined){return float.$factory(Math.sinh(y))}\n return float.$factory(\n (Math.pow(Math.E, y) - Math.pow(Math.E, -y)) / 2)\n },\n sqrt : function(x){\n var y = float_check(x)\n if(y < 0){throw ValueError(\"math range error\")}\n if(_b_.$isinf(y)){return float.$factory('inf')}\n var _r = Math.sqrt(y)\n if(_b_.$isinf(_r)){throw OverflowError(\"math range error\")}\n return float.$factory(_r)\n },\n tan: function(x) {\n var y = float_check(x)\n return float.$factory(Math.tan(y))\n },\n tanh: function(x) {\n var y = float_check(x)\n if(Math.tanh !== undefined){return float.$factory(Math.tanh(y))}\n return float.$factory((Math.pow(Math.E, y) - Math.pow(Math.E, -y))/\n (Math.pow(Math.E, y) + Math.pow(Math.E, -y)))\n },\n trunc: function(x) {\n try{return getattr(x, '__trunc__')()}catch(err){}\n var x1 = float_check(x)\n if(!isNaN(parseFloat(x1)) && isFinite(x1)){\n if(Math.trunc !== undefined){return int.$factory(Math.trunc(x1))}\n if(x1 > 0){return int.$factory(Math.floor(x1))}\n return int.$factory(Math.ceil(x1)) // x1 < 0\n }\n $raise('ValueError',\n 'object is not a number and does not contain __trunc__')\n }\n}\n\nfor(var $attr in _mod){\n if(typeof _mod[$attr] === 'function'){\n _mod[$attr].__repr__ = (function(func){\n return function(){return ''}\n })($attr)\n _mod[$attr].__str__ = (function(func){\n return function(){return ''}\n })($attr)\n }\n}\n\nreturn _mod\n\n})(__BRYTHON__)\n"], "modulefinder": [".js", "var $module=(function($B){\n\nvar _b_=$B.builtins\nvar _mod = {}\n\n$ModuleFinderDict = {__class__:_b_.type,__name__:'ModuleFinder'}\n$ModuleFinderDict.__mro__ = [_b_.object]\n\n$ModuleFinderDict.run_script = function(self, pathname){\n // pathname is the url of a Python script\n var py_src = _b_.$open(pathname).read()\n // 
transform into internal Brython tree structure\n var root = $B.py2js(py_src)\n // walk the tree to find occurences of imports\n function walk(node){\n var modules = []\n var ctx = node.context\n if(ctx && ctx.type=='node'){ctx = ctx.tree[0]}\n\n if(ctx && ctx.type==\"import\"){\n for(var i=0, _len_i = ctx.tree.length; i < _len_i;i++){\n if(modules.indexOf(ctx.tree[i].name)==-1){\n modules.push(ctx.tree[i].name)\n }\n }\n }else if(ctx && ctx.type==\"from\"){\n if(modules.indexOf(ctx.module)==-1){\n modules.push(ctx.module)\n }\n }\n \n for(var i=0, _len_i = node.children.length; i < _len_i;i++){\n mods = walk(node.children[i])\n for(var j=0, _len_j = mods.length; j < _len_j;j++){\n if(modules.indexOf(mods[j])==-1){modules.push(mods[j])}\n }\n }\n return modules\n }\n self.modules = walk(root)\n}\n\n_mod.ModuleFinder = function(){return {__class__:$ModuleFinderDict}\n}\n_mod.ModuleFinder.$dict = $ModuleFinderDict\n_mod.ModuleFinder.__class__ = $B.$factory\n$ModuleFinderDict.$factory = _mod.ModuleFinder\n\nreturn _mod\n})(__BRYTHON__)\n"], "posix": [".js", "/*\nThis module provides access to operating system functionality that is\nstandardized by the C Standard and the POSIX standard (a thinly\ndisguised Unix interface). Refer to the library manual and\ncorresponding Unix manual entries for more information on calls.\n*/\n\nvar $B = __BRYTHON__,\n _b_ = $B.builtins\n\nfunction _randint(a, b){\n return parseInt(Math.random() * (b - a + 1) + a)\n}\n\nvar stat_result = $B.make_class(\"stat_result\",\n function(){\n var res = {\n __class__: stat_result,\n st_atime: new Date(),\n st_uid: -1,\n st_gid: -1,\n st_ino: -1,\n st_mode: 0,\n st_size: 1\n };\n [\"mtime\", \"ctime\", \"atime_ns\", \"mtime_ns\", \"ctime_ns\"].\n forEach(function(item){\n res[\"st_\" + item] = res.st_atime\n });\n return res\n }\n)\n$B.set_func_names(stat_result, \"posix\")\n\nvar $module = {\n F_OK: 0,\n O_APPEND: 8,\n O_BINARY: 32768,\n O_CREAT: 256,\n O_EXCL: 1024,\n O_NOINHERIT: 128,\n O_RANDOM: 16,\n O_RDONLY: 0,\n O_RDWR: 2,\n O_SEQUENTIAL: 32,\n O_SHORT_LIVED: 4096,\n O_TEMPORARY: 64,\n O_TEXT: 16384,\n O_TRUNC: 512,\n O_WRONLY: 1,\n P_DETACH: 4,\n P_NOWAIT: 1,\n P_NOWAITO: 3,\n P_OVERLAY: 2,\n P_WAIT: 0,\n R_OK: 4,\n TMP_MAX: 32767,\n W_OK: 2,\n X_OK: 1,\n _have_functions: ['MS_WINDOWS'],\n environ: _b_.dict.$factory(\n [['PYTHONPATH', $B.brython_path],\n ['PYTHONUSERBASE', ' ']]),\n error: _b_.OSError,\n getcwd: function(){return $B.brython_path},\n getpid: function(){return 0},\n lstat: function(){return stat_result.$factory()},\n open: function(path, flags){return _b_.open(path, flags)},\n stat: function(){return stat_result.$factory()},\n urandom: function(n){\n var randbytes = []\n for(var i = 0; i < n; i++){\n randbytes.push(_randint(0, 255))\n }\n return _b_.bytes.$factory(randbytes)\n },\n WTERMSIG: function(){return 0},\n WNOHANG: function(){return _b_.tuple.$factory([0, 0])}\n};\n\n[\"WCOREDUMP\", \"WIFCONTINUED\", \"WIFSTOPPED\", \"WIFSIGNALED\", \"WIFEXITED\"].forEach(function(funcname){\n $module[funcname] = function(){return false}\n });\n\n[\"WEXITSTATUS\", \"WSTOPSIG\", \"WTERMSIG\"].\n forEach(function(funcname){\n $module[funcname] = function(){return _b_.None}\n });\n\n[\"_exit\", \"_getdiskusage\", \"_getfileinformation\", \"_getfinalpathname\",\n \"_getfullpathname\", \"_isdir\", \"abort\", \"access\", \"chdir\", \"chmod\",\n \"close\", \"closerange\", \"device_encoding\", \"dup\", \"dup2\",\n \"execv\", \"execve\", \"fsat\", \"fsync\", \"get_terminal_size\", \"getcwdb\",\n \"getlogin\", 
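Aside: the posix shim above fakes most OS calls for the browser; urandom() is backed by Math.random, so it is not cryptographically strong. Illustrative call:

import posix

posix.urandom(4)    # bytes object with 4 pseudo-random values in range 0-255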
\"getppid\", \"isatty\", \"kill\", \"link\", \"listdir\", \"lseek\",\n \"mkdir\", \"pipe\", \"putenv\", \"read\", \"readlink\", \"remove\", \"rename\",\n \"replace\", \"rmdir\", \"spawnv\", \"spawnve\", \"startfile\", \"stat_float_times\",\n \"statvfs_result\", \"strerror\", \"symlink\", \"system\", \"terminal_size\",\n \"times\", \"times_result\", \"umask\", \"uname_result\", \"unlink\", \"utime\",\n \"waitpid\", \"write\"].forEach(function(funcname){\n $module[funcname] = function(){\n throw _b_.NotImplementedError.$factory(\"posix.\" + funcname +\n \" is not implemented\")\n }\n });\n"], "random": [".js", "// Javascript implementation of the random module\n// Based on Ian Bicking's implementation of the Mersenne twister\n\nvar $module = (function($B){\n\nvar _b_ = $B.builtins,\n i\n\nvar VERSION = 3\n\n// Code copied from https://github.com/ianb/whrandom/blob/master/mersenne.js\n// by Ian Bicking\n\n// this program is a JavaScript version of Mersenne Twister,\n// a straight conversion from the original program, mt19937ar.c,\n// translated by y. okada on july 17, 2006.\n// and modified a little at july 20, 2006, but there are not any substantial differences.\n// modularized by Ian Bicking, March 25, 2013 (found original version at http://www.math.sci.hiroshima-u.ac.jp/~m-mat/MT/VERSIONS/JAVASCRIPT/java-script.html)\n// in this program, procedure descriptions and comments of original source code were not removed.\n// lines commented with //c// were originally descriptions of c procedure. and a few following lines are appropriate JavaScript descriptions.\n// lines commented with /* and */ are original comments.\n// lines commented with // are additional comments in this JavaScript version.\n/*\n A C-program for MT19937, with initialization improved 2002/1/26.\n Coded by Takuji Nishimura and Makoto Matsumoto.\n\n Before using, initialize the state by using init_genrand(seed)\n or init_by_array(init_key, key_length).\n\n Copyright (C) 1997 - 2002, Makoto Matsumoto and Takuji Nishimura,\n All rights reserved.\n\n Redistribution and use in source and binary forms, with or without\n modification, are permitted provided that the following conditions\n are met:\n\n 1. Redistributions of source code must retain the above copyright\n notice, this list of conditions and the following disclaimer.\n\n 2. Redistributions in binary form must reproduce the above copyright\n notice, this list of conditions and the following disclaimer in the\n documentation and/or other materials provided with the distribution.\n\n 3. The names of its contributors may not be used to endorse or promote\n products derived from this software without specific prior written\n permission.\n\n THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR\n CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,\n EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,\n PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR\n PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF\n LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING\n NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS\n SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\n Any feedback is very welcome.\n http://www.math.sci.hiroshima-u.ac.jp/~m-mat/MT/emt.html\n email: m-mat @ math.sci.hiroshima-u.ac.jp (remove space)\n*/\n\nfunction RandomStream(seed) {\n\n /*jshint bitwise:false */\n /* Period parameters */\n //c//#define N 624\n //c//#define M 397\n //c//#define MATRIX_A 0x9908b0dfUL /* constant vector a */\n //c//#define UPPER_MASK 0x80000000UL /* most significant w-r bits */\n //c//#define LOWER_MASK 0x7fffffffUL /* least significant r bits */\n var N = 624\n var M = 397\n var MATRIX_A = 0x9908b0df /* constant vector a */\n var UPPER_MASK = 0x80000000 /* most significant w-r bits */\n var LOWER_MASK = 0x7fffffff /* least significant r bits */\n //c//static unsigned long mt[N]; /* the array for the state vector */\n //c//static int mti=N+1; /* mti==N+1 means mt[N] is not initialized */\n var mt = new Array(N) /* the array for the state vector */\n var mti = N + 1 /* mti==N+1 means mt[N] is not initialized */\n\n function unsigned32(n1){\n // returns a 32-bits unsiged integer from an operand to which applied a\n // bit operator.\n return n1 < 0 ? (n1 ^ UPPER_MASK) + UPPER_MASK : n1\n }\n\n function subtraction32(n1, n2){\n // emulates lowerflow of a c 32-bits unsiged integer variable, instead of\n // the operator -. these both arguments must be non-negative integers\n // expressible using unsigned 32 bits.\n return n1 < n2 ? unsigned32((0x100000000 - (n2 - n1)) & 0xffffffff) :\n n1 - n2\n }\n\n function addition32(n1, n2){\n // emulates overflow of a c 32-bits unsiged integer variable, instead of\n // the operator +. these both arguments must be non-negative integers\n // expressible using unsigned 32 bits.\n return unsigned32((n1 + n2) & 0xffffffff)\n }\n\n function multiplication32(n1, n2){\n // emulates overflow of a c 32-bits unsiged integer variable, instead of the\n // operator *. these both arguments must be non-negative integers\n // expressible using unsigned 32 bits.\n var sum = 0\n for (var i = 0; i < 32; ++i){\n if((n1 >>> i) & 0x1){\n sum = addition32(sum, unsigned32(n2 << i))\n }\n }\n return sum\n }\n\n /* initializes mt[N] with a seed */\n //c//void init_genrand(unsigned long s)\n function init_genrand(s) {\n //c//mt[0]= s & 0xffffffff;\n mt[0] = unsigned32(s & 0xffffffff)\n for(mti = 1; mti < N; mti++){\n mt[mti] =\n //c//(1812433253 * (mt[mti-1] ^ (mt[mti-1] >> 30)) + mti);\n addition32(multiplication32(1812433253,\n unsigned32(mt[mti - 1] ^ (mt[mti - 1] >>> 30))), mti)\n /* See Knuth TAOCP Vol2. 3rd Ed. P.106 for multiplier. */\n /* In the previous versions, MSBs of the seed affect */\n /* only MSBs of the array mt[]. 
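Aside: unsigned32/addition32/multiplication32 above emulate C's unsigned 32-bit wrap-around on top of JavaScript numbers. In Python the same effect is a mask with 0xffffffff (sketch; the helper name is hypothetical):

def u32(n):
    return n & 0xFFFFFFFF        # keep only the low 32 bits, like the helpers above

assert u32(0xFFFFFFFF + 5) == 4  # wraps around instead of growing past 32 bits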
*/\n /* 2002/01/09 modified by Makoto Matsumoto */\n //c//mt[mti] &= 0xffffffff;\n mt[mti] = unsigned32(mt[mti] & 0xffffffff);\n /* for >32 bit machines */\n }\n }\n\n /* initialize by an array with array-length */\n /* init_key is the array for initializing keys */\n /* key_length is its length */\n /* slight change for C++, 2004/2/26 */\n //c//void init_by_array(unsigned long init_key[], int key_length)\n function init_by_array(init_key, key_length) {\n //c//int i, j, k;\n var i, j, k\n init_genrand(19650218)\n i = 1\n j = 0\n k = (N > key_length ? N : key_length)\n for(; k; k--){\n //c//mt[i] = (mt[i] ^ ((mt[i-1] ^ (mt[i-1] >> 30)) * 1664525))\n //c// + init_key[j] + j; /* non linear */\n mt[i] = addition32(\n addition32(unsigned32(mt[i] ^\n multiplication32(unsigned32(mt[i - 1] ^ (mt[i - 1] >>> 30)),\n 1664525)),\n init_key[j]), j)\n mt[i] =\n //c//mt[i] &= 0xffffffff; /* for WORDSIZE > 32 machines */\n unsigned32(mt[i] & 0xffffffff)\n i++\n j++\n if(i >= N){mt[0] = mt[N - 1]; i = 1}\n if(j >= key_length){j = 0}\n }\n for(k = N - 1; k; k--){\n //c//mt[i] = (mt[i] ^ ((mt[i-1] ^ (mt[i-1] >> 30)) * 1566083941))\n //c//- i; /* non linear */\n mt[i] = subtraction32(\n unsigned32(\n (mt[i]) ^\n multiplication32(\n unsigned32(mt[i - 1] ^ (mt[i - 1] >>> 30)),\n 1566083941)),\n i\n )\n //c//mt[i] &= 0xffffffff; /* for WORDSIZE > 32 machines */\n mt[i] = unsigned32(mt[i] & 0xffffffff)\n i++\n if(i >= N){mt[0] = mt[N - 1]; i = 1}\n }\n mt[0] = 0x80000000; /* MSB is 1; assuring non-zero initial array */\n }\n\n /* generates a random number on [0,0xffffffff]-interval */\n //c//unsigned long genrand_int32(void)\n function genrand_int32() {\n //c//unsigned long y;\n //c//static unsigned long mag01[2]={0x0UL, MATRIX_A};\n var y;\n var mag01 = [0x0, MATRIX_A];\n /* mag01[x] = x * MATRIX_A for x=0,1 */\n\n if(mti >= N){ /* generate N words at one time */\n //c//int kk;\n var kk\n\n if(mti == N + 1){ /* if init_genrand() has not been called, */\n init_genrand(Date.now()) /* a default initial seed is used */\n }\n\n for(kk = 0; kk < N - M; kk++){\n //c//y = (mt[kk]&UPPER_MASK)|(mt[kk+1]&LOWER_MASK);\n //c//mt[kk] = mt[kk+M] ^ (y >> 1) ^ mag01[y & 0x1];\n y = unsigned32((mt[kk]&UPPER_MASK) | (mt[kk + 1]&LOWER_MASK))\n mt[kk] = unsigned32(mt[kk + M] ^ (y >>> 1) ^ mag01[y & 0x1])\n }\n for(;kk < N - 1; kk++){\n //c//y = (mt[kk]&UPPER_MASK)|(mt[kk+1]&LOWER_MASK);\n //c//mt[kk] = mt[kk+(M-N)] ^ (y >> 1) ^ mag01[y & 0x1];\n y = unsigned32((mt[kk]&UPPER_MASK) | (mt[kk + 1]&LOWER_MASK))\n mt[kk] = unsigned32(mt[kk + (M - N)] ^ (y >>> 1) ^ mag01[y & 0x1])\n }\n //c//y = (mt[N-1]&UPPER_MASK)|(mt[0]&LOWER_MASK);\n //c//mt[N-1] = mt[M-1] ^ (y >> 1) ^ mag01[y & 0x1];\n y = unsigned32((mt[N - 1] & UPPER_MASK) | (mt[0] & LOWER_MASK))\n mt[N - 1] = unsigned32(mt[M - 1] ^ (y >>> 1) ^ mag01[y & 0x1])\n mti = 0\n }\n\n y = mt[mti++]\n\n /* Tempering */\n //c//y ^= (y >> 11);\n //c//y ^= (y << 7) & 0x9d2c5680;\n //c//y ^= (y << 15) & 0xefc60000;\n //c//y ^= (y >> 18);\n y = unsigned32(y ^ (y >>> 11))\n y = unsigned32(y ^ ((y << 7) & 0x9d2c5680))\n y = unsigned32(y ^ ((y << 15) & 0xefc60000))\n y = unsigned32(y ^ (y >>> 18))\n\n return y\n }\n\n /* generates a random number on [0,0x7fffffff]-interval */\n //c//long genrand_int31(void)\n function genrand_int31(){\n //c//return (genrand_int32()>>1);\n return (genrand_int32()>>>1)\n }\n\n /* generates a random number on [0,1]-real-interval */\n //c//double genrand_real1(void)\n function genrand_real1(){\n return genrand_int32()*(1.0/4294967295.0)\n /* divided by 2^32-1 */\n 
}\n\n /* generates a random number on [0,1)-real-interval */\n //c//double genrand_real2(void)\n function genrand_real2(){\n return genrand_int32() * (1.0 / 4294967296.0)\n /* divided by 2^32 */\n }\n\n /* generates a random number on (0,1)-real-interval */\n //c//double genrand_real3(void)\n function genrand_real3() {\n return ((genrand_int32()) + 0.5) * (1.0 / 4294967296.0)\n /* divided by 2^32 */\n }\n\n /* generates a random number on [0,1) with 53-bit resolution*/\n //c//double genrand_res53(void)\n function genrand_res53() {\n //c//unsigned long a=genrand_int32()>>5, b=genrand_int32()>>6;\n var a = genrand_int32() >>> 5,\n b = genrand_int32() >>> 6\n return (a * 67108864.0 + b) * (1.0 / 9007199254740992.0)\n }\n /* These real versions are due to Isaku Wada, 2002/01/09 added */\n\n var random = genrand_res53\n\n random.seed = function(seed){\n if(! seed){seed = Date.now()}\n if(typeof seed != \"number\"){seed = parseInt(seed, 10)}\n if((seed !== 0 && ! seed) || isNaN(seed)){throw \"Bad seed\"}\n init_genrand(seed)\n }\n\n random.seed(seed)\n\n random.int31 = genrand_int31\n random.real1 = genrand_real1\n random.real2 = genrand_real2\n random.real3 = genrand_real3\n random.res53 = genrand_res53\n\n // Added for compatibility with Python\n random.getstate = function(){return [VERSION, mt, mti]}\n\n random.setstate = function(state){\n mt = state[1]\n mti = state[2]\n }\n\n return random\n\n}\n\n// magic constants\n\nvar NV_MAGICCONST = 4 * Math.exp(-0.5)/Math.sqrt(2),\n gauss_next = null,\n NV_MAGICCONST = 1.71552776992141,\n TWOPI = 6.28318530718,\n LOG4 = 1.38629436111989,\n SG_MAGICCONST = 2.50407739677627,\n VERSION = VERSION\n\nvar Random = $B.make_class(\"Random\",\n function(){\n return {\n __class__: Random,\n _random: RandomStream()\n }\n }\n)\n\nRandom._randbelow = function(self, x){\n return Math.floor(x * self._random())\n}\n\nRandom._urandom = function(self, n){\n /*\n urandom(n) -> str\n Return n random bytes suitable for cryptographic use.\n */\n\n var randbytes = []\n for(i = 0; i < n; i++){randbytes.push(parseInt(self._random() * 256))}\n return _b_.bytes.$factory(randbytes)\n}\n\nRandom.betavariate = function(){\n /* Beta distribution.\n\n Conditions on the parameters are alpha > 0 and beta > 0.\n Returned values range between 0 and 1.\n\n\n # This version due to Janne Sinkkonen, and matches all the std\n # texts (e.g., Knuth Vol 2 Ed 3 pg 134 \"the beta distribution\").\n */\n\n var $ = $B.args('betavariate', 3, {self: null, alpha:null, beta:null},\n ['self', 'alpha', 'beta'], arguments, {}, null, null),\n self = $.self,\n alpha = $.alpha,\n beta = $.beta\n\n var y = Random.gammavariate(self, alpha, 1)\n if(y == 0){return _b_.float.$factory(0)}\n else{return y / (y + Random.gammavariate(self, beta, 1))}\n}\n\nRandom.choice = function(){\n var $ = $B.args(\"choice\", 2,\n {self: null, seq:null},[\"self\", \"seq\"],arguments, {}, null, null),\n self = $.self,\n seq = $.seq\n var len, rank\n if(Array.isArray(seq)){len = seq.length}\n else{len = _b_.getattr(seq,\"__len__\")()}\n if(len == 0){\n throw _b_.IndexError.$factory(\"Cannot choose from an empty sequence\")\n }\n rank = parseInt(self._random() * len)\n if(Array.isArray(seq)){return seq[rank]}\n else{return _b_.getattr(seq, \"__getitem__\")(rank)}\n}\n\nRandom.choices = function(){\n var $ = $B.args(\"choices\", 3,\n {self: null,population:null, weights:null, cum_weights:null, k:null},\n [\"self\", \"population\", \"weights\", \"cum_weights\", \"k\"], arguments,\n {weights: _b_.None, cum_weights: _b_.None, k: 1}, 
\"*\", null),\n self = $.self,\n population = $.population,\n weights = $.weights,\n cum_weights = $.cum_weights,\n k = $.k\n\n if(population.length == 0){\n throw _b_.ValueError.$factory(\"population is empty\")\n }\n if(weights === _b_.None){\n weights = []\n population.forEach(function(){\n weights.push(1)\n })\n }else if(cum_weights !== _b_.None){\n throw _b_.TypeError.$factory(\"Cannot specify both weights and \" +\n \"cumulative weights\")\n }else{\n if(weights.length != population.length){\n throw _b_.ValueError.$factory('The number of weights does not ' +\n 'match the population')\n }\n }\n if(cum_weights === _b_.None){\n var cum_weights = [weights[0]]\n weights.forEach(function(weight, rank){\n if(rank > 0){\n cum_weights.push(cum_weights[rank - 1] + weight)\n }\n })\n }else if(cum_weights.length != population.length){\n throw _b_.ValueError.$factory('The number of weights does not ' +\n 'match the population')\n }\n\n var result = []\n for(var i = 0; i < k; i++){\n var rand = self._random() * cum_weights[cum_weights.length - 1]\n for(var rank = 0, len = population.length; rank < len; rank++){\n if(cum_weights[rank] > rand){\n result.push(population[rank])\n break\n }\n }\n }\n return result\n}\n\nRandom.expovariate = function(self, lambd){\n /*\n Exponential distribution.\n\n lambd is 1.0 divided by the desired mean. It should be\n nonzero. (The parameter would be called \"lambda\", but that is\n a reserved word in Python.) Returned values range from 0 to\n positive infinity if lambd is positive, and from negative\n infinity to 0 if lambd is negative.\n\n */\n // lambd: rate lambd = 1/mean\n // ('lambda' is a Python reserved word)\n\n // we use 1-random() instead of random() to preclude the\n // possibility of taking the log of zero.\n return -Math.log(1.0 - self._random()) / lambd\n}\n\nRandom.gammavariate = function(self, alpha, beta){\n /* Gamma distribution. Not the gamma function!\n\n Conditions on the parameters are alpha > 0 and beta > 0.\n\n The probability distribution function is:\n\n x ** (alpha - 1) * math.exp(-x / beta)\n pdf(x) = --------------------------------------\n math.gamma(alpha) * beta ** alpha\n\n */\n\n // alpha > 0, beta > 0, mean is alpha*beta, variance is alpha*beta**2\n\n // Warning: a few older sources define the gamma distribution in terms\n // of alpha > -1.0\n\n var $ = $B.args('gammavariate', 3,\n {self: null, alpha:null, beta:null},\n ['self', 'alpha', 'beta'],\n arguments, {}, null, null),\n self = $.self,\n alpha = $.alpha,\n beta = $.beta,\n LOG4 = Math.log(4),\n SG_MAGICCONST = 1.0 + Math.log(4.5)\n\n if(alpha <= 0.0 || beta <= 0.0){\n throw _b_.ValueError.$factory('gammavariate: alpha and beta must be > 0.0')\n }\n\n if(alpha > 1.0){\n\n // Uses R.C.H. Cheng, \"The generation of Gamma\n // variables with non-integral shape parameters\",\n // Applied Statistics, (1977), 26, No. 
1, p71-74\n\n var ainv = Math.sqrt(2.0 * alpha - 1.0),\n bbb = alpha - LOG4,\n ccc = alpha + ainv\n\n while(true){\n var u1 = self._random()\n if(!((1e-7 < u1) && (u1 < .9999999))){\n continue\n }\n var u2 = 1.0 - self._random(),\n v = Math.log(u1 / (1.0 - u1)) / ainv,\n x = alpha * Math.exp(v),\n z = u1 * u1 * u2,\n r = bbb + ccc * v - x\n if((r + SG_MAGICCONST - 4.5 * z >= 0.0) || r >= Math.log(z)){\n return x * beta\n }\n }\n }else if(alpha == 1.0){\n // expovariate(1)\n var u = self._random()\n while(u <= 1e-7){u = self._random()}\n return -Math.log(u) * beta\n }else{\n // alpha is between 0 and 1 (exclusive)\n\n // Uses ALGORITHM GS of Statistical Computing - Kennedy & Gentle\n\n while(true){\n var u = self._random(),\n b = (Math.E + alpha)/Math.E,\n p = b*u,\n x\n if(p <= 1.0){x = Math.pow(p, (1.0/alpha))}\n else{x = -Math.log((b-p)/alpha)}\n var u1 = self._random()\n if(p > 1.0){\n if(u1 <= Math.pow(x, alpha - 1.0)){\n break\n }\n }else if(u1 <= Math.exp(-x)){\n break\n }\n }\n return x * beta\n }\n}\n\nRandom.gauss = function(){\n\n /* Gaussian distribution.\n\n mu is the mean, and sigma is the standard deviation. This is\n slightly faster than the normalvariate() function.\n\n Not thread-safe without a lock around calls.\n\n # When x and y are two variables from [0, 1), uniformly\n # distributed, then\n #\n # cos(2*pi*x)*sqrt(-2*log(1-y))\n # sin(2*pi*x)*sqrt(-2*log(1-y))\n #\n # are two *independent* variables with normal distribution\n # (mu = 0, sigma = 1).\n # (Lambert Meertens)\n # (corrected version; bug discovered by Mike Miller, fixed by LM)\n\n # Multithreading note: When two threads call this function\n # simultaneously, it is possible that they will receive the\n # same return value. The window is very small though. To\n # avoid this, you have to use a lock around all calls. (I\n # didn't want to slow this down in the serial case by using a\n # lock here.)\n */\n\n var $ = $B.args('gauss', 3, {self: null, mu:null, sigma:null},\n ['self', 'mu', 'sigma'], arguments, {}, null, null),\n self = $.self,\n mu = $.mu,\n sigma = $.sigma\n\n var z = gauss_next\n gauss_next = null\n if(z === null){\n var x2pi = self._random() * Math.PI * 2,\n g2rad = Math.sqrt(-2.0 * Math.log(1.0 - self._random())),\n z = Math.cos(x2pi) * g2rad\n gauss_next = Math.sin(x2pi) * g2rad\n }\n return mu + z*sigma\n}\n\nRandom.getrandbits = function(){\n var $ = $B.args(\"getrandbits\", 2,\n {self: null, k:null},[\"self\", \"k\"],arguments, {}, null, null),\n self = $.self,\n k = $B.$GetInt($.k)\n // getrandbits(k) -> x. 
Generates a long int with k random bits.\n if(k <= 0){\n throw _b_.ValueError.$factory('number of bits must be greater than zero')\n }\n if(k != _b_.int.$factory(k)){\n throw _b_.TypeError.$factory('number of bits should be an integer')\n }\n var numbytes = (k + 7), // bits / 8 and rounded up\n x = _b_.int.from_bytes(Random._urandom(self, numbytes), 'big')\n return _b_.getattr(x, '__rshift__')(\n _b_.getattr(numbytes*8,'__sub__')(k))\n}\n\nRandom.getstate = function(){\n // Return internal state; can be passed to setstate() later.\n var $ = $B.args('getstate', 1, {self: null},\n [\"self\"], arguments, {}, null, null)\n return $.self._random.getstate()\n}\n\nRandom.lognormvariate = function(){\n /*\n Log normal distribution.\n\n If you take the natural logarithm of this distribution, you'll get a\n normal distribution with mean mu and standard deviation sigma.\n mu can have any value, and sigma must be greater than zero.\n\n */\n return Math.exp(Random.normalvariate.apply(null, arguments))\n}\n\nRandom.normalvariate = function(){\n /*\n Normal distribution.\n\n mu is the mean, and sigma is the standard deviation.\n\n */\n\n // mu = mean, sigma = standard deviation\n\n // Uses Kinderman and Monahan method. Reference: Kinderman,\n // A.J. and Monahan, J.F., \"Computer generation of random\n // variables using the ratio of uniform deviates\", ACM Trans\n // Math Software, 3, (1977), pp257-260.\n\n var $ = $B.args(\"normalvariate\", 3,\n {self: null, mu:null, sigma:null}, [\"self\", \"mu\", \"sigma\"],\n arguments, {}, null, null),\n self = $.self,\n mu = $.mu,\n sigma = $.sigma\n\n while(true){\n var u1 = self._random(),\n u2 = 1.0 - self._random(),\n z = NV_MAGICCONST * (u1 - 0.5) / u2,\n zz = z * z / 4.0\n if(zz <= -Math.log(u2)){break}\n }\n return mu + z * sigma\n}\n\nRandom.paretovariate = function(){\n /* Pareto distribution. alpha is the shape parameter.*/\n // Jain, pg. 495\n\n var $ = $B.args(\"paretovariate\", 2, {self: null, alpha:null},\n [\"self\", \"alpha\"], arguments, {}, null, null)\n\n var u = 1 - $.self._random()\n return 1 / Math.pow(u, 1 / $.alpha)\n}\n\nRandom.randint = function(self, a, b){\n var $ = $B.args('randint', 3,\n {self: null, a:null, b:null},\n ['self', 'a', 'b'],\n arguments, {}, null, null)\n if(! _b_.isinstance($.b, _b_.int)){\n throw _b_.ValueError.$factory(\"non-integer arg 1 for randrange()\")\n }\n return Random.randrange($.self, $.a, $.b + 1)\n}\n\nRandom.random = function(self){\n var res = self._random()\n if(! Number.isInteger(res)){return new Number(res)}\n return res\n}\n\nRandom.randrange = function(){\n var $ = $B.args('randrange', 4,\n {self: null, x:null, stop:null, step:null},\n ['self', 'x', 'stop', 'step'],\n arguments, {stop:null, step:null}, null, null),\n self = $.self,\n _random = self._random\n //console.log(\"randrange\", $)\n for(var i = 1, len = arguments.length; i < len; i++){\n if(! _b_.isinstance(arguments[i], _b_.int)){\n throw _b_.ValueError.$factory(\"non-integer arg \" + i +\n \" for randrange()\")\n }\n }\n if($.stop === null){\n var start = 0, stop = $.x, step = 1\n }else{\n var start = $.x, stop = $.stop,\n step = $.step === null ? 
1 : $.step\n if(step == 0){throw _b_.ValueError.$factory('step cannot be 0')}\n }\n if((step > 0 && start > stop) || (step < 0 && start < stop)){\n throw _b_.ValueError.$factory(\"empty range for randrange() (\" +\n start + \", \" + stop + \", \" + step + \")\")\n }\n if(typeof start == 'number' && typeof stop == 'number' &&\n typeof step == 'number'){\n return start + step * Math.floor(_random() *\n Math.ceil((stop - start) / step))\n }else{\n var d = _b_.getattr(stop, '__sub__')(start)\n d = _b_.getattr(d, '__floordiv__')(step)\n // Force d to be a LongInt\n d = $B.long_int.$factory(d)\n // d is a long integer with n digits ; to choose a random number\n // between 0 and d the most simple is to take a random digit\n // at each position, except the first one\n var s = d.value,\n _len = s.length,\n res = Math.floor(_random() * (parseInt(s.charAt(0)) +\n (_len == 1 ? 0 : 1))) + ''\n var same_start = res.charAt(0) == s.charAt(0)\n for(var i = 1; i < _len; i++){\n if(same_start){\n // If it's the last digit, don't allow stop as valid\n if(i == _len - 1){\n res += Math.floor(_random() * parseInt(s.charAt(i))) + ''\n }else{\n res += Math.floor(_random() *\n (parseInt(s.charAt(i)) + 1)) + ''\n same_start = res.charAt(i) == s.charAt(i)\n }\n }else{\n res += Math.floor(_random() * 10) + ''\n }\n }\n var offset = {__class__: $B.long_int, value: res,\n pos: true}\n d = _b_.getattr(step, '__mul__')(offset)\n d = _b_.getattr(start, '__add__')(d)\n return _b_.int.$factory(d)\n }\n}\n\nRandom.sample = function(){\n /*\n Chooses k unique random elements from a population sequence or set.\n\n Returns a new list containing elements from the population while\n leaving the original population unchanged. The resulting list is\n in selection order so that all sub-slices will also be valid random\n samples. This allows raffle winners (the sample) to be partitioned\n into grand prize and second place winners (the subslices).\n\n Members of the population need not be hashable or unique. If the\n population contains repeats, then each occurrence is a possible\n selection in the sample.\n\n To choose a sample in a range of integers, use range as an argument.\n This is especially fast and space efficient for sampling from a\n large population: sample(range(10000000), 60)\n\n # Sampling without replacement entails tracking either potential\n # selections (the pool) in a list or previous selections in a set.\n\n # When the number of selections is small compared to the\n # population, then tracking selections is efficient, requiring\n # only a small set and an occasional reselection. For\n # a larger number of selections, the pool tracking method is\n # preferred since the list takes less space than the\n # set and it doesn't suffer from frequent reselections.'\n\n */\n var $ = $B.args('sample', 3, {self: null, population: null,k: null},\n ['self', 'population','k'], arguments, {}, null, null),\n self = $.self,\n population = $.population,\n k = $.k\n\n if(!_b_.hasattr(population, '__len__')){\n throw _b_.TypeError.$factory(\"Population must be a sequence or set. 
\" +\n \"For dicts, use list(d).\")\n }\n var n = _b_.getattr(population, '__len__')()\n\n if(k < 0 || k > n){\n throw _b_.ValueError.$factory(\"Sample larger than population\")\n }\n var result = [],\n setsize = 21 // size of a small set minus size of an empty list\n if(k > 5){\n setsize += Math.pow(4, Math.ceil(Math.log(k * 3, 4))) // table size for big sets\n }\n if(n <= setsize){\n // An n-length list is smaller than a k-length set\n if(Array.isArray(population)){\n var pool = population.slice()\n }else{var pool = _b_.list.$factory(population)}\n for(var i = 0; i < k; i++){ //invariant: non-selected at [0,n-i)\n var j = Random._randbelow(self, n - i)\n result[i] = pool[j]\n pool[j] = pool[n - i - 1] // move non-selected item into vacancy\n }\n }else{\n selected = {}\n for(var i = 0; i < k; i++){\n var j = Random._randbelow(self, n)\n while(selected[j] !== undefined){\n j = Random._randbelow(self, n)\n }\n selected[j] = true\n result[i] = Array.isArray(population) ? population[j] :\n _b_.getattr(population, '__getitem__')(j)\n }\n }\n return result\n}\n\nRandom.seed = function(){\n /*\n Initialize internal state from hashable object.\n\n None or no argument seeds from current time or from an operating\n system specific randomness source if available.\n\n If *a* is an int, all bits are used.\n */\n var $ = $B.args('seed', 3, {self: null, a: null, version: null},\n ['self', 'a', 'version'],\n arguments, {a: new Date(), version: 2}, null, null),\n self = $.self,\n a = $.a,\n version = $.version\n\n if(version == 1){a = _b_.hash(a)}\n else if(version == 2){\n if(_b_.isinstance(a, _b_.str)){\n a = _b_.int.from_bytes(_b_.bytes.$factory(a, 'utf-8'), 'big')\n }else if(_b_.isinstance(a, [_b_.bytes, _b_.bytearray])){\n a = _b_.int.from_bytes(a, 'big')\n }else if(!_b_.isinstance(a, _b_.int)){\n throw _b_.TypeError.$factory('wrong argument')\n }\n if(a.__class__ === $B.long_int){\n // In this implementation, seed() only accepts safe integers\n // Generate a random one from the underlying string value,\n // using an arbitrary seed (99) to always return the same\n // integer\n var numbers = a.value,\n res = '',\n pos\n self._random.seed(99)\n for(var i = 0; i < 17; i++){\n pos = parseInt(self._random() * numbers.length)\n res += numbers.charAt(pos)\n }\n a = parseInt(res)\n }\n }else{\n throw ValueError.$factory('version can only be 1 or 2')\n }\n\n self._random.seed(a)\n gauss_next = null\n}\n\nRandom.setstate = function(state){\n // Restore internal state from object returned by getstate().\n var $ = $B.args('setstate', 2, {self: null, state:null}, ['self', 'state'],\n arguments, {}, null, null),\n self = $.self\n var state = self._random.getstate()\n if(! 
Array.isArray($.state)){\n throw _b_.TypeError.$factory('state must be a list, not ' +\n $B.class_name($.state))\n }\n if($.state.length < state.length){\n throw _b_.ValueError.$factory(\"need more than \" + $.state.length +\n \" values to unpack\")\n }else if($.state.length > state.length){\n throw _b_.ValueError.$factory(\"too many values to unpack (expected \" +\n state.length + \")\")\n }\n if($.state[0] != 3){\n throw _b_.ValueError.$factory(\"ValueError: state with version \" +\n $.state[0] + \" passed to Random.setstate() of version 3\")\n }\n var second = _b_.list.$factory($.state[1])\n if(second.length !== state[1].length){\n throw _b_.ValueError.$factory('state vector is the wrong size')\n }\n for(var i = 0; i < second.length; i++){\n if(typeof second[i] != 'number'){\n throw _b_.ValueError.$factory('state vector items must be integers')\n }\n }\n self._random.setstate($.state)\n}\n\nRandom.shuffle = function(x, random){\n /*\n x, random = random.random -> shuffle list x in place; return None.\n\n Optional arg random is a 0-argument function returning a random\n float in [0.0, 1.0); by default, the standard random.random.\n */\n\n var $ = $B.args('shuffle', 3, {self: null, x: null, random: null},\n ['self', 'x','random'],\n arguments, {random: null}, null, null),\n self = $.self,\n x = $.x,\n random = $.random\n\n if(random === null){random = self._random}\n\n if(Array.isArray(x)){\n for(var i = x.length - 1; i >= 0;i--){\n var j = Math.floor(random() * (i + 1)),\n temp = x[j]\n x[j] = x[i]\n x[i] = temp\n }\n }else{\n var len = _b_.getattr(x, '__len__')(), temp,\n x_get = _b_.getattr(x, '__getitem__'),\n x_set = _b_.getattr(x, '__setitem__')\n\n for(i = len - 1; i >= 0; i--){\n var j = Math.floor(random() * (i + 1)),\n temp = x_get(j)\n x_set(j, x_get(i))\n x_set(i, temp)\n }\n }\n}\n\nRandom.triangular = function(){\n /*\n Triangular distribution.\n\n Continuous distribution bounded by given lower and upper limits,\n and having a given mode value in-between.\n\n http://en.wikipedia.org/wiki/Triangular_distribution\n */\n var $ = $B.args('triangular', 4,\n {self: null, low: null, high: null, mode: null},\n ['self', 'low', 'high', 'mode'],\n arguments, {low: 0, high: 1, mode: null}, null, null),\n low = $.low,\n high = $.high,\n mode = $.mode\n\n var u = $.self._random(),\n c = mode === null ? 0.5 : (mode - low) / (high - low)\n if(u > c){\n u = 1 - u\n c = 1 - c\n var temp = low\n low = high\n high = temp\n }\n return low + (high - low) * Math.pow(u * c, 0.5)\n}\n\nRandom.uniform = function(){\n var $ = $B.args('uniform', 3, {self: null, a: null, b: null},\n ['self', 'a', 'b'], arguments, {}, null, null),\n a = $B.$GetInt($.a),\n b = $B.$GetInt($.b)\n\n return a + (b - a) * $.self._random()\n}\n\nRandom.vonmisesvariate = function(){\n /* Circular data distribution.\n\n mu is the mean angle, expressed in radians between 0 and 2*pi, and\n kappa is the concentration parameter, which must be greater than or\n equal to zero. 
If kappa is equal to zero, this distribution reduces\n to a uniform random angle over the range 0 to 2*pi.\n\n */\n // mu: mean angle (in radians between 0 and 2*pi)\n // kappa: concentration parameter kappa (>= 0)\n // if kappa = 0 generate uniform random angle\n\n // Based upon an algorithm published in: Fisher, N.I.,\n // \"Statistical Analysis of Circular Data\", Cambridge\n // University Press, 1993.\n\n // Thanks to Magnus Kessler for a correction to the\n // implementation of step 4.\n\n var $ = $B.args('vonmisesvariate', 3,\n {self: null, mu: null, kappa:null}, ['self', 'mu', 'kappa'],\n arguments, {}, null, null),\n self = $.self,\n mu = $.mu,\n kappa = $.kappa,\n TWOPI = 2*Math.PI\n\n if(kappa <= 1e-6){return TWOPI * self._random()}\n\n var s = 0.5 / kappa,\n r = s + Math.sqrt(1.0 + s * s)\n\n while(true){\n var u1 = self._random(),\n z = Math.cos(Math.PI * u1),\n d = z / (r + z),\n u2 = self._random()\n if((u2 < 1.0 - d * d) ||\n (u2 <= (1.0 - d) * Math.exp(d))){\n break\n }\n }\n var q = 1.0 / r,\n f = (q + z) / (1.0 + q * z),\n u3 = self._random()\n if(u3 > 0.5){var theta = (mu + Math.acos(f)) % TWOPI}\n else{var theta = (mu - Math.acos(f)) % TWOPI}\n return theta\n}\n\nRandom.weibullvariate = function(){\n /*Weibull distribution.\n\n alpha is the scale parameter and beta is the shape parameter.\n\n */\n // Jain, pg. 499; bug fix courtesy Bill Arms\n var $ = $B.args(\"weibullvariate\", 3,\n {self: null, alpha: null, beta: null},\n [\"self\", \"alpha\", \"beta\"], arguments, {}, null, null)\n\n var u = 1 - $.self._random()\n return $.alpha * Math.pow(-Math.log(u), 1 / $.beta)\n}\n\n$B.set_func_names(Random, \"random\")\n\nvar $module = Random.$factory()\nfor(var attr in Random){\n $module[attr] = (function(x){\n return function(){return Random[x]($module, ...arguments)}\n })(attr)\n $module[attr].$infos = Random[attr].$infos\n}\n\n$module.Random = Random\n\nvar SystemRandom = $B.make_class(\"SystemRandom\",\n function(){\n return {__class__: SystemRandom}\n }\n)\nSystemRandom.__getattribute__ = function(){\n throw $B.builtins.NotImplementedError.$factory()\n}\n\n$module.SystemRandom = SystemRandom\n\nreturn $module\n\n})(__BRYTHON__)\n\n"], "unicodedata": [".js", "// Implementation of unicodedata\n\nvar $module = (function($B){\n\n var _b_ = $B.builtins\n\n // Load unicode table if not already loaded\n if($B.unicodedb === undefined){\n var xhr = new XMLHttpRequest\n xhr.open(\"GET\",\n $B.brython_path + \"unicode.txt\", false)\n xhr.onreadystatechange = function(){\n if(this.readyState == 4){\n if(this.status == 200){\n $B.unicodedb = this.responseText\n }else{\n console.log(\"Warning - could not \" +\n \"load unicode.txt\")\n }\n }\n }\n xhr.send()\n }\n\n function _info(chr){\n var ord = _b_.ord(chr),\n hex = ord.toString(16).toUpperCase()\n while(hex.length < 4){hex = \"0\" + hex}\n var re = new RegExp(\"^\" + hex +\";(.+?);(.*?);(.*?);(.*?);(.*?);(.*);(.*);(.*)$\",\n \"m\"),\n search = re.exec($B.unicodedb)\n if(search === null){\n console.log(\"null\", re)\n return null\n }else{\n return {\n name: search[1],\n category: search[2],\n combining: search[3],\n bidirectional: search[4],\n decomposition: search[5],\n decimal: search[6],\n digit: search[7],\n numeric: search[8]\n }\n }\n }\n\n function bidirectional(chr){\n var search = _info(chr)\n if(search === null){\n console.log(\"error\", chr, hex)\n throw _b_.KeyError.$factory(chr)\n }\n return search.bidirectional\n }\n\n function category(chr){\n // Returns the general category assigned to the character chr as\n // 
string.\n var search = _info(chr)\n if(search === null){\n console.log(\"error\", chr, hex)\n throw _b_.KeyError.$factory(chr)\n }\n return search.category\n }\n\n function combining(chr){\n // Returns the general category assigned to the character chr as\n // string.\n var search = _info(chr)\n if(search === null){\n console.log(\"error\", chr)\n throw _b_.KeyError.$factory(chr)\n }\n return parseInt(search.combining)\n }\n\n function decimal(chr, _default){\n // Returns the decimal value assigned to the character chr as integer.\n // If no such value is defined, default is returned, or, if not given,\n // ValueError is raised.\n var search = _info(chr)\n if(search === null){\n console.log(\"error\", chr)\n throw _b_.KeyError.$factory(chr)\n }\n return parseInt(search.decimal)\n }\n\n function decomposition(chr, _default){\n // Returns the decimal value assigned to the character chr as integer.\n // If no such value is defined, default is returned, or, if not given,\n // ValueError is raised.\n var search = _info(chr)\n if(search === null){\n console.log(\"error\", chr)\n throw _b_.KeyError.$factory(chr)\n }\n return search.decomposition\n }\n\n function digit(chr, _default){\n // Returns the decimal value assigned to the character chr as integer.\n // If no such value is defined, default is returned, or, if not given,\n // ValueError is raised.\n var search = _info(chr)\n if(search === null){\n console.log(\"error\", chr)\n throw _b_.KeyError.$factory(chr)\n }\n return parseInt(search.digit)\n }\n\n function lookup(name){\n // Look up character by name. If a character with the given name is\n // found, return the corresponding character. If not found, KeyError\n // is raised.\n var re = new RegExp(\"^([0-9A-F]+);\" +\n name + \";(.*)$\", \"m\")\n search = re.exec($B.unicodedb)\n if(search === null){\n throw _b_.KeyError.$factory(\"undefined character name '\" +\n name + \"'\")\n }\n var res = parseInt(search[1], 16)\n return _b_.chr(res)\n }\n\n function name(chr, _default){\n // Returns the name assigned to the character chr as a string. If no\n // name is defined, default is returned, or, if not given, ValueError\n // is raised.\n var search = _info(chr)\n if(search === null){\n if(_default){return _default}\n throw _b_.KeyError.$factory(\"undefined character name '\" +\n chr + \"'\")\n }\n return search.name\n }\n\n function normalize(form, unistr){\n var search = _info(unistr)\n if(search === null){\n throw _b_.KeyError.$factory(unistr)\n }\n switch(form){\n case \"NFC\":\n return unistr\n case \"NFD\":\n var decomp = decomposition(unistr),\n parts = decomp.split(\" \"),\n res = \"\"\n if(parts[0].startsWith(\"<\")){\n return unistr\n }\n parts.forEach(function(part){\n if(! 
part.startsWith(\"<\")){\n res += _b_.chr(parseInt(part, 16))\n }\n })\n return res\n case \"NFKC\":\n var decomp = decomposition(unistr),\n parts = decomp.split(\" \")\n if(parts[0] == \"\"){\n var res = \"\"\n parts.slice(1).forEach(function(part){\n res += _b_.chr(parseInt(part, 16))\n })\n return res\n }\n return unistr\n case \"NFKD\":\n var decomp = decomposition(unistr),\n parts = decomp.split(\" \")\n if(parts[0] == \"\"){\n var res = \"\"\n parts.slice(1).forEach(function(part){\n res += _b_.chr(parseInt(part, 16))\n })\n return res\n }\n return unistr\n\n default:\n throw _b_.ValueError.$factory(\"invalid normalization form\")\n }\n }\n\n function numeric(chr, _default){\n // Returns the decimal value assigned to the character chr as integer.\n // If no such value is defined, default is returned, or, if not given,\n // ValueError is raised.\n var search = _info(chr)\n if(search === null){\n if(_default){return _default}\n throw _b_.KeyError.$factory(chr)\n }\n return new Number(eval(search.numeric))\n }\n\n return {\n bidirectional: bidirectional,\n category: category,\n combining: combining,\n decimal: decimal,\n decomposition: decomposition,\n digit: digit,\n lookup: lookup,\n name: name,\n normalize: normalize,\n numeric: numeric,\n unidata_version: \"11.0.0\"\n }\n\n})(__BRYTHON__)"], "_aio": [".js", "// Replacement for asyncio.\n//\n// CPython asyncio can't be implemented for Brython because it relies on\n// blocking function (eg run(), run_until_complete()), and such functions\n// can't be defined in Javascript. It also manages an event loop, and a\n// browser only has its own built-in event loop.\n//\n// This module exposes functions whose result can be \"await\"-ed inside\n// asynchrounous functions defined by \"async def\".\n\nvar $module = (function($B){\n\nvar _b_ = $B.builtins\n\nvar responseType = {\n \"text\": \"text\",\n \"binary\": \"arraybuffer\",\n \"dataURL\": \"arraybuffer\"\n}\n\nfunction handle_kwargs(kw, method){\n var data,\n cache = \"no-cache\",\n format = \"text\",\n headers = {},\n timeout = {}\n for(var key in kw.$string_dict){\n if(key == \"data\"){\n var params = kw.$string_dict[key]\n if(typeof params == \"string\"){\n data = params\n }else{\n if(params.__class__ !== _b_.dict){\n throw _b_.TypeError.$factory(\"wrong type for data, \" +\n \"expected dict or str, got \" + $B.class_name(params))\n }\n params = params.$string_dict\n var items = []\n for(var key in params){\n items.push(encodeURIComponent(key) + \"=\" +\n encodeURIComponent(params[key]))\n }\n data = items.join(\"&\")\n }\n }else if(key == \"headers\"){\n headers = kw.$string_dict[key].$string_dict\n }else if(key.startsWith(\"on\")){\n var event = key.substr(2)\n if(event == \"timeout\"){\n timeout.func = kw.$string_dict[key]\n }else{\n ajax.bind(self, event, kw.$string_dict[key])\n }\n }else if(key == \"timeout\"){\n timeout.seconds = kw.$string_dict[key]\n }else if(key == \"cache\"){\n cache = kw.$string_dict[key]\n }else if(key == \"format\"){\n format = kw.$string_dict[key]\n }\n }\n if(method == \"post\"){\n // For POST requests, set default header\n if(! 
headers.hasOwnProperty(\"Content-type\")){\n headers[\"Content-Type\"] = \"application/x-www-form-urlencoded\"\n }\n if(data && !headers.hasOwnProperty(\"Content-Length\")){\n headers[\"Content-Length\"] = data.length\n }\n }\n return {\n body: data,\n cache: cache,\n format: format,\n timeout: timeout,\n headers: headers\n }\n}\n\nfunction ajax(){\n var $ = $B.args(\"ajax\", 2, {method: null, url: null},\n [\"method\", \"url\"], arguments, {},\n null, \"kw\"),\n method = $.method,\n url = $.url,\n kw = $.kw\n var args = handle_kwargs(kw, \"get\")\n if(! args.cache){\n url = \"?ts\" + (new Date()).getTime() + \"=0\"\n }\n if(args.body){\n url = url + (args.cache ? \"?\" : \"&\") + args.body\n }\n return {\n __class__: $B.coroutine,\n $args: [url, args],\n $func: function(){\n return new Promise(function(resolve, reject){\n var xhr = new XMLHttpRequest()\n xhr.open(method, url, true)\n for(key in args.headers){\n xhr.setRequestHeader(key, args.headers[key])\n }\n xhr.format = args.format\n xhr.responseType = responseType[args.format]\n xhr.onreadystatechange = function(){\n if(this.readyState == 4){\n this.__class__ = HTTPRequest\n resolve(this)\n }\n }\n xhr.send()\n })\n }\n }\n}\n\nfunction event(){\n // event(element, name) is a Promise on the event \"name\" happening on the\n // element. This promise always resolves (never rejects) with the DOM event.\n var $ = $B.args(\"event\", 2, {element: null, name: null},\n [\"element\", \"name\"], arguments, {}, null, null),\n element = $.element,\n name = $.name\n return new Promise(function(resolve){\n element.elt.addEventListener(name, function(evt){\n resolve($B.$DOMEvent(evt))\n })\n })\n}\n\nvar HTTPRequest = $B.make_class(\"Request\")\n\nHTTPRequest.data = _b_.property.$factory(function(self){\n if(self.format == \"binary\"){\n var view = new Uint8Array(self.response)\n return _b_.bytes.$factory(Array.from(view))\n }else if(self.format == \"text\"){\n return self.responseText\n }else if(self.format == \"dataURL\"){\n var base64String = btoa(String.fromCharCode.apply(null,\n new Uint8Array(self.response)))\n return \"data:\" + self.getResponseHeader(\"Content-Type\") +\n \";base64,\" + base64String\n }\n})\n\nHTTPRequest.response_headers = _b_.property.$factory(function(self){\n var headers = self.getAllResponseHeaders()\n if(headers === null){return _b_.None}\n var res = _b_.dict.$factory()\n if(headers.length > 0){\n // Convert the header string into an array\n // of individual headers\n var lines = headers.trim().split(/[\\r\\n]+/)\n // Create a map of header names to values\n lines.forEach(function(line){\n var parts = line.split(': ')\n var header = parts.shift()\n var value = parts.join(': ')\n res.$string_dict[header] = value\n })\n }\n return res\n})\n\nfunction get(){\n var args = [\"GET\"]\n for(var i = 0, len = arguments.length; i < len; i++){\n args.push(arguments[i])\n }\n return ajax.apply(null, args)\n}\n\nfunction iscoroutine(f){\n return f.__class__ === $B.coroutine\n}\n\nfunction iscoroutinefunction(f){\n return (f.$infos.__code__.co_flags & 128) != 0\n}\n\nfunction post(){\n var args = [\"POST\"]\n for(var i = 0, len = arguments.length; i < len; i++){\n args.push(arguments[i])\n }\n return ajax.apply(null, args)\n}\n\nfunction run(coro){\n var handle_success = function(){\n $B.leave_frame()\n },\n handle_error = function(ev){\n var err_msg = \"Traceback (most recent call last):\\n\"\n err_msg += $B.print_stack(ev.$stack)\n err_msg += \"\\n\" + ev.__class__.$infos.__name__ +\n ': ' + ev.args[0]\n 
$B.builtins.print(err_msg)\n throw ev\n }\n\n var $ = $B.args(\"run\", 3, {coro: null, onsuccess: null, onerror: null},\n [\"coro\", \"onsuccess\", \"onerror\"], arguments,\n {onsuccess: handle_success, onerror: handle_error},\n null, null),\n coro = $.coro,\n onsuccess = $.onsuccess,\n onerror = $.onerror\n\n if(onerror !== handle_error){\n function error_func(exc){\n try{\n onerror(exc)\n }catch(err){\n handle_error(err)\n }\n }\n }else{\n error_func = handle_error\n }\n // Add top frame a second time to get the correct frame when the async\n // function exits\n $B.frames_stack.push($B.last($B.frames_stack))\n $B.coroutine.send(coro).then(onsuccess).catch(error_func)\n return _b_.None\n}\n\nfunction sleep(seconds){\n return {\n __class__: $B.coroutine,\n $args: [seconds],\n $func: function(){\n return new Promise(resolve => setTimeout(resolve, 1000 * seconds))\n }\n }\n}\n\nreturn {\n ajax: ajax,\n event: event,\n get: get,\n iscoroutine: iscoroutine,\n iscoroutinefunction: iscoroutinefunction,\n post: post,\n run: run,\n sleep: sleep\n}\n\n})(__BRYTHON__)\n"], "_ajax": [".js", "// ajax\nvar $module = (function($B){\n\neval($B.InjectBuiltins())\nvar $N = $B.builtins.None,\n _b_ = $B.builtins\n\nvar add_to_res = function(res, key, val) {\n if(isinstance(val, list)){\n for (j = 0; j < val.length; j++) {\n add_to_res(res, key, val[j])\n }\n }else if (val instanceof File || val instanceof Blob){\n res.append(key, val)\n }else{res.append(key,str.$factory(val))}\n}\n\nfunction set_timeout(self, timeout){\n if(timeout.seconds !== undefined){\n self.js.$requestTimer = setTimeout(\n function() {\n self.js.abort()\n if(timeout.func){\n timeout.func()\n }\n },\n timeout.seconds * 1000)\n }\n}\n\nfunction _read(req){\n var xhr = req.js,\n res\n if(xhr.responseType == \"arraybuffer\"){\n var abuf = new Uint8Array(xhr.response)\n res = []\n for(var i = 0, len = abuf.length; i < len; i++){\n res.push(abuf[i])\n }\n return _b_.bytes.$factory(res)\n }else{\n return xhr.responseText\n }\n}\n\nfunction handle_kwargs(self, kw, method){\n var data,\n headers,\n cache,\n mode,\n timeout = {}\n for(var key in kw.$string_dict){\n if(key == \"data\"){\n var params = kw.$string_dict[key]\n if(typeof params == \"string\"){\n data = params\n }else{\n if(params.__class__ !== _b_.dict){\n throw _b_.TypeError.$factory(\"wrong type for data, \" +\n \"expected dict or str, got \" + $B.class_name(params))\n }\n params = params.$string_dict\n var items = []\n for(var key in params){\n items.push(encodeURIComponent(key) + \"=\" +\n encodeURIComponent(params[key]))\n }\n data = items.join(\"&\")\n }\n }else if(key==\"headers\"){\n headers = kw.$string_dict[key].$string_dict\n for(var key in headers){\n self.js.setRequestHeader(key, headers[key])\n }\n }else if(key.startsWith(\"on\")){\n var event = key.substr(2)\n if(event == \"timeout\"){\n timeout.func = kw.$string_dict[key]\n }else{\n var f = kw.$string_dict[key]\n ajax.bind(self, event, f)\n }\n }else if(key == \"mode\"){\n if(kw.$string_dict[key] == \"binary\"){\n self.js.responseType = \"arraybuffer\"\n mode = \"binary\"\n }\n }else if(key == \"timeout\"){\n timeout.seconds = kw.$string_dict[key]\n }else if(key == \"cache\"){\n cache = kw.$string_dict[key]\n }\n }\n if(method == \"post\" && ! 
headers){\n // For POST requests, set default header\n self.js.setRequestHeader(\"Content-type\",\n \"application/x-www-form-urlencoded\")\n }\n return {cache: cache, data:data, mode: mode, timeout: timeout}\n}\n\nvar ajax = {\n __class__: _b_.type,\n __mro__: [$B.JSObject, _b_.object],\n\n __getattribute__ : function(self, attr){\n // Special case for send : accept dict as parameters\n if(attr == 'send'){\n return function(params){\n return ajax.send(self, params)\n }\n }else if(attr == 'xml'){ // alias\n attr = 'responseXML'\n }\n // Otherwise default to JSObject method\n return $B.JSObject.__getattribute__(self, attr)\n },\n\n __repr__ : function(self){return ''},\n __str__ : function(self){return ''},\n\n $infos: {\n __module__: \"builtins\",\n __name__: \"ajax\"\n },\n\n bind : function(self, evt, func){\n // req.bind(evt,func) is the same as req.onevt = func\n self.js['on' + evt] = function(){\n try{\n return func.apply(null, arguments)\n }catch(err){\n if(err.__class__ !== undefined){\n var msg = _b_.getattr(err, 'info') +\n '\\n' + err.__class__.$infos.__name__\n if(err.args){msg += ': ' + err.args[0]}\n try{getattr($B.stderr, \"write\")(msg)}\n catch(err){console.log(msg)}\n }else{\n try{getattr($B.stderr, \"write\")(err)}\n catch(err1){console.log(err)}\n }\n }\n }\n return $N\n },\n\n send : function(self, params){\n // params can be Python dictionary or string\n var res = ''\n if(!params){\n self.js.send()\n return $N\n }else if(isinstance(params, str)){\n res = params\n }else if(isinstance(params, dict)){\n if(self.headers['content-type'] == 'multipart/form-data'){\n // The FormData object serializes the data in the 'multipart/form-data'\n // content-type so we may as well override that header if it was set\n // by the user.\n res = new FormData()\n var items = _b_.list.$factory(_b_.dict.items(params))\n for(var i = 0, len = items.length; i < len; i++){\n add_to_res(res, str.$factory(items[i][0]), items[i][1])\n }\n }else{\n var items = _b_.list.$factory(_b_.dict.items(params))\n for(var i = 0, len = items.length; i < len; i++){\n var key = encodeURIComponent(str.$factory(items[i][0]));\n if(isinstance(items[i][1], list)){\n for (j = 0; j < items[i][1].length; j++) {\n res += key +'=' +\n encodeURIComponent(str.$factory(items[i][1][j])) + '&'\n }\n }else{\n res += key + '=' +\n encodeURIComponent(str.$factory(items[i][1])) + '&'\n }\n }\n res = res.substr(0, res.length - 1)\n }\n }else{\n throw _b_.TypeError(\"send() argument must be string or dictionary, not '\" +\n str.$factory(params.__class__) + \"'\")\n }\n self.js.send(res)\n return $N\n },\n\n set_header : function(self,key,value){\n self.js.setRequestHeader(key,value)\n self.headers[key.toLowerCase()] = value.toLowerCase()\n },\n\n set_timeout : function(self, seconds, func){\n self.js.$requestTimer = setTimeout(\n function() {self.js.abort();func()},\n seconds * 1000)\n }\n}\n\najax.$factory = function(){\n\n if(window.XMLHttpRequest){// code for IE7+, Firefox, Chrome, Opera, Safari\n var xmlhttp = new XMLHttpRequest()\n }else{// code for IE6, IE5\n var xmlhttp = new ActiveXObject(\"Microsoft.XMLHTTP\")\n }\n xmlhttp.onreadystatechange = function(){\n // here, \"this\" refers to xmlhttp\n var state = this.readyState\n if(this.responseType == \"\" || this.responseType == \"text\"){\n res.js.text = this.responseText\n }\n var timer = this.$requestTimer\n if(state == 0 && this.onuninitialized){this.onuninitialized(res)}\n else if(state == 1 && this.onloading){this.onloading(res)}\n else if(state == 2 && 
this.onloaded){this.onloaded(res)}\n else if(state == 3 && this.oninteractive){this.oninteractive(res)}\n else if(state == 4 && this.oncomplete){\n if(timer !== null){window.clearTimeout(timer)}\n this.oncomplete(res)\n }\n }\n var res = {\n __class__: ajax,\n js: xmlhttp,\n headers: {}\n }\n return res\n}\n\nfunction get(){\n var $ = $B.args(\"get\", 2, {url: null, async: null},\n [\"url\", \"async\"], arguments, {async: true},\n null, \"kw\"),\n url = $.url,\n async = $.async,\n kw = $.kw\n var self = ajax.$factory(),\n items = handle_kwargs(self, kw, \"get\"),\n qs = items.data,\n timeout = items.timeout\n set_timeout(self, timeout)\n if(qs){\n url += \"?\" + qs\n }\n if(! (items.cache === true)){\n url += (qs ? \"&\" : \"?\") + (new Date()).getTime()\n }\n // Add function read() to return str or bytes according to mode\n self.js.read = function(){\n return _read(self)\n }\n self.js.open(\"GET\", url, async)\n self.js.send()\n}\n\nfunction post(){\n var $ = $B.args(\"get\", 2, {url: null, async: null},\n [\"url\", \"async\"], arguments, {async: true},\n null, \"kw\"),\n url = $.url,\n async = $.async,\n kw = $.kw,\n data\n var self = ajax.$factory()\n self.js.open(\"POST\", url, async)\n var items = handle_kwargs(self, kw, \"post\"),\n data = items.data,\n timeout = items.timeout\n set_timeout(self, timeout)\n // Add function read() to return str or bytes according to mode\n self.js.read = function(){\n return _read(self)\n }\n self.js.send(data)\n}\n\n$B.set_func_names(ajax)\n\nreturn {ajax: ajax, Ajax: ajax, get: get, post: post}\n\n})(__BRYTHON__)\n"], "_base64": [".js", "var $module=(function($B){\n\nvar _b_ = $B.builtins,\n _keyStr = \"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=\"\n\nfunction make_alphabet(altchars){\n var alphabet = _keyStr\n if(altchars !== undefined && altchars !== _b_.None){\n // altchars is an instance of Python bytes\n var source = altchars.source\n alphabet = alphabet.substr(0,alphabet.length-3) +\n _b_.chr(source[0]) + _b_.chr(source[1]) + '='\n }\n return alphabet\n}\n\nvar Base64 = {\n error: function(){return 'binascii_error'},\n\n encode: function(bytes, altchars){\n\n var input = bytes.source,\n output = \"\",\n chr1, chr2, chr3, enc1, enc2, enc3, enc4\n var i = 0\n\n var alphabet = make_alphabet(altchars)\n\n while(i < input.length){\n\n chr1 = input[i++]\n chr2 = input[i++]\n chr3 = input[i++]\n\n enc1 = chr1 >> 2\n enc2 = ((chr1 & 3) << 4) | (chr2 >> 4)\n enc3 = ((chr2 & 15) << 2) | (chr3 >> 6)\n enc4 = chr3 & 63\n\n if(isNaN(chr2)){\n enc3 = enc4 = 64\n }else if(isNaN(chr3)){\n enc4 = 64\n }\n\n output = output + alphabet.charAt(enc1) +\n alphabet.charAt(enc2) +\n alphabet.charAt(enc3) +\n alphabet.charAt(enc4)\n\n }\n return _b_.bytes.$factory(output, 'utf-8', 'strict')\n },\n\n\n decode: function(bytes, altchars, validate){\n var output = [],\n chr1, chr2, chr3,\n enc1, enc2, enc3, enc4\n\n var alphabet = make_alphabet(altchars)\n\n var input = bytes.source\n\n // If validate is set, check that all characters in input\n // are in the alphabet\n var _input = ''\n var padding = 0\n for(var i = 0, len = input.length; i < len; i++){\n var car = String.fromCharCode(input[i])\n var char_num = alphabet.indexOf(car)\n if(char_num == -1){\n if(validate){throw Base64.error(\"Non-base64 digit found: \" +\n car)}\n }else if(char_num == 64 && i < input.length - 2){\n if(validate){throw Base64.error(\"Non-base64 digit found: \" +\n car)}\n }else if(char_num == 64 && i >= input.length - 2){\n padding++\n _input += car\n }else{\n _input += 
car\n }\n }\n input = _input\n if(_input.length == padding){return _b_.bytes.$factory([])}\n if( _input.length % 4 > 0){throw Base64.error(\"Incorrect padding\")}\n\n var i = 0\n while(i < input.length){\n\n enc1 = alphabet.indexOf(input.charAt(i++))\n enc2 = alphabet.indexOf(input.charAt(i++))\n enc3 = alphabet.indexOf(input.charAt(i++))\n enc4 = alphabet.indexOf(input.charAt(i++))\n\n chr1 = (enc1 << 2) | (enc2 >> 4)\n chr2 = ((enc2 & 15) << 4) | (enc3 >> 2)\n chr3 = ((enc3 & 3) << 6) | enc4\n\n output.push(chr1)\n\n if(enc3 != 64){output.push(chr2)}\n if(enc4 != 64){output.push(chr3)}\n\n }\n // return Python bytes\n return _b_.bytes.$factory(output, 'utf-8', 'strict')\n\n },\n\n _utf8_encode: function(string) {\n string = string.replace(/\\r\\n/g, \"\\n\")\n var utftext = \"\";\n\n for(var n = 0; n < string.length; n++){\n\n var c = string.charCodeAt(n)\n\n if(c < 128){\n utftext += String.fromCharCode(c)\n }else if((c > 127) && (c < 2048)){\n utftext += String.fromCharCode((c >> 6) | 192)\n utftext += String.fromCharCode((c & 63) | 128)\n }else{\n utftext += String.fromCharCode((c >> 12) | 224)\n utftext += String.fromCharCode(((c >> 6) & 63) | 128)\n utftext += String.fromCharCode((c & 63) | 128)\n }\n\n }\n\n return utftext\n },\n\n _utf8_decode: function(utftext) {\n var string = \"\",\n i = 0,\n c = c1 = c2 = 0\n\n while(i < utftext.length){\n\n c = utftext.charCodeAt(i)\n\n if(c < 128){\n string += String.fromCharCode(c)\n i++\n }else if((c > 191) && (c < 224)){\n c2 = utftext.charCodeAt(i + 1)\n string += String.fromCharCode(((c & 31) << 6) | (c2 & 63))\n i += 2\n }else{\n c2 = utftext.charCodeAt(i + 1)\n c3 = utftext.charCodeAt(i + 2)\n string += String.fromCharCode(\n ((c & 15) << 12) | ((c2 & 63) << 6) | (c3 & 63))\n i += 3\n }\n\n }\n\n return string\n }\n\n}\n\nreturn {Base64:Base64}\n}\n\n)(__BRYTHON__)"], "_binascii": [".js", "var $module=(function($B){\n\nvar _b_ = $B.builtins,\n _keyStr = \"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=\"\n\nvar error = $B.make_class(\"error\", _b_.Exception.$factory)\nerror.__bases__ = [_b_.Exception]\n$B.set_func_names(error, \"binascii\")\n\nfunction decode(bytes, altchars, validate){\n var output = [],\n chr1, chr2, chr3,\n enc1, enc2, enc3, enc4\n\n var alphabet = make_alphabet(altchars)\n\n var input = bytes.source\n\n // If validate is set, check that all characters in input\n // are in the alphabet\n var _input = ''\n var padding = 0\n for(var i = 0, len = input.length; i < len; i++){\n var car = String.fromCharCode(input[i])\n var char_num = alphabet.indexOf(car)\n if(char_num == -1){\n if(validate){throw error.$factory(\"Non-base64 digit found: \" +\n car)}\n }else if(char_num == 64 && i < input.length - 2){\n if(validate){throw error.$factory(\"Non-base64 digit found: \" +\n car)}\n }else if(char_num == 64 && i >= input.length - 2){\n padding++\n _input += car\n }else{\n _input += car\n }\n }\n input = _input\n if(_input.length == padding){return _b_.bytes.$factory([])}\n if( _input.length % 4 > 0){throw error.$factory(\"Incorrect padding\")}\n\n var i = 0\n while(i < input.length){\n\n enc1 = alphabet.indexOf(input.charAt(i++))\n enc2 = alphabet.indexOf(input.charAt(i++))\n enc3 = alphabet.indexOf(input.charAt(i++))\n enc4 = alphabet.indexOf(input.charAt(i++))\n\n chr1 = (enc1 << 2) | (enc2 >> 4)\n chr2 = ((enc2 & 15) << 4) | (enc3 >> 2)\n chr3 = ((enc3 & 3) << 6) | enc4\n\n output.push(chr1)\n\n if(enc3 != 64){output.push(chr2)}\n if(enc4 != 64){output.push(chr3)}\n\n }\n // return Python bytes\n return 
_b_.bytes.$factory(output, 'utf-8', 'strict')\n\n}\n\nfunction make_alphabet(altchars){\n var alphabet = _keyStr\n if(altchars !== undefined && altchars !== _b_.None){\n // altchars is an instance of Python bytes\n var source = altchars.source\n alphabet = alphabet.substr(0,alphabet.length-3) +\n _b_.chr(source[0]) + _b_.chr(source[1]) + '='\n }\n return alphabet\n}\n\nvar module = {\n a2b_base64: function(){\n var $ = $B.args(\"a2b_base64\", 1, {s: null}, ['s'],\n arguments, {}, null, null)\n return decode($.s)\n },\n b2a_base64: function(){\n var $ = $B.args(\"b2a_base64\", 1, {data: null}, ['data'],\n arguments, {}, null, \"kw\")\n var newline = false\n if($.kw && $.kw.$string_dict){\n newline = $.kw.$string_dict[\"newline\"]\n }\n\n var string = $B.to_bytes($.data),\n res = btoa(String.fromCharCode.apply(null, string))\n if(newline){res += \"\\n\"}\n return _b_.bytes.$factory(res, \"ascii\")\n },\n b2a_hex: function(obj){\n var string = $B.to_bytes(obj),\n res = []\n function conv(c){\n if(c > 9){\n c = c + 'a'.charCodeAt(0) - 10\n }else{\n c = c + '0'.charCodeAt(0)\n }\n return c\n }\n string.forEach(function(char){\n res.push(conv((char >> 4) & 0xf))\n res.push(conv(char & 0xf))\n })\n return _b_.bytes.$factory(res, \"ascii\")\n },\n b2a_uu: function(obj){\n var string = $B.to_bytes(obj)\n var len = string.length,\n res = String.fromCharCode((0x20 + len) & 0x3F)\n while(string.length > 0){\n var s = string.slice(0, 3)\n while(s.length < 3){s.push(String.fromCharCode(0))}\n var A = s[0],\n B = s[1],\n C = s[2]\n var a = (A >> 2) & 0x3F,\n b = ((A << 4) | ((B >> 4) & 0xF)) & 0x3F,\n c = (((B << 2) | ((C >> 6) & 0x3)) & 0x3F),\n d = C & 0x3F\n res += String.fromCharCode(0x20 + a, 0x20 + b, 0x20 + c, 0x20 + d)\n string = string.slice(3)\n }\n return _b_.bytes.$factory(res + \"\\n\", \"ascii\")\n },\n error: error\n}\n\nmodule.hexlify = module.b2a_hex\n\nreturn module\n}\n)(__BRYTHON__)"], "_jsre": [".js", "var $module=(function($B){\n\n var _b_ = $B.builtins\n var $s = []\n for(var $b in _b_) $s.push('var ' + $b +'=_b_[\"' + $b + '\"]')\n eval($s.join(';'))\n\n var JSObject = $B.JSObject\n\n var obj = {__class__: $module,\n __str__: function(){return \"\"}\n }\n obj.A = obj.ASCII = 256\n obj.I = obj.IGNORECASE = 2 // 'i'\n obj.L = obj.LOCALE = 4\n obj.M = obj.MULTILINE = 8 // 'm'\n obj.S = obj.DOTALL = 16\n obj.U = obj.UNICODE = 32\n obj.X = obj.VERBOSE = 64\n obj._is_valid = function(pattern) {\n if ($B.$options.re == 'pyre'){return false} //force use of python's re module\n if ($B.$options.re == 'jsre'){return true} //force use of brythons re module\n // FIXME: Improve\n\n if(! isinstance(pattern, str)){\n // this is probably a SRE_PATTERN, so return false, and let\n // python's re module handle this.\n return false\n }\n var is_valid = false\n try{\n new RegExp(pattern)\n is_valid = true\n }\n catch(e){}\n if(! is_valid){return false} //if js won't parse the pattern return false\n\n // using reference http://www.regular-expressions.info/\n // to compare python re and javascript regex libraries\n\n // look for things javascript does not support\n // check for name capturing group\n var mylist = ['?P=', '?P<', '(?#', '(?<=', '(? 
-1) return false\n }\n\n var re_list=['\\{,\\d+\\}']\n for(var i=0, _len_i = re_list.length; i < _len_i; i++) {\n var _re = new RegExp(re_list[i])\n if (_re.test(pattern)){return false}\n }\n\n // it looks like the pattern has passed all our tests so lets assume\n // javascript can handle this pattern.\n return true\n }\n var $SRE_PatternDict = {\n __class__:_b_.type,\n $infos:{\n __name__:'SRE_Pattern'\n }\n }\n $SRE_PatternDict.__mro__ = [object]\n $SRE_PatternDict.findall = function(self, string){\n return obj.findall(self.pattern, string, self.flags)\n }\n $SRE_PatternDict.finditer = function(self, string){\n return obj.finditer(self.pattern, string, self.flags)\n }\n $SRE_PatternDict.match = function(self, string){\n return obj.match(self.pattern, string, self.flags)\n }\n $SRE_PatternDict.search = function(self, string){\n return obj.search(self.pattern, string, self.flags)\n }\n $SRE_PatternDict.sub = function(self,repl,string){\n return obj.sub(self.pattern,repl,string,self.flags)\n }\n // TODO: groups\n // TODO: groupindex\n function normflags(flags){\n return ((flags & obj.I)? 'i' : '') + ((flags & obj.M)? 'm' : '');\n }\n // TODO: fullmatch()\n // TODO: split()\n // TODO: subn()\n obj.compile = function(pattern, flags){\n return {\n __class__: $SRE_PatternDict,\n pattern: pattern,\n flags: normflags(flags)\n }\n }\n obj.escape = function(string){\n // Escape all the characters in pattern except ASCII letters, numbers\n // and '_'. This is useful if you want to match an arbitrary literal\n // string that may have regular expression metacharacters in it.\n var res = ''\n var ok = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_'\n for(var i = 0, _len_i = string.length; i < _len_i; i++){\n if(ok.search(string.charAt(i))>-1){res += string.charAt(i)}\n }\n return res\n }\n obj.findall = function(pattern, string, flags){\n var $ns=$B.args('re.findall', 2,\n {pattern:null, string:null}, ['pattern', 'string'],\n arguments,{}, 'args', 'kw') ,\n args = $ns['args'] ,\n _flags = 0;\n if(args.length>0){var flags = args[0]}\n else{var _flags = getattr($ns['kw'], 'get')('flags', 0)}\n\n var flags = normflags()\n flags += 'gm'\n var jsp = new RegExp(pattern,flags),\n jsmatch = string.match(jsp)\n if(jsmatch === null){return []}\n return jsmatch\n }\n obj.finditer = function(pattern, string, flags){\n var $ns=$B.args('re.finditer', 2,\n {pattern:null, string:null}, ['pattern', 'string'],\n arguments,{},'args','kw'),\n args = $ns['args'],\n _flags = 0;\n if(args.length>0){var flags=args[0]}\n else{var _flags = getattr($ns['kw'], 'get')('flags', 0)}\n\n var flags = normflags()\n flags += 'gm'\n var jsp = new RegExp(pattern, flags),\n jsmatch = string.match(jsp);\n if(jsmatch === null){return []}\n\n var _list = []\n for(var j = 0, _len_j = jsmatch.length; j < _len_j; j++) {\n var mo = {}\n mo._match=jsmatch[j]\n mo.group = function(){\n var res = []\n for(var i=0, _len_i = arguments.length; i < _len_i;i++){\n if(jsmatch[arguments[i]] === undefined){res.push(None)}\n else{res.push(jsmatch[arguments[i]])}\n }\n if(arguments.length == 1){return res[0]}\n return tuple.$factory(res)\n }\n mo.groups = function(_default){\n if(_default === undefined){_default=None}\n var res = []\n for(var i = 1, _len_i = jsmatch.length; i < _len_i; i++){\n if(jsmatch[i] === undefined){res.push(_default)}\n else{res.push(jsmatch[i])}\n }\n return tuple.$factory(res)\n }\n mo.start = function(){return mo._match.index}\n mo.end = function(){return mo._match.length - mo._match.index}\n mo.string = string\n 
_list.push(JSObject.$factory(mo))\n }\n return _list\n }\n obj.search = function(pattern, string){\n var $ns = $B.args('re.search', 2,\n {pattern:null, string:null},['pattern', 'string'],\n arguments, {}, 'args', 'kw')\n var args = $ns['args']\n if(args.length>0){var flags = args[0]}\n else{var flags = getattr($ns['kw'], 'get')('flags', '')}\n flags = normflags(flags)\n var jsp = new RegExp(pattern,flags)\n var jsmatch = string.match(jsp)\n if(jsmatch === null){return None}\n var mo = new Object()\n mo.group = function(){\n var res = []\n for(var i = 0, _len_i = arguments.length; i < _len_i; i++){\n if(jsmatch[arguments[i]] === undefined){res.push(None)}\n else{res.push(jsmatch[arguments[i]])}\n }\n if(arguments.length == 1){return res[0]}\n return tuple.$factory(res)\n }\n mo.groups = function(_default){\n if(_default===undefined){_default=None}\n var res = []\n for(var i = 1, _len_i = jsmatch.length; i < _len_i; i++){\n if(jsmatch[i] === undefined){res.push(_default)}\n else{res.push(jsmatch[i])}\n }\n return tuple.$factory(res)\n }\n mo.start = function(){return jsmatch.index}\n mo.end = function(){return jsmatch.length - jsmatch.index}\n mo.string = string\n return JSObject.$factory(mo)\n }\n obj.sub = function(pattern, repl, string){\n var $ns=$B.args('re.search', 3,\n {pattern: null, repl: null, string: null},\n ['pattern', 'repl', 'string'],\n arguments,{}, 'args', 'kw')\n for($var in $ns){eval(\"var \" + $var + \"=$ns[$var]\")}\n var args = $ns['args']\n var count = _b_.dict.get($ns['kw'], 'count', 0)\n var flags = _b_.dict.get($ns['kw'], 'flags', '')\n if(args.length > 0){var count = args[0]}\n if(args.length > 1){var flags = args[1]}\n flags = normflags(flags)\n if(typeof repl == \"string\"){\n // backreferences are \\1, \\2... in Python but $1,$2... 
in Javascript\n repl = repl.replace(/\\\\(\\d+)/g, '$$$1')\n }else if(typeof repl == \"function\"){\n // the argument passed to the Python function is the match object\n // the arguments passed to the Javascript function are :\n // - the matched substring\n // - the matched groups\n // - the offset of the matched substring inside the string\n // - the string being examined\n var $repl1 = function(){\n var mo = Object()\n mo.string = arguments[arguments.length - 1]\n var matched = arguments[0];\n var start = arguments[arguments.length - 2]\n var end = start + matched.length\n mo.start = function(){return start}\n mo.end = function(){return end}\n groups = []\n for(var i = 1, _len_i = arguments.length-2; i < _len_i; i++){\n groups.push(arguments[i])\n }\n mo.groups = function(_default){\n if(_default === undefined){_default = None}\n var res = []\n for(var i = 0, _len_i = groups.length; i < _len_i; i++){\n if(groups[i] === undefined){res.push(_default)}\n else{res.push(groups[i])}\n }\n return res\n }\n mo.group = function(i){\n if(i==0){return matched}\n return groups[i-1]\n }\n return repl(JSObject.$factory(mo))\n }\n }\n if(count == 0){flags += 'g'}\n var jsp = new RegExp(pattern, flags)\n if(typeof repl == 'function'){return string.replace(jsp, $repl1)}\n else{return string.replace(jsp, repl)}\n }\n obj.match = (function(search_func){\n return function(){\n // match is like search but pattern must start with ^\n var pattern = arguments[0]\n if(pattern.charAt(0) != '^'){pattern = '^'+pattern}\n var args = [pattern]\n for(var i = 1, _len_i = arguments.length; i < _len_i; i++){\n args.push(arguments[i])\n }\n return search_func.apply(null, args)\n }\n })(obj.search)\n\n return obj\n}\n)(__BRYTHON__)\n"], "_locale": [".js", "var am = {\n \"C\": \"AM\",\n \"aa\": \"saaku\",\n \"ab\": \"AM\",\n \"ae\": \"AM\",\n \"af\": \"vm.\",\n \"ak\": \"AN\",\n \"am\": \"\\u1325\\u12cb\\u1275\",\n \"an\": \"AM\",\n \"ar\": \"\\u0635\",\n \"as\": \"\\u09f0\\u09be\\u09a4\\u09bf\\u09aa\\u09c1\",\n \"av\": \"AM\",\n \"ay\": \"AM\",\n \"az\": \"AM\",\n \"ba\": \"\",\n \"be\": \"\",\n \"bg\": \"\",\n \"bh\": \"AM\",\n \"bi\": \"AM\",\n \"bm\": \"AM\",\n \"bn\": \"AM\",\n \"bo\": \"\\u0f66\\u0f94\\u0f0b\\u0f51\\u0fb2\\u0f7c\",\n \"br\": \"A.M.\",\n \"bs\": \"prijepodne\",\n \"ca\": \"a. 
m.\",\n \"ce\": \"AM\",\n \"ch\": \"AM\",\n \"co\": \"\",\n \"cr\": \"AM\",\n \"cs\": \"dop.\",\n \"cu\": \"\\u0414\\u041f\",\n \"cv\": \"AM\",\n \"cy\": \"yb\",\n \"da\": \"\",\n \"de\": \"\",\n \"dv\": \"\\u0789\\u0786\",\n \"dz\": \"\\u0f66\\u0f94\\u0f0b\\u0f46\\u0f0b\",\n \"ee\": \"\\u014bdi\",\n \"el\": \"\\u03c0\\u03bc\",\n \"en\": \"AM\",\n \"eo\": \"atm\",\n \"es\": \"\",\n \"et\": \"AM\",\n \"eu\": \"AM\",\n \"fa\": \"\\u0642.\\u0638\",\n \"ff\": \"\",\n \"fi\": \"ap.\",\n \"fj\": \"AM\",\n \"fo\": \"um fyr.\",\n \"fr\": \"\",\n \"fy\": \"AM\",\n \"ga\": \"r.n.\",\n \"gd\": \"m\",\n \"gl\": \"a.m.\",\n \"gn\": \"a.m.\",\n \"gu\": \"\\u0aaa\\u0ac2\\u0ab0\\u0acd\\u0ab5\\u00a0\\u0aae\\u0aa7\\u0acd\\u0aaf\\u0abe\\u0ab9\\u0acd\\u0aa8\",\n \"gv\": \"a.m.\",\n \"ha\": \"AM\",\n \"he\": \"AM\",\n \"hi\": \"\\u092a\\u0942\\u0930\\u094d\\u0935\\u093e\\u0939\\u094d\\u0928\",\n \"ho\": \"AM\",\n \"hr\": \"\",\n \"ht\": \"AM\",\n \"hu\": \"de.\",\n \"hy\": \"\",\n \"hz\": \"AM\",\n \"ia\": \"a.m.\",\n \"id\": \"AM\",\n \"ie\": \"AM\",\n \"ig\": \"A.M.\",\n \"ii\": \"\\ua0b5\\ua1aa\\ua20c\\ua210\",\n \"ik\": \"AM\",\n \"io\": \"AM\",\n \"is\": \"f.h.\",\n \"it\": \"\",\n \"iu\": \"AM\",\n \"ja\": \"\\u5348\\u524d\",\n \"jv\": \"\",\n \"ka\": \"AM\",\n \"kg\": \"AM\",\n \"ki\": \"Kiroko\",\n \"kj\": \"AM\",\n \"kk\": \"AM\",\n \"kl\": \"\",\n \"km\": \"\\u1796\\u17d2\\u179a\\u17b9\\u1780\",\n \"kn\": \"\\u0caa\\u0cc2\\u0cb0\\u0ccd\\u0cb5\\u0cbe\\u0cb9\\u0ccd\\u0ca8\",\n \"ko\": \"\\uc624\\uc804\",\n \"kr\": \"AM\",\n \"ks\": \"AM\",\n \"ku\": \"\\u067e.\\u0646\",\n \"kv\": \"AM\",\n \"kw\": \"a.m.\",\n \"ky\": \"\",\n \"la\": \"\",\n \"lb\": \"\",\n \"lg\": \"AM\",\n \"li\": \"AM\",\n \"ln\": \"nt\\u0254\\u0301ng\\u0254\\u0301\",\n \"lo\": \"\\u0e81\\u0ec8\\u0ead\\u0e99\\u0e97\\u0ec8\\u0ebd\\u0e87\",\n \"lt\": \"prie\\u0161piet\",\n \"lu\": \"Dinda\",\n \"lv\": \"priek\\u0161p.\",\n \"mg\": \"AM\",\n \"mh\": \"AM\",\n \"mi\": \"a.m.\",\n \"mk\": \"\\u043f\\u0440\\u0435\\u0442\\u043f\\u043b.\",\n \"ml\": \"AM\",\n \"mn\": \"??\",\n \"mo\": \"AM\",\n \"mr\": \"\\u092e.\\u092a\\u0942.\",\n \"ms\": \"PG\",\n \"mt\": \"AM\",\n \"my\": \"\\u1014\\u1036\\u1014\\u1000\\u103a\",\n \"na\": \"AM\",\n \"nb\": \"a.m.\",\n \"nd\": \"AM\",\n \"ne\": \"\\u092a\\u0942\\u0930\\u094d\\u0935\\u093e\\u0939\\u094d\\u0928\",\n \"ng\": \"AM\",\n \"nl\": \"\",\n \"nn\": \"f.m.\",\n \"no\": \"a.m.\",\n \"nr\": \"AM\",\n \"nv\": \"AM\",\n \"ny\": \"AM\",\n \"oc\": \"AM\",\n \"oj\": \"AM\",\n \"om\": \"WD\",\n \"or\": \"AM\",\n \"os\": \"AM\",\n \"pa\": \"\\u0a38\\u0a35\\u0a47\\u0a30\",\n \"pi\": \"AM\",\n \"pl\": \"AM\",\n \"ps\": \"\\u063a.\\u0645.\",\n \"pt\": \"\",\n \"qu\": \"a.m.\",\n \"rc\": \"AM\",\n \"rm\": \"AM\",\n \"rn\": \"Z.MU.\",\n \"ro\": \"a.m.\",\n \"ru\": \"\",\n \"rw\": \"AM\",\n \"sa\": \"\\u092e\\u0927\\u094d\\u092f\\u093e\\u0928\\u092a\\u0942\\u0930\\u094d\\u0935\",\n \"sc\": \"AM\",\n \"sd\": \"AM\",\n \"se\": \"i.b.\",\n \"sg\": \"ND\",\n \"sh\": \"AM\",\n \"si\": \"\\u0db4\\u0dd9.\\u0dc0.\",\n \"sk\": \"AM\",\n \"sl\": \"dop.\",\n \"sm\": \"AM\",\n \"sn\": \"AM\",\n \"so\": \"sn.\",\n \"sq\": \"e paradites\",\n \"sr\": \"pre podne\",\n \"ss\": \"AM\",\n \"st\": \"AM\",\n \"su\": \"AM\",\n \"sv\": \"\",\n \"sw\": \"AM\",\n \"ta\": \"\\u0b95\\u0bbe\\u0bb2\\u0bc8\",\n \"te\": \"\\u0c2a\\u0c42\\u0c30\\u0c4d\\u0c35\\u0c3e\\u0c39\\u0c4d\\u0c28\",\n \"tg\": \"\",\n \"th\": \"AM\",\n \"ti\": \"\\u1295\\u1309\\u1206 \\u1230\\u12d3\\u1270\",\n \"tk\": \"\",\n \"tl\": \"AM\",\n \"tn\": \"AM\",\n \"to\": 
\"AM\",\n \"tr\": \"\\u00d6\\u00d6\",\n \"ts\": \"AM\",\n \"tt\": \"\",\n \"tw\": \"AM\",\n \"ty\": \"AM\",\n \"ug\": \"\\u0686?\\u0634\\u062a\\u0649\\u0646 \\u0628?\\u0631?\\u0646\",\n \"uk\": \"AM\",\n \"ur\": \"AM\",\n \"uz\": \"TO\",\n \"ve\": \"AM\",\n \"vi\": \"SA\",\n \"vo\": \"AM\",\n \"wa\": \"AM\",\n \"wo\": \"\",\n \"xh\": \"AM\",\n \"yi\": \"\\ua0b5\\ua1aa\\ua20c\\ua210\",\n \"yo\": \"\\u00c0\\u00e1r?`\",\n \"za\": \"AM\",\n \"zh\": \"\\u4e0a\\u5348\",\n \"zu\": \"AM\"\n}\nvar pm = {\n \"C\": \"PM\",\n \"aa\": \"carra\",\n \"ab\": \"PM\",\n \"ae\": \"PM\",\n \"af\": \"nm.\",\n \"ak\": \"EW\",\n \"am\": \"\\u12a8\\u1230\\u12d3\\u1275\",\n \"an\": \"PM\",\n \"ar\": \"\\u0645\",\n \"as\": \"\\u0986\\u09ac\\u09c7\\u09b2\\u09bf\",\n \"av\": \"PM\",\n \"ay\": \"PM\",\n \"az\": \"PM\",\n \"ba\": \"\",\n \"be\": \"\",\n \"bg\": \"\",\n \"bh\": \"PM\",\n \"bi\": \"PM\",\n \"bm\": \"PM\",\n \"bn\": \"PM\",\n \"bo\": \"\\u0f55\\u0fb1\\u0f72\\u0f0b\\u0f51\\u0fb2\\u0f7c\",\n \"br\": \"G.M.\",\n \"bs\": \"popodne\",\n \"ca\": \"p. m.\",\n \"ce\": \"PM\",\n \"ch\": \"PM\",\n \"co\": \"\",\n \"cr\": \"PM\",\n \"cs\": \"odp.\",\n \"cu\": \"\\u041f\\u041f\",\n \"cv\": \"PM\",\n \"cy\": \"yh\",\n \"da\": \"\",\n \"de\": \"\",\n \"dv\": \"\\u0789\\u078a\",\n \"dz\": \"\\u0f55\\u0fb1\\u0f72\\u0f0b\\u0f46\\u0f0b\",\n \"ee\": \"\\u0263etr\\u0254\",\n \"el\": \"\\u03bc\\u03bc\",\n \"en\": \"PM\",\n \"eo\": \"ptm\",\n \"es\": \"\",\n \"et\": \"PM\",\n \"eu\": \"PM\",\n \"fa\": \"\\u0628.\\u0638\",\n \"ff\": \"\",\n \"fi\": \"ip.\",\n \"fj\": \"PM\",\n \"fo\": \"um sein.\",\n \"fr\": \"\",\n \"fy\": \"PM\",\n \"ga\": \"i.n.\",\n \"gd\": \"f\",\n \"gl\": \"p.m.\",\n \"gn\": \"p.m.\",\n \"gu\": \"\\u0a89\\u0aa4\\u0acd\\u0aa4\\u0ab0\\u00a0\\u0aae\\u0aa7\\u0acd\\u0aaf\\u0abe\\u0ab9\\u0acd\\u0aa8\",\n \"gv\": \"p.m.\",\n \"ha\": \"PM\",\n \"he\": \"PM\",\n \"hi\": \"\\u0905\\u092a\\u0930\\u093e\\u0939\\u094d\\u0928\",\n \"ho\": \"PM\",\n \"hr\": \"\",\n \"ht\": \"PM\",\n \"hu\": \"du.\",\n \"hy\": \"\",\n \"hz\": \"PM\",\n \"ia\": \"p.m.\",\n \"id\": \"PM\",\n \"ie\": \"PM\",\n \"ig\": \"P.M.\",\n \"ii\": \"\\ua0b5\\ua1aa\\ua20c\\ua248\",\n \"ik\": \"PM\",\n \"io\": \"PM\",\n \"is\": \"e.h.\",\n \"it\": \"\",\n \"iu\": \"PM\",\n \"ja\": \"\\u5348\\u5f8c\",\n \"jv\": \"\",\n \"ka\": \"PM\",\n \"kg\": \"PM\",\n \"ki\": \"Hwa\\u0129-in\\u0129\",\n \"kj\": \"PM\",\n \"kk\": \"PM\",\n \"kl\": \"\",\n \"km\": \"\\u179b\\u17d2\\u1784\\u17b6\\u1785\",\n \"kn\": \"\\u0c85\\u0caa\\u0cb0\\u0cbe\\u0cb9\\u0ccd\\u0ca8\",\n \"ko\": \"\\uc624\\ud6c4\",\n \"kr\": \"PM\",\n \"ks\": \"PM\",\n \"ku\": \"\\u062f.\\u0646\",\n \"kv\": \"PM\",\n \"kw\": \"p.m.\",\n \"ky\": \"\",\n \"la\": \"\",\n \"lb\": \"\",\n \"lg\": \"PM\",\n \"li\": \"PM\",\n \"ln\": \"mp\\u00f3kwa\",\n \"lo\": \"\\u0eab\\u0ebc\\u0eb1\\u0e87\\u0e97\\u0ec8\\u0ebd\\u0e87\",\n \"lt\": \"popiet\",\n \"lu\": \"Dilolo\",\n \"lv\": \"p\\u0113cp.\",\n \"mg\": \"PM\",\n \"mh\": \"PM\",\n \"mi\": \"p.m.\",\n \"mk\": \"\\u043f\\u043e\\u043f\\u043b.\",\n \"ml\": \"PM\",\n \"mn\": \"?\\u0425\",\n \"mo\": \"PM\",\n \"mr\": \"\\u092e.\\u0928\\u0902.\",\n \"ms\": \"PTG\",\n \"mt\": \"PM\",\n \"my\": \"\\u100a\\u1014\\u1031\",\n \"na\": \"PM\",\n \"nb\": \"p.m.\",\n \"nd\": \"PM\",\n \"ne\": \"\\u0905\\u092a\\u0930\\u093e\\u0939\\u094d\\u0928\",\n \"ng\": \"PM\",\n \"nl\": \"\",\n \"nn\": \"e.m.\",\n \"no\": \"p.m.\",\n \"nr\": \"PM\",\n \"nv\": \"PM\",\n \"ny\": \"PM\",\n \"oc\": \"PM\",\n \"oj\": \"PM\",\n \"om\": \"WB\",\n \"or\": \"PM\",\n \"os\": \"PM\",\n \"pa\": 
\"\\u0a36\\u0a3e\\u0a2e\",\n \"pi\": \"PM\",\n \"pl\": \"PM\",\n \"ps\": \"\\u063a.\\u0648.\",\n \"pt\": \"\",\n \"qu\": \"p.m.\",\n \"rc\": \"PM\",\n \"rm\": \"PM\",\n \"rn\": \"Z.MW.\",\n \"ro\": \"p.m.\",\n \"ru\": \"\",\n \"rw\": \"PM\",\n \"sa\": \"\\u092e\\u0927\\u094d\\u092f\\u093e\\u0928\\u092a\\u091a\\u094d\\u092f\\u093e\\u0924\",\n \"sc\": \"PM\",\n \"sd\": \"PM\",\n \"se\": \"e.b.\",\n \"sg\": \"LK\",\n \"sh\": \"PM\",\n \"si\": \"\\u0db4.\\u0dc0.\",\n \"sk\": \"PM\",\n \"sl\": \"pop.\",\n \"sm\": \"PM\",\n \"sn\": \"PM\",\n \"so\": \"gn.\",\n \"sq\": \"e pasdites\",\n \"sr\": \"po podne\",\n \"ss\": \"PM\",\n \"st\": \"PM\",\n \"su\": \"PM\",\n \"sv\": \"\",\n \"sw\": \"PM\",\n \"ta\": \"\\u0bae\\u0bbe\\u0bb2\\u0bc8\",\n \"te\": \"\\u0c05\\u0c2a\\u0c30\\u0c3e\\u0c39\\u0c4d\\u0c28\",\n \"tg\": \"\",\n \"th\": \"PM\",\n \"ti\": \"\\u12f5\\u1215\\u122d \\u1230\\u12d3\\u1275\",\n \"tk\": \"\",\n \"tl\": \"PM\",\n \"tn\": \"PM\",\n \"to\": \"PM\",\n \"tr\": \"\\u00d6S\",\n \"ts\": \"PM\",\n \"tt\": \"\",\n \"tw\": \"PM\",\n \"ty\": \"PM\",\n \"ug\": \"\\u0686?\\u0634\\u062a\\u0649\\u0646 \\u0643?\\u064a\\u0649\\u0646\",\n \"uk\": \"PM\",\n \"ur\": \"PM\",\n \"uz\": \"TK\",\n \"ve\": \"PM\",\n \"vi\": \"CH\",\n \"vo\": \"PM\",\n \"wa\": \"PM\",\n \"wo\": \"\",\n \"xh\": \"PM\",\n \"yi\": \"\\ua0b5\\ua1aa\\ua20c\\ua248\",\n \"yo\": \"?`s\\u00e1n\",\n \"za\": \"PM\",\n \"zh\": \"\\u4e0b\\u5348\",\n \"zu\": \"PM\"\n}\n\nvar X_format = {\n \"%H:%M:%S\": [\n \"C\",\n \"ab\",\n \"ae\",\n \"af\",\n \"an\",\n \"av\",\n \"ay\",\n \"az\",\n \"ba\",\n \"be\",\n \"bg\",\n \"bh\",\n \"bi\",\n \"bm\",\n \"bo\",\n \"br\",\n \"bs\",\n \"ca\",\n \"ce\",\n \"ch\",\n \"co\",\n \"cr\",\n \"cs\",\n \"cu\",\n \"cv\",\n \"cy\",\n \"da\",\n \"de\",\n \"dv\",\n \"eo\",\n \"es\",\n \"et\",\n \"eu\",\n \"ff\",\n \"fj\",\n \"fo\",\n \"fr\",\n \"fy\",\n \"ga\",\n \"gd\",\n \"gl\",\n \"gn\",\n \"gu\",\n \"gv\",\n \"ha\",\n \"he\",\n \"hi\",\n \"ho\",\n \"hr\",\n \"ht\",\n \"hu\",\n \"hy\",\n \"hz\",\n \"ia\",\n \"ie\",\n \"ig\",\n \"ik\",\n \"io\",\n \"is\",\n \"it\",\n \"ja\",\n \"ka\",\n \"kg\",\n \"ki\",\n \"kj\",\n \"kk\",\n \"kl\",\n \"km\",\n \"kn\",\n \"kv\",\n \"kw\",\n \"ky\",\n \"la\",\n \"lb\",\n \"lg\",\n \"li\",\n \"ln\",\n \"lo\",\n \"lt\",\n \"lu\",\n \"lv\",\n \"mg\",\n \"mh\",\n \"mk\",\n \"mn\",\n \"mo\",\n \"mr\",\n \"mt\",\n \"my\",\n \"na\",\n \"nb\",\n \"nd\",\n \"ng\",\n \"nl\",\n \"nn\",\n \"no\",\n \"nr\",\n \"nv\",\n \"ny\",\n \"oj\",\n \"or\",\n \"os\",\n \"pi\",\n \"pl\",\n \"ps\",\n \"pt\",\n \"rc\",\n \"rm\",\n \"rn\",\n \"ro\",\n \"ru\",\n \"rw\",\n \"sa\",\n \"sc\",\n \"se\",\n \"sg\",\n \"sh\",\n \"sk\",\n \"sl\",\n \"sm\",\n \"sn\",\n \"sr\",\n \"ss\",\n \"st\",\n \"su\",\n \"sv\",\n \"sw\",\n \"ta\",\n \"te\",\n \"tg\",\n \"th\",\n \"tk\",\n \"tl\",\n \"tn\",\n \"tr\",\n \"ts\",\n \"tt\",\n \"tw\",\n \"ty\",\n \"ug\",\n \"uk\",\n \"uz\",\n \"ve\",\n \"vo\",\n \"wa\",\n \"wo\",\n \"xh\",\n \"yo\",\n \"za\",\n \"zh\",\n \"zu\"\n ],\n \"%i:%M:%S %p\": [\n \"aa\",\n \"ak\",\n \"am\",\n \"bn\",\n \"el\",\n \"en\",\n \"iu\",\n \"kr\",\n \"ks\",\n \"mi\",\n \"ml\",\n \"ms\",\n \"ne\",\n \"om\",\n \"sd\",\n \"so\",\n \"sq\",\n \"ti\",\n \"to\",\n \"ur\",\n \"vi\"\n ],\n \"%I:%M:%S %p\": [\n \"ar\",\n \"fa\",\n \"ku\",\n \"qu\"\n ],\n \"%p %i:%M:%S\": [\n \"as\",\n \"ii\",\n \"ko\",\n \"yi\"\n ],\n \"\\u0f46\\u0f74\\u0f0b\\u0f5a\\u0f7c\\u0f51\\u0f0b%i:%M:%S %p\": [\n \"dz\"\n ],\n \"%p ga %i:%M:%S\": [\n \"ee\"\n ],\n \"%H.%M.%S\": [\n \"fi\",\n \"id\",\n \"jv\",\n \"oc\",\n \"si\"\n 
],\n \"%p %I:%M:%S\": [\n \"pa\"\n ]\n}\nvar x_format = {\n \"%m/%d/%y\": [\n \"C\"\n ],\n \"%d/%m/%Y\": [\n \"aa\",\n \"am\",\n \"bm\",\n \"bn\",\n \"ca\",\n \"co\",\n \"cy\",\n \"el\",\n \"es\",\n \"ff\",\n \"fr\",\n \"ga\",\n \"gd\",\n \"gl\",\n \"gn\",\n \"gv\",\n \"ha\",\n \"he\",\n \"id\",\n \"ig\",\n \"it\",\n \"iu\",\n \"jv\",\n \"ki\",\n \"kr\",\n \"kw\",\n \"la\",\n \"lg\",\n \"ln\",\n \"lo\",\n \"lu\",\n \"mi\",\n \"ml\",\n \"ms\",\n \"mt\",\n \"nd\",\n \"oc\",\n \"om\",\n \"pt\",\n \"qu\",\n \"rn\",\n \"sd\",\n \"sg\",\n \"so\",\n \"sw\",\n \"ti\",\n \"to\",\n \"uk\",\n \"ur\",\n \"uz\",\n \"vi\",\n \"wo\",\n \"yo\"\n ],\n \"%m/%d/%Y\": [\n \"ab\",\n \"ae\",\n \"an\",\n \"av\",\n \"ay\",\n \"bh\",\n \"bi\",\n \"ch\",\n \"cr\",\n \"cv\",\n \"ee\",\n \"en\",\n \"fj\",\n \"ho\",\n \"ht\",\n \"hz\",\n \"ie\",\n \"ik\",\n \"io\",\n \"kg\",\n \"kj\",\n \"ks\",\n \"kv\",\n \"li\",\n \"mh\",\n \"mo\",\n \"na\",\n \"ne\",\n \"ng\",\n \"nv\",\n \"ny\",\n \"oj\",\n \"pi\",\n \"rc\",\n \"sc\",\n \"sh\",\n \"sm\",\n \"su\",\n \"tl\",\n \"tw\",\n \"ty\",\n \"wa\",\n \"za\",\n \"zu\"\n ],\n \"%Y-%m-%d\": [\n \"af\",\n \"br\",\n \"ce\",\n \"dz\",\n \"eo\",\n \"ko\",\n \"lt\",\n \"mg\",\n \"nr\",\n \"rw\",\n \"se\",\n \"si\",\n \"sn\",\n \"ss\",\n \"st\",\n \"sv\",\n \"tn\",\n \"ts\",\n \"ug\",\n \"ve\",\n \"vo\",\n \"xh\"\n ],\n \"%Y/%m/%d\": [\n \"ak\",\n \"bo\",\n \"eu\",\n \"ia\",\n \"ii\",\n \"ja\",\n \"ku\",\n \"yi\",\n \"zh\"\n ],\n \"null\": [\n \"ar\",\n \"fa\",\n \"ps\",\n \"th\"\n ],\n \"%d-%m-%Y\": [\n \"as\",\n \"da\",\n \"fy\",\n \"hi\",\n \"kl\",\n \"mr\",\n \"my\",\n \"nl\",\n \"rm\",\n \"sa\",\n \"ta\"\n ],\n \"%d.%m.%Y\": [\n \"az\",\n \"cs\",\n \"de\",\n \"et\",\n \"fi\",\n \"fo\",\n \"hy\",\n \"is\",\n \"ka\",\n \"kk\",\n \"lv\",\n \"mk\",\n \"nb\",\n \"nn\",\n \"no\",\n \"os\",\n \"pl\",\n \"ro\",\n \"ru\",\n \"sq\",\n \"tg\",\n \"tr\",\n \"tt\"\n ],\n \"%d.%m.%y\": [\n \"ba\",\n \"be\",\n \"lb\"\n ],\n \"%d.%m.%Y \\u0433.\": [\n \"bg\"\n ],\n \"%d.%m.%Y.\": [\n \"bs\",\n \"hr\",\n \"sr\"\n ],\n \"%Y.%m.%d\": [\n \"cu\",\n \"mn\"\n ],\n \"%d/%m/%y\": [\n \"dv\",\n \"km\"\n ],\n \"%d-%m-%y\": [\n \"gu\",\n \"kn\",\n \"or\",\n \"pa\",\n \"te\"\n ],\n \"%Y. %m. %d.\": [\n \"hu\"\n ],\n \"%d-%b %y\": [\n \"ky\"\n ],\n \"%d. %m. %Y\": [\n \"sk\",\n \"sl\"\n ],\n \"%d.%m.%y \\u00fd.\": [\n \"tk\"\n ]\n}\n\n\nvar $module=(function($B){\n var _b_ = $B.builtins\n return {\n CHAR_MAX: 127,\n LC_ALL: 6,\n LC_COLLATE: 3,\n LC_CTYPE: 0,\n LC_MESSAGES: 5,\n LC_MONETARY: 4,\n LC_NUMERIC: 1,\n LC_TIME: 2,\n Error: _b_.ValueError,\n\n _date_format: function(spec, hour){\n var t,\n locale = __BRYTHON__.locale.substr(0, 2)\n\n if(spec == \"p\"){\n var res = hours < 12 ? 
am[locale] : pm[locale]\n if(res === undefined){\n throw _b_.ValueError.$factory(\"no format \" + spec + \" for locale \" +\n locale)\n }\n return res\n }\n else if(spec == \"x\"){\n t = x_format\n }else if(spec == \"X\"){\n t = X_format\n }else{\n throw _b_.ValueError.$factory(\"invalid format\", spec)\n }\n for(var key in t){\n if(t[key].indexOf(locale) > -1){\n return key\n }\n }\n throw _b_.ValueError.$factory(\"no format \" + spec + \" for locale \" +\n locale)\n },\n\n localeconv: function(){\n var conv = {'grouping': [127],\n 'currency_symbol': '',\n 'n_sign_posn': 127,\n 'p_cs_precedes': 127,\n 'n_cs_precedes': 127,\n 'mon_grouping': [],\n 'n_sep_by_space': 127,\n 'decimal_point': '.',\n 'negative_sign': '',\n 'positive_sign': '',\n 'p_sep_by_space': 127,\n 'int_curr_symbol': '',\n 'p_sign_posn': 127,\n 'thousands_sep': '',\n 'mon_thousands_sep': '',\n 'frac_digits': 127,\n 'mon_decimal_point': '',\n 'int_frac_digits': 127\n }\n var res = _b_.dict.$factory()\n res.$string_dict = conv\n return res\n },\n\n setlocale : function(){\n var $ = $B.args(\"setlocale\", 2, {category: null, locale: null},\n [\"category\", \"locale\"], arguments, {locale: _b_.None},\n null, null)\n /// XXX category is currently ignored\n if($.locale == \"\"){\n // use browser language setting, if it is set\n var LANG = ($B.language || \"\").substr(0, 2)\n if(am.hasOwnProperty(LANG)){\n $B.locale = LANG\n return LANG\n }else{\n console.log(\"Unknown locale: \" + LANG)\n }\n }else if($.locale === _b_.None){\n // return current locale\n return $B.locale\n }else{\n // Only use 2 first characters\n try{$.locale.substr(0, 2)}\n catch(err){\n throw $module.Error.$factory(\"Invalid locale: \" + $.locale)\n }\n if(am.hasOwnProperty($.locale.substr(0, 2))){\n $B.locale = $.locale\n return $.locale\n }else{\n throw $module.Error.$factory(\"Unknown locale: \" + $.locale)\n }\n }\n }\n }\n})(__BRYTHON__)\n"], "_multiprocessing": [".js", "// multiprocessing\nvar $module = (function($B){\n\nvar _b_ = $B.builtins\nvar $s=[]\nfor(var $b in _b_) $s.push('var ' + $b +'=_b_[\"'+$b+'\"]')\neval($s.join(';'))\n\n//for(var $py_builtin in _b_){eval(\"var \"+$py_builtin+\"=_b_[$py_builtin]\")}\n\nvar Process = {\n __class__:_b_.type,\n __mro__: [_b_.object],\n $infos:{\n __name__:'Process'\n },\n $is_class: true\n}\n\nvar $convert_args=function(args) {\n var _list=[]\n for(var i=0, _len_i = args.length; i < _len_i; i++) {\n var _a=args[i]\n if(isinstance(_a, str)){_list.push(\"'\"+_a+\"'\")} else {_list.push(_a)}\n }\n\n return _list.join(',')\n}\n\nProcess.is_alive = function(self){return self.$alive}\n\nProcess.join = function(self, timeout){\n // need to block until process is complete\n // could probably use a addEventListener to execute all existing code\n // after this join statement\n\n self.$worker.addEventListener('message', function (e) {\n var data=e.data\n if (data.stdout != '') { // output stdout from process\n $B.stdout.write(data.stdout)\n }\n }, false);\n}\n\nProcess.run = function(self){\n //fix me\n}\n\nProcess.start = function(self){\n self.$worker.postMessage({target: self.$target,\n args: $convert_args(self.$args),\n // kwargs: self.$kwargs\n })\n self.$worker.addEventListener('error', function(e) { throw e})\n self.$alive=true\n}\n\nProcess.terminate = function(self){\n self.$worker.terminate()\n self.$alive=false\n}\n\n// variables\n//name\n//daemon\n//pid\n//exitcode\n\nProcess. 
$factory = function(){\n //arguments group=None, target=None, name=None, args=(), kwargs=()\n\n var $ns=$B.args('Process',0,{},[],arguments,{},null,'kw')\n var kw=$ns['kw']\n\n var target=_b_.dict.get($ns['kw'],'target',None)\n var args=_b_.dict.get($ns['kw'],'args',tuple.$factory())\n\n var worker = new Worker('/src/web_workers/multiprocessing.js')\n\n var res = {\n __class__:Process,\n $worker: worker,\n name: $ns['name'] || None,\n $target: target+'',\n $args: args,\n //$kwargs: $ns['kw'],\n $alive: false\n }\n return res\n}\n\n$B.set_func_names(Process, \"multiprocessing\")\n\nvar Pool = $B.make_class(\"Pool\")\n\nPool.__enter__ = function(self){}\nPool.__exit__ = function(self){}\n\nPool.__str__ = Pool.toString = Pool.__repr__=function(self){\n return ''\n}\n\nPool.map = function(){\n\n var $ns=$B.args('Pool.map', 3,\n {self:null, func:null, fargs:null}, ['self', 'func', 'fargs'],\n arguments,{},'args','kw')\n var func=$ns['func']\n var fargs=$ns['fargs']\n\n var _results=[]\n\n fargs=iter(fargs)\n\n var _pos=0\n console.log(self.$processes)\n _workers=[]\n for(var i=0; i < self.$processes; i++) {\n _workers[i] = new Worker('/src/web_workers/multiprocessing.js')\n var arg\n\n try{arg=getattr(fargs, '__next__')()}\n catch(err) {\n if (err.__class__ !== _b_.StopIteration) throw err\n }\n console.log(arg)\n _workers[i].finished=false\n _workers[i].postMessage({target: func+'', pos: _pos,\n args: $convert_args([arg])})\n _pos++\n\n _workers[i].addEventListener('message', function(e) {\n _results[e.data.pos]=e.data.result\n if (_results.length == args.length) return _results\n\n try {\n arg=getattr(fargs, '__next__')()\n e.currentTarget.postMessage({target: func+'', pos: _pos,\n args: $convert_args([arg])})\n _pos++\n } catch(err) {\n if (err.__class__ !== _b_.StopIteration) throw err\n this.finished=true\n }\n }, false);\n }\n}\n\nPool.apply_async = function(){\n\n var $ns=$B.$MakeArgs('apply_async', 3,\n {self:null, func:null, fargs:null}, ['self', 'func', 'fargs'],\n arguments,{},'args','kw')\n var func=$ns['func']\n var fargs=$ns['fargs']\n\n fargs=iter(fargs)\n\n async_result = {}\n async_result.get=function(timeout){\n console.log(results)\n console.log(fargs)\n return this.results}\n async_result.results=[]\n\n var _pos=0\n\n _workers=[]\n for(var i=0; i < self.$processes; i++) {\n _workers[i] = new Worker('/src/web_workers/multiprocessing.js')\n var arg\n\n try{arg=getattr(fargs, '__next__')()}\n catch(err) {\n if (err.__class__ !== _b_.StopIteration) throw err\n }\n //console.log(arg)\n //_workers[i].finished=false\n _workers[i].postMessage({target: func+'', pos: _pos,\n args: $convert_args([arg])})\n _pos++\n\n _workers[i].addEventListener('message', function(e) {\n async_result.results[e.data.pos]=e.data.result\n //if (_results.length == args.length) return _results\n\n try {\n arg=getattr(fargs, '__next__')()\n e.currentTarget.postMessage({target: func+'', pos: _pos,\n args: $convert_args([arg])})\n _pos++\n } catch(err) {\n if (err.__class__ !== _b_.StopIteration) throw err\n this.finished=true\n }\n }, false);\n }\n\n console.log(\"return\", async_result)\n return async_result\n}\n\nPool.$factory = function(){\n console.log(\"pool\")\n console.log(arguments)\n var $ns=$B.args('Pool',1,\n {processes:null},['processes'],arguments,{},'args','kw')\n //var kw=$ns['kw']\n\n var processes=$ns['processes']\n\n if (processes == None) {\n // look to see if we have stored cpu_count in local storage\n // maybe we should create a brython config file with settings,etc..??\n\n // if not 
there use a tool such as Core Estimator to calculate number of cpu's\n // http://eligrey.com/blog/post/cpu-core-estimation-with-javascript\n }\n\n console.log(processes)\n var res = {\n __class__:Pool,\n $processes:processes\n }\n return res\n}\n\n$B.set_func_names(Pool, \"multiprocessing\")\n\nreturn {Process:Process, Pool:Pool}\n\n})(__BRYTHON__)\n"], "_posixsubprocess": [".js", "var $module=(function($B){\n\n return {\n cloexec_pipe: function() {} // fixme\n }\n})(__BRYTHON__)\n"], "_profile": [".js", "// Private interface to the profiling instrumentation implemented in py_utils.js.\n// Uses local a copy of the eval function from py_builtin_functions.js\n\nvar $module=(function($B) {\n eval($B.InjectBuiltins());\n return {\n brython:$B,\n data:$B.$profile_data,\n start:$B.$profile.start,\n stop:$B.$profile.stop,\n pause:$B.$profile.pause,\n status:$B.$profile.status,\n clear:$B.$profile.clear,\n elapsed:$B.$profile.elapsed,\n run:function(src,_globals,_locals,nruns) {\n var current_frame = $B.frames_stack[$B.frames_stack.length-1]\n if(current_frame!==undefined){\n var current_locals_id = current_frame[0].replace(/\\./,'_'),\n current_globals_id = current_frame[2].replace(/\\./,'_')\n }\n\n var is_exec = true,\n leave = false\n\n // code will be run in a specific block\n var globals_id = '$profile_'+$B.UUID(),\n locals_id\n\n if(_locals===_globals){\n locals_id = globals_id\n }else{\n locals_id = '$profile_'+$B.UUID()\n }\n // Initialise the object for block namespaces\n eval('var $locals_'+globals_id+' = {}\\nvar $locals_'+locals_id+' = {}')\n\n // Initialise block globals\n\n // A _globals dictionary is provided, set or reuse its attribute\n // globals_id\n _globals.globals_id = _globals.globals_id || globals_id\n globals_id = _globals.globals_id\n\n if(_locals === _globals || _locals === undefined){\n locals_id = globals_id\n parent_scope = $B.builtins_scope\n }else{\n // The parent block of locals must be set to globals\n parent_scope = {\n id: globals_id,\n parent_block: $B.builtins_scope,\n binding: {}\n }\n for(var attr in _globals.$string_dict){\n parent_scope.binding[attr] = true\n }\n }\n\n // Initialise block globals\n if(_globals.$jsobj){var items = _globals.$jsobj}\n else{var items = _globals.$string_dict}\n for(var item in items){\n item1 = to_alias(item)\n try{\n eval('$locals_' + globals_id + '[\"' + item1 +\n '\"] = items[item]')\n }catch(err){\n console.log(err)\n console.log('error setting', item)\n break\n }\n }\n\n // Initialise block locals\n var items = _b_.dict.items(_locals), item\n if(_locals.$jsobj){var items = _locals.$jsobj}\n else{var items = _locals.$string_dict}\n for(var item in items){\n item1 = to_alias(item)\n try{\n eval('$locals_' + locals_id + '[\"' + item[0] + '\"] = item[1]')\n }catch(err){\n console.log(err)\n console.log('error setting', item)\n break\n }\n }\n //var nb_modules = Object.keys(__BRYTHON__.modules).length\n //console.log('before exec', nb_modules)\n\n console.log(\"call py2js\", src, globals_id, locals_id, parent_scope)\n var root = $B.py2js(src, globals_id, locals_id, parent_scope),\n js, gns, lns\n\n try{\n\n var js = root.to_js()\n\n var i,res,gns;\n for(i=0;i 255){return false}\n return unicode_iscased(cp)\n }\n\n function unicode_tolower(cp){\n var letter = String.fromCodePoint(cp),\n lower = letter.toLowerCase()\n return lower.charCodeAt(0)\n }\n\n function ascii_tolower(cp){\n return unicode_tolower(cp)\n }\n\nreturn {\n unicode_iscased: unicode_iscased,\n ascii_iscased: ascii_iscased,\n unicode_tolower: unicode_tolower,\n 
ascii_tolower: ascii_tolower\n}\n\n}\n\n)(__BRYTHON__)"], "_string": [".js", "var $module=(function($B){\n\nvar _b_ = $B.builtins\n\nfunction parts(format_string){\n var result = [],\n _parts = $B.split_format(format_string) // defined in py_string.js\n for(var i = 0; i < _parts.length; i+= 2){\n result.push({pre: _parts[i], fmt: _parts[i + 1]})\n }\n return result\n}\n\nfunction Tuple(){\n var args = []\n for(var i=0, len=arguments.length; i < len; i++){\n args.push(arguments[i])\n }\n return _b_.tuple.$factory(args)\n}\n\nreturn{\n\n formatter_field_name_split: function(fieldname){\n // Split the argument as a field name\n var parsed = $B.parse_format(fieldname),\n first = parsed.name,\n rest = []\n if(first.match(/\\d+/)){first = parseInt(first)}\n parsed.name_ext.forEach(function(ext){\n if(ext.startsWith(\"[\")){\n var item = ext.substr(1, ext.length - 2)\n if(item.match(/\\d+/)){\n rest.push(Tuple(false, parseInt(item)))\n }else{\n rest.push(Tuple(false, item))\n }\n }else{\n rest.push(Tuple(true, ext.substr(1)))\n }\n })\n return Tuple(first, _b_.iter(rest))\n },\n formatter_parser: function(format_string){\n // Parse the argument as a format string\n\n if(! _b_.isinstance(format_string, _b_.str)){\n throw _b_.ValueError.$factory(\"Invalid format string type: \" +\n $B.class_name(format_string))\n }\n\n var result = []\n parts(format_string).forEach(function(item){\n var pre = item.pre === undefined ? \"\" : item.pre,\n fmt = item.fmt\n if(fmt === undefined){\n result.push(Tuple(pre, _b_.None, _b_.None, _b_.None))\n }else if(fmt.string == ''){\n result.push(Tuple(pre, '', '', _b_.None))\n }else{\n result.push(Tuple(pre,\n fmt.raw_name + fmt.name_ext.join(\"\"),\n fmt.raw_spec,\n fmt.conv || _b_.None))\n }\n })\n return result\n }\n}\n})(__BRYTHON__)"], "_strptime": [".js", "var _b_ = __BRYTHON__.builtins\n\nvar $module = (function($B){\n return {\n _strptime_datetime: function(cls, s, fmt){\n var pos_s = 0,\n pos_fmt = 0,\n dt = {}\n function error(){\n throw Error(\"no match \" + pos_s + \" \" + s.charAt(pos_s) + \" \"+\n pos_fmt + \" \" + fmt.charAt(pos_fmt))\n }\n\n var locale = __BRYTHON__.locale,\n shortdays = [],\n longdays = [],\n conv_func = locale == \"C\" ?\n function(d){return d.toDateString()} :\n function(d, options){\n return d.toLocaleDateString(locale, options)\n }\n\n for(var day = 16; day < 23; day++){\n var d = new Date(Date.UTC(2012, 11, day, 3, 0, 0))\n shortdays.push(conv_func(d, {weekday: 'short'}))\n longdays.push(conv_func(d, {weekday: 'long'}))\n }\n\n var shortmonths = [],\n longmonths = []\n\n for(var month = 0; month < 12; month++){\n var d = new Date(Date.UTC(2012, month, 1, 3, 0, 0))\n shortmonths.push(conv_func(d, {month: 'short'}))\n longmonths.push(conv_func(d, {month: 'long'}))\n }\n\n var shortdays_re = new RegExp(shortdays.join(\"|\").replace(\".\", \"\\\\.\")),\n longdays_re = new RegExp(longdays.join(\"|\")),\n shortmonths_re = new RegExp(shortmonths.join(\"|\").replace(\".\", \"\\\\.\")),\n longmonths_re = new RegExp(longmonths.join(\"|\"))\n\n var regexps = {\n d: [\"day\", new RegExp(\"0[1-9]|[123][0-9]\")],\n H: [\"hour\", new RegExp(\"[01][0-9]|2[0-3]|\\\\d\")],\n I: [\"hour\", new RegExp(\"0[0-9]|1[0-2]\")],\n m: [\"month\", new RegExp(\"0[1-9]|1[012]\")],\n M: [\"minute\", new RegExp(\"[0-5][0-9]\")],\n S: [\"second\", new RegExp(\"([1-5]\\\\d)|(0?\\\\d)\")],\n y: [\"year\", new RegExp(\"0{0,2}\\\\d{2}\")],\n Y: [\"year\", new RegExp(\"\\\\d{4}\")]\n }\n\n while(pos_fmt < fmt.length){\n var car = fmt.charAt(pos_fmt)\n if(car == \"%\"){\n 
var spec = fmt.charAt(pos_fmt + 1),\n regexp = regexps[spec]\n if(regexp !== undefined){\n var re = regexp[1],\n attr = regexp[0],\n res = re.exec(s.substr(pos_s))\n if(res === null){\n error()\n }else{\n if(dt[attr] !== undefined){\n throw Error(attr + \" is defined more than once\")\n }else{\n dt[attr] = parseInt(res[0])\n pos_fmt += 2\n pos_s += res[0].length\n }\n }\n }else if(spec == \"a\" || spec == \"A\"){\n // Locale's abbreviated (a) or full (A) weekday name\n var attr = \"weekday\",\n re = spec == \"a\" ? shortdays_re : longdays_re,\n t = spec == \"a\" ? shortdays : longdays\n res = re.exec(s.substr(pos_s))\n if(res === null){\n error()\n }else{\n var match = res[0],\n ix = t.indexOf(match)\n }\n if(dt.weekday !== undefined){\n throw Error(attr + \" is defined more than once\")\n }else{\n dt.weekday = ix\n }\n pos_fmt += 2\n pos_s += match.length\n }else if(spec == \"b\" || spec == \"B\"){\n // Locales's abbreviated (b) or full (B) month\n var attr = \"month\",\n re = spec == \"b\" ? shortmonths_re : longmonths_re,\n t = spec == \"b\" ? shortmonths : longmonths,\n res = re.exec(s.substr(pos_s))\n if(res === null){\n error()\n }else{\n var match = res[0],\n ix = t.indexOf(match)\n }\n if(dt.month !== undefined){\n throw Error(attr + \" is defined more than once\")\n }else{\n dt.month = ix + 1\n }\n pos_fmt += 2\n pos_s += match.length\n }else if(spec == \"c\"){\n // Locale's appropriate date and time representation\n var fmt1 = fmt.substr(0, pos_fmt - 1) + _locale_c_format() +\n fmt.substr(pos_fmt + 2)\n console.log(\"fmt1\", fmt1)\n fmt = fmt1\n }else if(spec == \"%\"){\n if(s.charAt(pos_s) == \"%\"){\n pos_fmt++\n pos_s++\n }else{\n error()\n }\n }else{\n pos_fmt++\n }\n }else{\n if(car == s.charAt(pos_s)){\n pos_fmt++\n pos_s++\n }else{\n error()\n }\n }\n }\n return $B.$call(cls)(dt.year, dt.month, dt.day,\n dt.hour || 0, dt.minute || 0, dt.second || 0)\n }\n }\n})(__BRYTHON__)\n"], "_svg": [".js", "// creation of an HTML element\nvar $module = (function($B){\n\nvar _b_ = $B.builtins\nvar TagSum = $B.TagSum // defined in py_dom.js\n\nvar $s=[]\nfor(var $b in _b_) $s.push('var ' + $b +' = _b_[\"' + $b + '\"]')\neval($s.join(';'))\n\nvar $svgNS = \"http://www.w3.org/2000/svg\"\nvar $xlinkNS = \"http://www.w3.org/1999/xlink\"\n\nfunction makeTagDict(tagName){\n // return the dictionary for the class associated with tagName\n var dict = $B.make_class(tagName)\n\n dict.__init__ = function(){\n var $ns = $B.args('__init__', 1, {self: null}, ['self'],\n arguments, {}, 'args', 'kw'),\n self = $ns['self'],\n args = $ns['args']\n if(args.length == 1){\n var first = args[0]\n if(isinstance(first, [str, int, float])){\n self.elt.appendChild(document.createTextNode(str.$factory(first)))\n }else if(first.__class__ === TagSum){\n for(var i = 0, len = first.children.length; i < len; i++){\n self.elt.appendChild(first.children[i].elt)\n }\n }else{ // argument is another DOMNode instance\n try{self.elt.appendChild(first.elt)}\n catch(err){throw ValueError.$factory('wrong element ' + first)}\n }\n }\n\n // attributes\n var items = _b_.list.$factory(_b_.dict.items($ns['kw']))\n for(var i = 0, len = items.length; i < len; i++){\n // keyword arguments\n var arg = items[i][0],\n value = items[i][1]\n if(arg.toLowerCase().substr(0,2) == \"on\"){\n // Event binding passed as argument \"onclick\", \"onfocus\"...\n // Better use method bind of DOMNode objects\n var js = '$B.DOMNode.bind(self,\"' +\n arg.toLowerCase().substr(2)\n eval(js+'\",function(){'+value+'})')\n }else if(arg.toLowerCase() == 
\"style\"){\n $B.DOMNode.set_style(self,value)\n }else if(arg.toLowerCase().indexOf(\"href\") !== -1){ // xlink:href\n self.elt.setAttributeNS( \"http://www.w3.org/1999/xlink\",\n \"href\",value)\n }else{\n if(value !== false){\n // option.selected=false sets it to true :-)\n try{\n arg = arg.replace('_', '-')\n self.elt.setAttributeNS(null, arg, value)\n }catch(err){\n throw ValueError.$factory(\"can't set attribute \" + arg)\n }\n }\n }\n }\n }\n\n dict.__mro__ = [$B.DOMNode, $B.builtins.object]\n\n dict.__new__ = function(cls){\n var res = $B.DOMNode.$factory(document.createElementNS($svgNS, tagName))\n res.__class__ = cls\n return res\n }\n\n dict.$factory = function(){\n var res = $B.DOMNode.$factory(\n document.createElementNS($svgNS, tagName))\n res.__class__ = dict\n // apply __init__\n dict.__init__(res, ...arguments)\n return res\n }\n\n $B.set_func_names(dict, \"browser.svg\")\n\n return dict\n}\n\n\n// SVG\nvar $svg_tags = ['a',\n'altGlyph',\n'altGlyphDef',\n'altGlyphItem',\n'animate',\n'animateColor',\n'animateMotion',\n'animateTransform',\n'circle',\n'clipPath',\n'color_profile', // instead of color-profile\n'cursor',\n'defs',\n'desc',\n'ellipse',\n'feBlend',\n'foreignObject', //patch to enable foreign objects\n'g',\n'image',\n'line',\n'linearGradient',\n'marker',\n'mask',\n'path',\n'pattern',\n'polygon',\n'polyline',\n'radialGradient',\n'rect',\n'set',\n'stop',\n'svg',\n'text',\n'tref',\n'tspan',\n'use']\n\n// create classes\nvar obj = new Object()\nvar dicts = {}\nfor(var i = 0, len = $svg_tags.length; i < len; i++){\n var tag = $svg_tags[i]\n obj[tag] = makeTagDict(tag)\n}\nreturn obj\n})(__BRYTHON__)\n"], "_warnings": [".js", "var $module = (function($B){\n\n_b_ = $B.builtins\n\nreturn {\n __doc__: \"_warnings provides basic warning filtering support.\\n \" +\n \"It is a helper module to speed up interpreter start-up.\",\n\n default_action: \"default\",\n\n filters: [\n ['ignore', _b_.None, _b_.DeprecationWarning, _b_.None, 0],\n ['ignore', _b_.None, _b_.PendingDeprecationWarning, _b_.None, 0],\n ['ignore', _b_.None, _b_.ImportWarning, _b_.None, 0],\n ['ignore', _b_.None, _b_.BytesWarning, _b_.None, 0]].map(\n function(x){return _b_.tuple.$factory(x)}),\n\n once_registry: _b_.dict.$factory(),\n\n warn: function(){},\n\n warn_explicit: function(){}\n\n}\n\n})(__BRYTHON__)\n"], "_webworker": [".js", "// Web Worker implementation\n\nvar $module = (function($B){\n\nvar _b_ = $B.builtins\n\nvar brython_scripts = //['brython', 'brython_stdlib']\n[\n 'unicode.min',\n 'brython_builtins', 'version_info', 'py2js', 'loaders',\n 'py_object', 'py_type', 'py_utils', 'py_builtin_functions',\n 'py_exceptions', 'py_range_slice', 'py_bytes', 'py_set', 'js_objects',\n 'stdlib_paths', 'py_import', 'py_float', 'py_int', 'py_long_int',\n 'py_complex', 'py_sort', 'py_list', 'py_string', 'py_dict',\n 'py_dom', 'py_generator', 'builtin_modules', 'py_import_hooks',\n 'async'\n ]\nvar wclass = $B.make_class(\"Worker\",\n function(worker){\n return {\n __class__: wclass,\n js: worker\n }\n }\n)\nwclass.__mro__ = [$B.JSObject, _b_.object]\nwclass.send = function(self){\n var $ = $B.args(\"send\", 1, {self: null}, [\"self\"], arguments, {}, \"args\",\n null),\n args = $.args\n for(var i = 0, len = args.length; i < len; i++){\n args[i] = $B.pyobj2structuredclone(args[i])\n }\n self.js.postMessage.apply(self.js, args)\n}\n\n$B.set_func_names(wclass, \"browser.worker\")\n\nvar _Worker = $B.make_class(\"Worker\", function(id, onmessage, onerror){\n var $ = $B.args(\"__init__\", 3, {id: null, onmessage: 
null, onerror: null},\n ['id', 'onmessage', 'onerror'], arguments,\n {onmessage: _b_.None, onerror: _b_.None}, null, null),\n id = $.id,\n src = $B.webworkers[id]\n if(src === undefined){\n throw _b_.KeyError.$factory(id)\n }\n var script_id = \"worker\" + $B.UUID(),\n js = __BRYTHON__.imported.javascript.py2js(src,\n script_id),\n header = 'var $locals_' + script_id +' = {}\\n';\n brython_scripts.forEach(function(script){\n var url = $B.brython_path + script + \".js?\" +\n (new Date()).getTime()\n header += 'importScripts(\"' + url + '\")\\n'\n })\n // restore brython_path\n header += '__BRYTHON__.brython_path = \"' + $B.brython_path +\n '\"\\n'\n // Call brython() to initialize internal Brython values\n header += 'brython(1)\\n'\n js = header + js\n var blob = new Blob([js], {type: \"application/js\"}),\n url = URL.createObjectURL(blob),\n w = new Worker(url),\n res = wclass.$factory(w)\n return res\n})\n\nreturn {\n Worker: _Worker\n}\n\n})(__BRYTHON__)\n"], "_zlib_utils": [".js", "\nfunction rfind(buf, seq){\n var buflen = buf.length,\n len = seq.length\n for(var i = buflen - len; i >= 0; i--){\n var chunk = buf.slice(i, i + len),\n found = true\n for(var j = 0; j < len; j++){\n if(chunk[j] != seq[j]){\n found = false\n break\n }\n }\n if(found){return i}\n }\n return -1\n}\n\nvar $module = (function($B){\n\n return {\n lz_generator: function(text, size, min_len){\n /*\n Returns a list of items based on the LZ algorithm, using the\n specified window size and a minimum match length.\n The items are a tuple (length, distance) if a match has been\n found, and a byte otherwise.\n */\n // 'text' is an instance of Python 'bytes' class, the actual\n // bytes are in text.source\n text = text.source\n if(min_len === undefined){\n min_len = 3\n }\n var pos = 0, // position in text\n items = [] // returned items\n while(pos < text.length){\n sequence = text.slice(pos, pos + min_len)\n if(sequence.length < 3){\n for(var i = pos; i < text.length; i++){\n items.push(text[i])\n }\n break\n }\n // Search the sequence in the 'size' previous bytes\n buf = text.slice(pos - size, pos)\n buf_pos = rfind(buf, sequence)\n if(buf_pos > -1){\n // Match of length 3 found; search a longer one\n var len = 1\n while(len < 259 &&\n buf_pos + len < buf.length &&\n pos + len < text.length &&\n text[pos + len] == buf[buf_pos + len]){\n len += 1\n }\n match = text.slice(pos, pos + len)\n // \"Lazy matching\": search longer match starting at next\n // position\n longer_match = false\n if(pos + len < text.length - 2){\n match2 = text.slice(pos + 1, pos + len + 2)\n longer_buf_pos = rfind(buf, match2)\n if(longer_buf_pos > -1){\n // found longer match : emit current byte as\n // literal and move 1 byte forward\n longer_match = true\n char = text[pos]\n items.push(char)\n pos += 1\n }\n }\n if(! longer_match){\n distance = buf.length - buf_pos\n items.push($B.fast_tuple([len, distance]))\n if(pos + len == text.length){\n break\n }else{\n pos += len\n items.push(text[pos])\n pos += 1\n }\n }\n }else{\n char = text[pos]\n items.push(char)\n pos += 1\n }\n }\n return items\n }\n }\n})(__BRYTHON__)"], "crypto_js.rollups.md5": [".js", "/*\nCryptoJS v3.1.2\ncode.google.com/p/crypto-js\n(c) 2009-2013 by Jeff Mott. 
All rights reserved.\ncode.google.com/p/crypto-js/wiki/License\n*/\nvar CryptoJS=CryptoJS||function(s,p){var m={},l=m.lib={},n=function(){},r=l.Base={extend:function(b){n.prototype=this;var h=new n;b&&h.mixIn(b);h.hasOwnProperty(\"init\")||(h.init=function(){h.$super.init.apply(this,arguments)});h.init.prototype=h;h.$super=this;return h},create:function(){var b=this.extend();b.init.apply(b,arguments);return b},init:function(){},mixIn:function(b){for(var h in b)b.hasOwnProperty(h)&&(this[h]=b[h]);b.hasOwnProperty(\"toString\")&&(this.toString=b.toString)},clone:function(){return this.init.prototype.extend(this)}},\nq=l.WordArray=r.extend({init:function(b,h){b=this.words=b||[];this.sigBytes=h!=p?h:4*b.length},toString:function(b){return(b||t).stringify(this)},concat:function(b){var h=this.words,a=b.words,j=this.sigBytes;b=b.sigBytes;this.clamp();if(j%4)for(var g=0;g>>2]|=(a[g>>>2]>>>24-8*(g%4)&255)<<24-8*((j+g)%4);else if(65535>>2]=a[g>>>2];else h.push.apply(h,a);this.sigBytes+=b;return this},clamp:function(){var b=this.words,h=this.sigBytes;b[h>>>2]&=4294967295<<\n32-8*(h%4);b.length=s.ceil(h/4)},clone:function(){var b=r.clone.call(this);b.words=this.words.slice(0);return b},random:function(b){for(var h=[],a=0;a>>2]>>>24-8*(j%4)&255;g.push((k>>>4).toString(16));g.push((k&15).toString(16))}return g.join(\"\")},parse:function(b){for(var a=b.length,g=[],j=0;j>>3]|=parseInt(b.substr(j,\n2),16)<<24-4*(j%8);return new q.init(g,a/2)}},a=v.Latin1={stringify:function(b){var a=b.words;b=b.sigBytes;for(var g=[],j=0;j>>2]>>>24-8*(j%4)&255));return g.join(\"\")},parse:function(b){for(var a=b.length,g=[],j=0;j>>2]|=(b.charCodeAt(j)&255)<<24-8*(j%4);return new q.init(g,a)}},u=v.Utf8={stringify:function(b){try{return decodeURIComponent(escape(a.stringify(b)))}catch(g){throw Error(\"Malformed UTF-8 data\");}},parse:function(b){return a.parse(unescape(encodeURIComponent(b)))}},\ng=l.BufferedBlockAlgorithm=r.extend({reset:function(){this._data=new q.init;this._nDataBytes=0},_append:function(b){\"string\"==typeof b&&(b=u.parse(b));this._data.concat(b);this._nDataBytes+=b.sigBytes},_process:function(b){var a=this._data,g=a.words,j=a.sigBytes,k=this.blockSize,m=j/(4*k),m=b?s.ceil(m):s.max((m|0)-this._minBufferSize,0);b=m*k;j=s.min(4*b,j);if(b){for(var l=0;l>>32-j)+k}function m(a,k,b,h,l,j,m){a=a+(k&h|b&~h)+l+m;return(a<>>32-j)+k}function l(a,k,b,h,l,j,m){a=a+(k^b^h)+l+m;return(a<>>32-j)+k}function n(a,k,b,h,l,j,m){a=a+(b^(k|~h))+l+m;return(a<>>32-j)+k}for(var r=CryptoJS,q=r.lib,v=q.WordArray,t=q.Hasher,q=r.algo,a=[],u=0;64>u;u++)a[u]=4294967296*s.abs(s.sin(u+1))|0;q=q.MD5=t.extend({_doReset:function(){this._hash=new v.init([1732584193,4023233417,2562383102,271733878])},\n_doProcessBlock:function(g,k){for(var b=0;16>b;b++){var h=k+b,w=g[h];g[h]=(w<<8|w>>>24)&16711935|(w<<24|w>>>8)&4278255360}var 
b=this._hash.words,h=g[k+0],w=g[k+1],j=g[k+2],q=g[k+3],r=g[k+4],s=g[k+5],t=g[k+6],u=g[k+7],v=g[k+8],x=g[k+9],y=g[k+10],z=g[k+11],A=g[k+12],B=g[k+13],C=g[k+14],D=g[k+15],c=b[0],d=b[1],e=b[2],f=b[3],c=p(c,d,e,f,h,7,a[0]),f=p(f,c,d,e,w,12,a[1]),e=p(e,f,c,d,j,17,a[2]),d=p(d,e,f,c,q,22,a[3]),c=p(c,d,e,f,r,7,a[4]),f=p(f,c,d,e,s,12,a[5]),e=p(e,f,c,d,t,17,a[6]),d=p(d,e,f,c,u,22,a[7]),\nc=p(c,d,e,f,v,7,a[8]),f=p(f,c,d,e,x,12,a[9]),e=p(e,f,c,d,y,17,a[10]),d=p(d,e,f,c,z,22,a[11]),c=p(c,d,e,f,A,7,a[12]),f=p(f,c,d,e,B,12,a[13]),e=p(e,f,c,d,C,17,a[14]),d=p(d,e,f,c,D,22,a[15]),c=m(c,d,e,f,w,5,a[16]),f=m(f,c,d,e,t,9,a[17]),e=m(e,f,c,d,z,14,a[18]),d=m(d,e,f,c,h,20,a[19]),c=m(c,d,e,f,s,5,a[20]),f=m(f,c,d,e,y,9,a[21]),e=m(e,f,c,d,D,14,a[22]),d=m(d,e,f,c,r,20,a[23]),c=m(c,d,e,f,x,5,a[24]),f=m(f,c,d,e,C,9,a[25]),e=m(e,f,c,d,q,14,a[26]),d=m(d,e,f,c,v,20,a[27]),c=m(c,d,e,f,B,5,a[28]),f=m(f,c,\nd,e,j,9,a[29]),e=m(e,f,c,d,u,14,a[30]),d=m(d,e,f,c,A,20,a[31]),c=l(c,d,e,f,s,4,a[32]),f=l(f,c,d,e,v,11,a[33]),e=l(e,f,c,d,z,16,a[34]),d=l(d,e,f,c,C,23,a[35]),c=l(c,d,e,f,w,4,a[36]),f=l(f,c,d,e,r,11,a[37]),e=l(e,f,c,d,u,16,a[38]),d=l(d,e,f,c,y,23,a[39]),c=l(c,d,e,f,B,4,a[40]),f=l(f,c,d,e,h,11,a[41]),e=l(e,f,c,d,q,16,a[42]),d=l(d,e,f,c,t,23,a[43]),c=l(c,d,e,f,x,4,a[44]),f=l(f,c,d,e,A,11,a[45]),e=l(e,f,c,d,D,16,a[46]),d=l(d,e,f,c,j,23,a[47]),c=n(c,d,e,f,h,6,a[48]),f=n(f,c,d,e,u,10,a[49]),e=n(e,f,c,d,\nC,15,a[50]),d=n(d,e,f,c,s,21,a[51]),c=n(c,d,e,f,A,6,a[52]),f=n(f,c,d,e,q,10,a[53]),e=n(e,f,c,d,y,15,a[54]),d=n(d,e,f,c,w,21,a[55]),c=n(c,d,e,f,v,6,a[56]),f=n(f,c,d,e,D,10,a[57]),e=n(e,f,c,d,t,15,a[58]),d=n(d,e,f,c,B,21,a[59]),c=n(c,d,e,f,r,6,a[60]),f=n(f,c,d,e,z,10,a[61]),e=n(e,f,c,d,j,15,a[62]),d=n(d,e,f,c,x,21,a[63]);b[0]=b[0]+c|0;b[1]=b[1]+d|0;b[2]=b[2]+e|0;b[3]=b[3]+f|0},_doFinalize:function(){var a=this._data,k=a.words,b=8*this._nDataBytes,h=8*a.sigBytes;k[h>>>5]|=128<<24-h%32;var l=s.floor(b/\n4294967296);k[(h+64>>>9<<4)+15]=(l<<8|l>>>24)&16711935|(l<<24|l>>>8)&4278255360;k[(h+64>>>9<<4)+14]=(b<<8|b>>>24)&16711935|(b<<24|b>>>8)&4278255360;a.sigBytes=4*(k.length+1);this._process();a=this._hash;k=a.words;for(b=0;4>b;b++)h=k[b],k[b]=(h<<8|h>>>24)&16711935|(h<<24|h>>>8)&4278255360;return a},clone:function(){var a=t.clone.call(this);a._hash=this._hash.clone();return a}});r.MD5=t._createHelper(q);r.HmacMD5=t._createHmacHelper(q)})(Math);\n"], "crypto_js.rollups.sha1": [".js", "/*\nCryptoJS v3.1.2\ncode.google.com/p/crypto-js\n(c) 2009-2013 by Jeff Mott. 
All rights reserved.\ncode.google.com/p/crypto-js/wiki/License\n*/\nvar CryptoJS=CryptoJS||function(e,m){var p={},j=p.lib={},l=function(){},f=j.Base={extend:function(a){l.prototype=this;var c=new l;a&&c.mixIn(a);c.hasOwnProperty(\"init\")||(c.init=function(){c.$super.init.apply(this,arguments)});c.init.prototype=c;c.$super=this;return c},create:function(){var a=this.extend();a.init.apply(a,arguments);return a},init:function(){},mixIn:function(a){for(var c in a)a.hasOwnProperty(c)&&(this[c]=a[c]);a.hasOwnProperty(\"toString\")&&(this.toString=a.toString)},clone:function(){return this.init.prototype.extend(this)}},\nn=j.WordArray=f.extend({init:function(a,c){a=this.words=a||[];this.sigBytes=c!=m?c:4*a.length},toString:function(a){return(a||h).stringify(this)},concat:function(a){var c=this.words,q=a.words,d=this.sigBytes;a=a.sigBytes;this.clamp();if(d%4)for(var b=0;b>>2]|=(q[b>>>2]>>>24-8*(b%4)&255)<<24-8*((d+b)%4);else if(65535>>2]=q[b>>>2];else c.push.apply(c,q);this.sigBytes+=a;return this},clamp:function(){var a=this.words,c=this.sigBytes;a[c>>>2]&=4294967295<<\n32-8*(c%4);a.length=e.ceil(c/4)},clone:function(){var a=f.clone.call(this);a.words=this.words.slice(0);return a},random:function(a){for(var c=[],b=0;b>>2]>>>24-8*(d%4)&255;b.push((f>>>4).toString(16));b.push((f&15).toString(16))}return b.join(\"\")},parse:function(a){for(var c=a.length,b=[],d=0;d>>3]|=parseInt(a.substr(d,\n2),16)<<24-4*(d%8);return new n.init(b,c/2)}},g=b.Latin1={stringify:function(a){var c=a.words;a=a.sigBytes;for(var b=[],d=0;d>>2]>>>24-8*(d%4)&255));return b.join(\"\")},parse:function(a){for(var c=a.length,b=[],d=0;d>>2]|=(a.charCodeAt(d)&255)<<24-8*(d%4);return new n.init(b,c)}},r=b.Utf8={stringify:function(a){try{return decodeURIComponent(escape(g.stringify(a)))}catch(c){throw Error(\"Malformed UTF-8 data\");}},parse:function(a){return g.parse(unescape(encodeURIComponent(a)))}},\nk=j.BufferedBlockAlgorithm=f.extend({reset:function(){this._data=new n.init;this._nDataBytes=0},_append:function(a){\"string\"==typeof a&&(a=r.parse(a));this._data.concat(a);this._nDataBytes+=a.sigBytes},_process:function(a){var c=this._data,b=c.words,d=c.sigBytes,f=this.blockSize,h=d/(4*f),h=a?e.ceil(h):e.max((h|0)-this._minBufferSize,0);a=h*f;d=e.min(4*a,d);if(a){for(var g=0;ga;a++){if(16>a)l[a]=f[n+a]|0;else{var c=l[a-3]^l[a-8]^l[a-14]^l[a-16];l[a]=c<<1|c>>>31}c=(h<<5|h>>>27)+j+l[a];c=20>a?c+((g&e|~g&k)+1518500249):40>a?c+((g^e^k)+1859775393):60>a?c+((g&e|g&k|e&k)-1894007588):c+((g^e^\nk)-899497514);j=k;k=e;e=g<<30|g>>>2;g=h;h=c}b[0]=b[0]+h|0;b[1]=b[1]+g|0;b[2]=b[2]+e|0;b[3]=b[3]+k|0;b[4]=b[4]+j|0},_doFinalize:function(){var f=this._data,e=f.words,b=8*this._nDataBytes,h=8*f.sigBytes;e[h>>>5]|=128<<24-h%32;e[(h+64>>>9<<4)+14]=Math.floor(b/4294967296);e[(h+64>>>9<<4)+15]=b;f.sigBytes=4*e.length;this._process();return this._hash},clone:function(){var e=j.clone.call(this);e._hash=this._hash.clone();return e}});e.SHA1=j._createHelper(m);e.HmacSHA1=j._createHmacHelper(m)})();\n"], "crypto_js.rollups.sha224": [".js", "/*\nCryptoJS v3.1.2\ncode.google.com/p/crypto-js\n(c) 2009-2013 by Jeff Mott. 
All rights reserved.\ncode.google.com/p/crypto-js/wiki/License\n*/\nvar CryptoJS=CryptoJS||function(g,l){var f={},k=f.lib={},h=function(){},m=k.Base={extend:function(a){h.prototype=this;var c=new h;a&&c.mixIn(a);c.hasOwnProperty(\"init\")||(c.init=function(){c.$super.init.apply(this,arguments)});c.init.prototype=c;c.$super=this;return c},create:function(){var a=this.extend();a.init.apply(a,arguments);return a},init:function(){},mixIn:function(a){for(var c in a)a.hasOwnProperty(c)&&(this[c]=a[c]);a.hasOwnProperty(\"toString\")&&(this.toString=a.toString)},clone:function(){return this.init.prototype.extend(this)}},\nq=k.WordArray=m.extend({init:function(a,c){a=this.words=a||[];this.sigBytes=c!=l?c:4*a.length},toString:function(a){return(a||s).stringify(this)},concat:function(a){var c=this.words,d=a.words,b=this.sigBytes;a=a.sigBytes;this.clamp();if(b%4)for(var e=0;e>>2]|=(d[e>>>2]>>>24-8*(e%4)&255)<<24-8*((b+e)%4);else if(65535>>2]=d[e>>>2];else c.push.apply(c,d);this.sigBytes+=a;return this},clamp:function(){var a=this.words,c=this.sigBytes;a[c>>>2]&=4294967295<<\n32-8*(c%4);a.length=g.ceil(c/4)},clone:function(){var a=m.clone.call(this);a.words=this.words.slice(0);return a},random:function(a){for(var c=[],d=0;d>>2]>>>24-8*(b%4)&255;d.push((e>>>4).toString(16));d.push((e&15).toString(16))}return d.join(\"\")},parse:function(a){for(var c=a.length,d=[],b=0;b>>3]|=parseInt(a.substr(b,\n2),16)<<24-4*(b%8);return new q.init(d,c/2)}},n=t.Latin1={stringify:function(a){var c=a.words;a=a.sigBytes;for(var d=[],b=0;b>>2]>>>24-8*(b%4)&255));return d.join(\"\")},parse:function(a){for(var c=a.length,d=[],b=0;b>>2]|=(a.charCodeAt(b)&255)<<24-8*(b%4);return new q.init(d,c)}},j=t.Utf8={stringify:function(a){try{return decodeURIComponent(escape(n.stringify(a)))}catch(c){throw Error(\"Malformed UTF-8 data\");}},parse:function(a){return n.parse(unescape(encodeURIComponent(a)))}},\nw=k.BufferedBlockAlgorithm=m.extend({reset:function(){this._data=new q.init;this._nDataBytes=0},_append:function(a){\"string\"==typeof a&&(a=j.parse(a));this._data.concat(a);this._nDataBytes+=a.sigBytes},_process:function(a){var c=this._data,d=c.words,b=c.sigBytes,e=this.blockSize,f=b/(4*e),f=a?g.ceil(f):g.max((f|0)-this._minBufferSize,0);a=f*e;b=g.min(4*a,b);if(a){for(var u=0;un;){var j;a:{j=s;for(var w=g.sqrt(j),v=2;v<=w;v++)if(!(j%v)){j=!1;break a}j=!0}j&&(8>n&&(m[n]=t(g.pow(s,0.5))),q[n]=t(g.pow(s,1/3)),n++);s++}var a=[],f=f.SHA256=h.extend({_doReset:function(){this._hash=new k.init(m.slice(0))},_doProcessBlock:function(c,d){for(var b=this._hash.words,e=b[0],f=b[1],g=b[2],k=b[3],h=b[4],l=b[5],m=b[6],n=b[7],p=0;64>p;p++){if(16>p)a[p]=\nc[d+p]|0;else{var j=a[p-15],r=a[p-2];a[p]=((j<<25|j>>>7)^(j<<14|j>>>18)^j>>>3)+a[p-7]+((r<<15|r>>>17)^(r<<13|r>>>19)^r>>>10)+a[p-16]}j=n+((h<<26|h>>>6)^(h<<21|h>>>11)^(h<<7|h>>>25))+(h&l^~h&m)+q[p]+a[p];r=((e<<30|e>>>2)^(e<<19|e>>>13)^(e<<10|e>>>22))+(e&f^e&g^f&g);n=m;m=l;l=h;h=k+j|0;k=g;g=f;f=e;e=j+r|0}b[0]=b[0]+e|0;b[1]=b[1]+f|0;b[2]=b[2]+g|0;b[3]=b[3]+k|0;b[4]=b[4]+h|0;b[5]=b[5]+l|0;b[6]=b[6]+m|0;b[7]=b[7]+n|0},_doFinalize:function(){var a=this._data,d=a.words,b=8*this._nDataBytes,e=8*a.sigBytes;\nd[e>>>5]|=128<<24-e%32;d[(e+64>>>9<<4)+14]=g.floor(b/4294967296);d[(e+64>>>9<<4)+15]=b;a.sigBytes=4*d.length;this._process();return this._hash},clone:function(){var a=h.clone.call(this);a._hash=this._hash.clone();return a}});l.SHA256=h._createHelper(f);l.HmacSHA256=h._createHmacHelper(f)})(Math);\n(function(){var 
g=CryptoJS,l=g.lib.WordArray,f=g.algo,k=f.SHA256,f=f.SHA224=k.extend({_doReset:function(){this._hash=new l.init([3238371032,914150663,812702999,4144912697,4290775857,1750603025,1694076839,3204075428])},_doFinalize:function(){var f=k._doFinalize.call(this);f.sigBytes-=4;return f}});g.SHA224=k._createHelper(f);g.HmacSHA224=k._createHmacHelper(f)})();\n"], "crypto_js.rollups.sha256": [".js", "/*\nCryptoJS v3.1.2\ncode.google.com/p/crypto-js\n(c) 2009-2013 by Jeff Mott. All rights reserved.\ncode.google.com/p/crypto-js/wiki/License\n*/\nvar CryptoJS=CryptoJS||function(h,s){var f={},t=f.lib={},g=function(){},j=t.Base={extend:function(a){g.prototype=this;var c=new g;a&&c.mixIn(a);c.hasOwnProperty(\"init\")||(c.init=function(){c.$super.init.apply(this,arguments)});c.init.prototype=c;c.$super=this;return c},create:function(){var a=this.extend();a.init.apply(a,arguments);return a},init:function(){},mixIn:function(a){for(var c in a)a.hasOwnProperty(c)&&(this[c]=a[c]);a.hasOwnProperty(\"toString\")&&(this.toString=a.toString)},clone:function(){return this.init.prototype.extend(this)}},\nq=t.WordArray=j.extend({init:function(a,c){a=this.words=a||[];this.sigBytes=c!=s?c:4*a.length},toString:function(a){return(a||u).stringify(this)},concat:function(a){var c=this.words,d=a.words,b=this.sigBytes;a=a.sigBytes;this.clamp();if(b%4)for(var e=0;e>>2]|=(d[e>>>2]>>>24-8*(e%4)&255)<<24-8*((b+e)%4);else if(65535>>2]=d[e>>>2];else c.push.apply(c,d);this.sigBytes+=a;return this},clamp:function(){var a=this.words,c=this.sigBytes;a[c>>>2]&=4294967295<<\n32-8*(c%4);a.length=h.ceil(c/4)},clone:function(){var a=j.clone.call(this);a.words=this.words.slice(0);return a},random:function(a){for(var c=[],d=0;d>>2]>>>24-8*(b%4)&255;d.push((e>>>4).toString(16));d.push((e&15).toString(16))}return d.join(\"\")},parse:function(a){for(var c=a.length,d=[],b=0;b>>3]|=parseInt(a.substr(b,\n2),16)<<24-4*(b%8);return new q.init(d,c/2)}},k=v.Latin1={stringify:function(a){var c=a.words;a=a.sigBytes;for(var d=[],b=0;b>>2]>>>24-8*(b%4)&255));return d.join(\"\")},parse:function(a){for(var c=a.length,d=[],b=0;b>>2]|=(a.charCodeAt(b)&255)<<24-8*(b%4);return new q.init(d,c)}},l=v.Utf8={stringify:function(a){try{return decodeURIComponent(escape(k.stringify(a)))}catch(c){throw Error(\"Malformed UTF-8 data\");}},parse:function(a){return k.parse(unescape(encodeURIComponent(a)))}},\nx=t.BufferedBlockAlgorithm=j.extend({reset:function(){this._data=new q.init;this._nDataBytes=0},_append:function(a){\"string\"==typeof a&&(a=l.parse(a));this._data.concat(a);this._nDataBytes+=a.sigBytes},_process:function(a){var c=this._data,d=c.words,b=c.sigBytes,e=this.blockSize,f=b/(4*e),f=a?h.ceil(f):h.max((f|0)-this._minBufferSize,0);a=f*e;b=h.min(4*a,b);if(a){for(var m=0;mk;){var l;a:{l=u;for(var x=h.sqrt(l),w=2;w<=x;w++)if(!(l%w)){l=!1;break a}l=!0}l&&(8>k&&(j[k]=v(h.pow(u,0.5))),q[k]=v(h.pow(u,1/3)),k++);u++}var a=[],f=f.SHA256=g.extend({_doReset:function(){this._hash=new t.init(j.slice(0))},_doProcessBlock:function(c,d){for(var b=this._hash.words,e=b[0],f=b[1],m=b[2],h=b[3],p=b[4],j=b[5],k=b[6],l=b[7],n=0;64>n;n++){if(16>n)a[n]=\nc[d+n]|0;else{var 
r=a[n-15],g=a[n-2];a[n]=((r<<25|r>>>7)^(r<<14|r>>>18)^r>>>3)+a[n-7]+((g<<15|g>>>17)^(g<<13|g>>>19)^g>>>10)+a[n-16]}r=l+((p<<26|p>>>6)^(p<<21|p>>>11)^(p<<7|p>>>25))+(p&j^~p&k)+q[n]+a[n];g=((e<<30|e>>>2)^(e<<19|e>>>13)^(e<<10|e>>>22))+(e&f^e&m^f&m);l=k;k=j;j=p;p=h+r|0;h=m;m=f;f=e;e=r+g|0}b[0]=b[0]+e|0;b[1]=b[1]+f|0;b[2]=b[2]+m|0;b[3]=b[3]+h|0;b[4]=b[4]+p|0;b[5]=b[5]+j|0;b[6]=b[6]+k|0;b[7]=b[7]+l|0},_doFinalize:function(){var a=this._data,d=a.words,b=8*this._nDataBytes,e=8*a.sigBytes;\nd[e>>>5]|=128<<24-e%32;d[(e+64>>>9<<4)+14]=h.floor(b/4294967296);d[(e+64>>>9<<4)+15]=b;a.sigBytes=4*d.length;this._process();return this._hash},clone:function(){var a=g.clone.call(this);a._hash=this._hash.clone();return a}});s.SHA256=g._createHelper(f);s.HmacSHA256=g._createHmacHelper(f)})(Math);\n"], "crypto_js.rollups.sha3": [".js", "/*\nCryptoJS v3.1.2\ncode.google.com/p/crypto-js\n(c) 2009-2013 by Jeff Mott. All rights reserved.\ncode.google.com/p/crypto-js/wiki/License\n*/\nvar CryptoJS=CryptoJS||function(v,p){var d={},u=d.lib={},r=function(){},f=u.Base={extend:function(a){r.prototype=this;var b=new r;a&&b.mixIn(a);b.hasOwnProperty(\"init\")||(b.init=function(){b.$super.init.apply(this,arguments)});b.init.prototype=b;b.$super=this;return b},create:function(){var a=this.extend();a.init.apply(a,arguments);return a},init:function(){},mixIn:function(a){for(var b in a)a.hasOwnProperty(b)&&(this[b]=a[b]);a.hasOwnProperty(\"toString\")&&(this.toString=a.toString)},clone:function(){return this.init.prototype.extend(this)}},\ns=u.WordArray=f.extend({init:function(a,b){a=this.words=a||[];this.sigBytes=b!=p?b:4*a.length},toString:function(a){return(a||y).stringify(this)},concat:function(a){var b=this.words,c=a.words,j=this.sigBytes;a=a.sigBytes;this.clamp();if(j%4)for(var n=0;n>>2]|=(c[n>>>2]>>>24-8*(n%4)&255)<<24-8*((j+n)%4);else if(65535>>2]=c[n>>>2];else b.push.apply(b,c);this.sigBytes+=a;return this},clamp:function(){var a=this.words,b=this.sigBytes;a[b>>>2]&=4294967295<<\n32-8*(b%4);a.length=v.ceil(b/4)},clone:function(){var a=f.clone.call(this);a.words=this.words.slice(0);return a},random:function(a){for(var b=[],c=0;c>>2]>>>24-8*(j%4)&255;c.push((n>>>4).toString(16));c.push((n&15).toString(16))}return c.join(\"\")},parse:function(a){for(var b=a.length,c=[],j=0;j>>3]|=parseInt(a.substr(j,\n2),16)<<24-4*(j%8);return new s.init(c,b/2)}},e=x.Latin1={stringify:function(a){var b=a.words;a=a.sigBytes;for(var c=[],j=0;j>>2]>>>24-8*(j%4)&255));return c.join(\"\")},parse:function(a){for(var b=a.length,c=[],j=0;j>>2]|=(a.charCodeAt(j)&255)<<24-8*(j%4);return new s.init(c,b)}},q=x.Utf8={stringify:function(a){try{return decodeURIComponent(escape(e.stringify(a)))}catch(b){throw Error(\"Malformed UTF-8 data\");}},parse:function(a){return e.parse(unescape(encodeURIComponent(a)))}},\nt=u.BufferedBlockAlgorithm=f.extend({reset:function(){this._data=new s.init;this._nDataBytes=0},_append:function(a){\"string\"==typeof a&&(a=q.parse(a));this._data.concat(a);this._nDataBytes+=a.sigBytes},_process:function(a){var b=this._data,c=b.words,j=b.sigBytes,n=this.blockSize,e=j/(4*n),e=a?v.ceil(e):v.max((e|0)-this._minBufferSize,0);a=e*n;j=v.min(4*a,j);if(a){for(var f=0;ft;t++){s[e+5*q]=(t+1)*(t+2)/2%64;var w=(2*e+3*q)%5,e=q%5,q=w}for(e=0;5>e;e++)for(q=0;5>q;q++)x[e+5*q]=q+5*((2*e+3*q)%5);e=1;for(q=0;24>q;q++){for(var a=w=t=0;7>a;a++){if(e&1){var b=(1<b?w^=1<e;e++)c[e]=f.create();d=d.SHA3=r.extend({cfg:r.cfg.extend({outputLength:512}),_doReset:function(){for(var a=this._state=\n[],b=0;25>b;b++)a[b]=new 
f.init;this.blockSize=(1600-2*this.cfg.outputLength)/32},_doProcessBlock:function(a,b){for(var e=this._state,f=this.blockSize/2,h=0;h>>24)&16711935|(l<<24|l>>>8)&4278255360,m=(m<<8|m>>>24)&16711935|(m<<24|m>>>8)&4278255360,g=e[h];g.high^=m;g.low^=l}for(f=0;24>f;f++){for(h=0;5>h;h++){for(var d=l=0,k=0;5>k;k++)g=e[h+5*k],l^=g.high,d^=g.low;g=c[h];g.high=l;g.low=d}for(h=0;5>h;h++){g=c[(h+4)%5];l=c[(h+1)%5];m=l.high;k=l.low;l=g.high^\n(m<<1|k>>>31);d=g.low^(k<<1|m>>>31);for(k=0;5>k;k++)g=e[h+5*k],g.high^=l,g.low^=d}for(m=1;25>m;m++)g=e[m],h=g.high,g=g.low,k=s[m],32>k?(l=h<>>32-k,d=g<>>32-k):(l=g<>>64-k,d=h<>>64-k),g=c[x[m]],g.high=l,g.low=d;g=c[0];h=e[0];g.high=h.high;g.low=h.low;for(h=0;5>h;h++)for(k=0;5>k;k++)m=h+5*k,g=e[m],l=c[m],m=c[(h+1)%5+5*k],d=c[(h+2)%5+5*k],g.high=l.high^~m.high&d.high,g.low=l.low^~m.low&d.low;g=e[0];h=y[f];g.high^=h.high;g.low^=h.low}},_doFinalize:function(){var a=this._data,\nb=a.words,c=8*a.sigBytes,e=32*this.blockSize;b[c>>>5]|=1<<24-c%32;b[(v.ceil((c+1)/e)*e>>>5)-1]|=128;a.sigBytes=4*b.length;this._process();for(var a=this._state,b=this.cfg.outputLength/8,c=b/8,e=[],h=0;h>>24)&16711935|(f<<24|f>>>8)&4278255360,d=(d<<8|d>>>24)&16711935|(d<<24|d>>>8)&4278255360;e.push(d);e.push(f)}return new u.init(e,b)},clone:function(){for(var a=r.clone.call(this),b=a._state=this._state.slice(0),c=0;25>c;c++)b[c]=b[c].clone();return a}});\np.SHA3=r._createHelper(d);p.HmacSHA3=r._createHmacHelper(d)})(Math);\n"], "crypto_js.rollups.sha384": [".js", "/*\nCryptoJS v3.1.2\ncode.google.com/p/crypto-js\n(c) 2009-2013 by Jeff Mott. All rights reserved.\ncode.google.com/p/crypto-js/wiki/License\n*/\nvar CryptoJS=CryptoJS||function(a,c){var d={},j=d.lib={},f=function(){},m=j.Base={extend:function(a){f.prototype=this;var b=new f;a&&b.mixIn(a);b.hasOwnProperty(\"init\")||(b.init=function(){b.$super.init.apply(this,arguments)});b.init.prototype=b;b.$super=this;return b},create:function(){var a=this.extend();a.init.apply(a,arguments);return a},init:function(){},mixIn:function(a){for(var b in a)a.hasOwnProperty(b)&&(this[b]=a[b]);a.hasOwnProperty(\"toString\")&&(this.toString=a.toString)},clone:function(){return this.init.prototype.extend(this)}},\nB=j.WordArray=m.extend({init:function(a,b){a=this.words=a||[];this.sigBytes=b!=c?b:4*a.length},toString:function(a){return(a||y).stringify(this)},concat:function(a){var b=this.words,g=a.words,e=this.sigBytes;a=a.sigBytes;this.clamp();if(e%4)for(var k=0;k>>2]|=(g[k>>>2]>>>24-8*(k%4)&255)<<24-8*((e+k)%4);else if(65535>>2]=g[k>>>2];else b.push.apply(b,g);this.sigBytes+=a;return this},clamp:function(){var n=this.words,b=this.sigBytes;n[b>>>2]&=4294967295<<\n32-8*(b%4);n.length=a.ceil(b/4)},clone:function(){var a=m.clone.call(this);a.words=this.words.slice(0);return a},random:function(n){for(var b=[],g=0;g>>2]>>>24-8*(e%4)&255;g.push((k>>>4).toString(16));g.push((k&15).toString(16))}return g.join(\"\")},parse:function(a){for(var b=a.length,g=[],e=0;e>>3]|=parseInt(a.substr(e,\n2),16)<<24-4*(e%8);return new B.init(g,b/2)}},F=v.Latin1={stringify:function(a){var b=a.words;a=a.sigBytes;for(var g=[],e=0;e>>2]>>>24-8*(e%4)&255));return g.join(\"\")},parse:function(a){for(var b=a.length,g=[],e=0;e>>2]|=(a.charCodeAt(e)&255)<<24-8*(e%4);return new B.init(g,b)}},ha=v.Utf8={stringify:function(a){try{return decodeURIComponent(escape(F.stringify(a)))}catch(b){throw Error(\"Malformed UTF-8 data\");}},parse:function(a){return F.parse(unescape(encodeURIComponent(a)))}},\nZ=j.BufferedBlockAlgorithm=m.extend({reset:function(){this._data=new 
B.init;this._nDataBytes=0},_append:function(a){\"string\"==typeof a&&(a=ha.parse(a));this._data.concat(a);this._nDataBytes+=a.sigBytes},_process:function(n){var b=this._data,g=b.words,e=b.sigBytes,k=this.blockSize,m=e/(4*k),m=n?a.ceil(m):a.max((m|0)-this._minBufferSize,0);n=m*k;e=a.min(4*n,e);if(n){for(var c=0;cy;y++)v[y]=a();j=j.SHA512=d.extend({_doReset:function(){this._hash=new m.init([new f.init(1779033703,4089235720),new f.init(3144134277,2227873595),new f.init(1013904242,4271175723),new f.init(2773480762,1595750129),new f.init(1359893119,2917565137),new f.init(2600822924,725511199),new f.init(528734635,4215389547),new f.init(1541459225,327033209)])},_doProcessBlock:function(a,c){for(var d=this._hash.words,\nf=d[0],j=d[1],b=d[2],g=d[3],e=d[4],k=d[5],m=d[6],d=d[7],y=f.high,M=f.low,$=j.high,N=j.low,aa=b.high,O=b.low,ba=g.high,P=g.low,ca=e.high,Q=e.low,da=k.high,R=k.low,ea=m.high,S=m.low,fa=d.high,T=d.low,s=y,p=M,G=$,D=N,H=aa,E=O,W=ba,I=P,t=ca,q=Q,U=da,J=R,V=ea,K=S,X=fa,L=T,u=0;80>u;u++){var z=v[u];if(16>u)var r=z.high=a[c+2*u]|0,h=z.low=a[c+2*u+1]|0;else{var r=v[u-15],h=r.high,w=r.low,r=(h>>>1|w<<31)^(h>>>8|w<<24)^h>>>7,w=(w>>>1|h<<31)^(w>>>8|h<<24)^(w>>>7|h<<25),C=v[u-2],h=C.high,l=C.low,C=(h>>>19|l<<\n13)^(h<<3|l>>>29)^h>>>6,l=(l>>>19|h<<13)^(l<<3|h>>>29)^(l>>>6|h<<26),h=v[u-7],Y=h.high,A=v[u-16],x=A.high,A=A.low,h=w+h.low,r=r+Y+(h>>>0>>0?1:0),h=h+l,r=r+C+(h>>>0>>0?1:0),h=h+A,r=r+x+(h>>>0>>0?1:0);z.high=r;z.low=h}var Y=t&U^~t&V,A=q&J^~q&K,z=s&G^s&H^G&H,ja=p&D^p&E^D&E,w=(s>>>28|p<<4)^(s<<30|p>>>2)^(s<<25|p>>>7),C=(p>>>28|s<<4)^(p<<30|s>>>2)^(p<<25|s>>>7),l=B[u],ka=l.high,ga=l.low,l=L+((q>>>14|t<<18)^(q>>>18|t<<14)^(q<<23|t>>>9)),x=X+((t>>>14|q<<18)^(t>>>18|q<<14)^(t<<23|q>>>9))+(l>>>0<\nL>>>0?1:0),l=l+A,x=x+Y+(l>>>0>>0?1:0),l=l+ga,x=x+ka+(l>>>0>>0?1:0),l=l+h,x=x+r+(l>>>0>>0?1:0),h=C+ja,z=w+z+(h>>>0>>0?1:0),X=V,L=K,V=U,K=J,U=t,J=q,q=I+l|0,t=W+x+(q>>>0>>0?1:0)|0,W=H,I=E,H=G,E=D,G=s,D=p,p=l+h|0,s=x+z+(p>>>0>>0?1:0)|0}M=f.low=M+p;f.high=y+s+(M>>>0
>>0?1:0);N=j.low=N+D;j.high=$+G+(N>>>0>>0?1:0);O=b.low=O+E;b.high=aa+H+(O>>>0>>0?1:0);P=g.low=P+I;g.high=ba+W+(P>>>0>>0?1:0);Q=e.low=Q+q;e.high=ca+t+(Q>>>0>>0?1:0);R=k.low=R+J;k.high=da+U+(R>>>0>>0?1:0);\nS=m.low=S+K;m.high=ea+V+(S>>>0>>0?1:0);T=d.low=T+L;d.high=fa+X+(T>>>0>>0?1:0)},_doFinalize:function(){var a=this._data,c=a.words,d=8*this._nDataBytes,f=8*a.sigBytes;c[f>>>5]|=128<<24-f%32;c[(f+128>>>10<<5)+30]=Math.floor(d/4294967296);c[(f+128>>>10<<5)+31]=d;a.sigBytes=4*c.length;this._process();return this._hash.toX32()},clone:function(){var a=d.clone.call(this);a._hash=this._hash.clone();return a},blockSize:32});c.SHA512=d._createHelper(j);c.HmacSHA512=d._createHmacHelper(j)})();\n(function(){var a=CryptoJS,c=a.x64,d=c.Word,j=c.WordArray,c=a.algo,f=c.SHA512,c=c.SHA384=f.extend({_doReset:function(){this._hash=new j.init([new d.init(3418070365,3238371032),new d.init(1654270250,914150663),new d.init(2438529370,812702999),new d.init(355462360,4144912697),new d.init(1731405415,4290775857),new d.init(2394180231,1750603025),new d.init(3675008525,1694076839),new d.init(1203062813,3204075428)])},_doFinalize:function(){var a=f._doFinalize.call(this);a.sigBytes-=16;return a}});a.SHA384=\nf._createHelper(c);a.HmacSHA384=f._createHmacHelper(c)})();\n"], "crypto_js.rollups.sha512": [".js", "/*\nCryptoJS v3.1.2\ncode.google.com/p/crypto-js\n(c) 2009-2013 by Jeff Mott. All rights reserved.\ncode.google.com/p/crypto-js/wiki/License\n*/\nvar CryptoJS=CryptoJS||function(a,m){var r={},f=r.lib={},g=function(){},l=f.Base={extend:function(a){g.prototype=this;var b=new g;a&&b.mixIn(a);b.hasOwnProperty(\"init\")||(b.init=function(){b.$super.init.apply(this,arguments)});b.init.prototype=b;b.$super=this;return b},create:function(){var a=this.extend();a.init.apply(a,arguments);return a},init:function(){},mixIn:function(a){for(var b in a)a.hasOwnProperty(b)&&(this[b]=a[b]);a.hasOwnProperty(\"toString\")&&(this.toString=a.toString)},clone:function(){return this.init.prototype.extend(this)}},\np=f.WordArray=l.extend({init:function(a,b){a=this.words=a||[];this.sigBytes=b!=m?b:4*a.length},toString:function(a){return(a||q).stringify(this)},concat:function(a){var b=this.words,d=a.words,c=this.sigBytes;a=a.sigBytes;this.clamp();if(c%4)for(var j=0;j>>2]|=(d[j>>>2]>>>24-8*(j%4)&255)<<24-8*((c+j)%4);else if(65535>>2]=d[j>>>2];else b.push.apply(b,d);this.sigBytes+=a;return this},clamp:function(){var n=this.words,b=this.sigBytes;n[b>>>2]&=4294967295<<\n32-8*(b%4);n.length=a.ceil(b/4)},clone:function(){var a=l.clone.call(this);a.words=this.words.slice(0);return a},random:function(n){for(var b=[],d=0;d>>2]>>>24-8*(c%4)&255;d.push((j>>>4).toString(16));d.push((j&15).toString(16))}return d.join(\"\")},parse:function(a){for(var b=a.length,d=[],c=0;c>>3]|=parseInt(a.substr(c,\n2),16)<<24-4*(c%8);return new p.init(d,b/2)}},G=y.Latin1={stringify:function(a){var b=a.words;a=a.sigBytes;for(var d=[],c=0;c>>2]>>>24-8*(c%4)&255));return d.join(\"\")},parse:function(a){for(var b=a.length,d=[],c=0;c>>2]|=(a.charCodeAt(c)&255)<<24-8*(c%4);return new p.init(d,b)}},fa=y.Utf8={stringify:function(a){try{return decodeURIComponent(escape(G.stringify(a)))}catch(b){throw Error(\"Malformed UTF-8 data\");}},parse:function(a){return G.parse(unescape(encodeURIComponent(a)))}},\nh=f.BufferedBlockAlgorithm=l.extend({reset:function(){this._data=new p.init;this._nDataBytes=0},_append:function(a){\"string\"==typeof a&&(a=fa.parse(a));this._data.concat(a);this._nDataBytes+=a.sigBytes},_process:function(n){var 
b=this._data,d=b.words,c=b.sigBytes,j=this.blockSize,l=c/(4*j),l=n?a.ceil(l):a.max((l|0)-this._minBufferSize,0);n=l*j;c=a.min(4*n,c);if(n){for(var h=0;hq;q++)y[q]=a();f=f.SHA512=r.extend({_doReset:function(){this._hash=new l.init([new g.init(1779033703,4089235720),new g.init(3144134277,2227873595),new g.init(1013904242,4271175723),new g.init(2773480762,1595750129),new g.init(1359893119,2917565137),new g.init(2600822924,725511199),new g.init(528734635,4215389547),new g.init(1541459225,327033209)])},_doProcessBlock:function(a,f){for(var h=this._hash.words,\ng=h[0],n=h[1],b=h[2],d=h[3],c=h[4],j=h[5],l=h[6],h=h[7],q=g.high,m=g.low,r=n.high,N=n.low,Z=b.high,O=b.low,$=d.high,P=d.low,aa=c.high,Q=c.low,ba=j.high,R=j.low,ca=l.high,S=l.low,da=h.high,T=h.low,v=q,s=m,H=r,E=N,I=Z,F=O,W=$,J=P,w=aa,t=Q,U=ba,K=R,V=ca,L=S,X=da,M=T,x=0;80>x;x++){var B=y[x];if(16>x)var u=B.high=a[f+2*x]|0,e=B.low=a[f+2*x+1]|0;else{var u=y[x-15],e=u.high,z=u.low,u=(e>>>1|z<<31)^(e>>>8|z<<24)^e>>>7,z=(z>>>1|e<<31)^(z>>>8|e<<24)^(z>>>7|e<<25),D=y[x-2],e=D.high,k=D.low,D=(e>>>19|k<<13)^\n(e<<3|k>>>29)^e>>>6,k=(k>>>19|e<<13)^(k<<3|e>>>29)^(k>>>6|e<<26),e=y[x-7],Y=e.high,C=y[x-16],A=C.high,C=C.low,e=z+e.low,u=u+Y+(e>>>0>>0?1:0),e=e+k,u=u+D+(e>>>0>>0?1:0),e=e+C,u=u+A+(e>>>0>>0?1:0);B.high=u;B.low=e}var Y=w&U^~w&V,C=t&K^~t&L,B=v&H^v&I^H&I,ha=s&E^s&F^E&F,z=(v>>>28|s<<4)^(v<<30|s>>>2)^(v<<25|s>>>7),D=(s>>>28|v<<4)^(s<<30|v>>>2)^(s<<25|v>>>7),k=p[x],ia=k.high,ea=k.low,k=M+((t>>>14|w<<18)^(t>>>18|w<<14)^(t<<23|w>>>9)),A=X+((w>>>14|t<<18)^(w>>>18|t<<14)^(w<<23|t>>>9))+(k>>>0>>\n0?1:0),k=k+C,A=A+Y+(k>>>0>>0?1:0),k=k+ea,A=A+ia+(k>>>0>>0?1:0),k=k+e,A=A+u+(k>>>0>>0?1:0),e=D+ha,B=z+B+(e>>>0>>0?1:0),X=V,M=L,V=U,L=K,U=w,K=t,t=J+k|0,w=W+A+(t>>>0>>0?1:0)|0,W=I,J=F,I=H,F=E,H=v,E=s,s=k+e|0,v=A+B+(s>>>0>>0?1:0)|0}m=g.low=m+s;g.high=q+v+(m>>>0>>0?1:0);N=n.low=N+E;n.high=r+H+(N>>>0>>0?1:0);O=b.low=O+F;b.high=Z+I+(O>>>0>>0?1:0);P=d.low=P+J;d.high=$+W+(P>>>0>>0?1:0);Q=c.low=Q+t;c.high=aa+w+(Q>>>0>>0?1:0);R=j.low=R+K;j.high=ba+U+(R>>>0>>0?1:0);S=l.low=\nS+L;l.high=ca+V+(S>>>0>>0?1:0);T=h.low=T+M;h.high=da+X+(T>>>0>>0?1:0)},_doFinalize:function(){var a=this._data,f=a.words,h=8*this._nDataBytes,g=8*a.sigBytes;f[g>>>5]|=128<<24-g%32;f[(g+128>>>10<<5)+30]=Math.floor(h/4294967296);f[(g+128>>>10<<5)+31]=h;a.sigBytes=4*f.length;this._process();return this._hash.toX32()},clone:function(){var a=r.clone.call(this);a._hash=this._hash.clone();return a},blockSize:32});m.SHA512=r._createHelper(f);m.HmacSHA512=r._createHmacHelper(f)})();\n"], "abc": [".py", "\n\n\n\"\"\"Abstract Base Classes (ABCs) according to PEP 3119.\"\"\"\n\n\ndef abstractmethod(funcobj):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n funcobj.__isabstractmethod__=True\n return funcobj\n \n \nclass abstractclassmethod(classmethod):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n __isabstractmethod__=True\n \n def __init__(self,callable):\n callable.__isabstractmethod__=True\n super().__init__(callable)\n \n \nclass abstractstaticmethod(staticmethod):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n __isabstractmethod__=True\n \n def __init__(self,callable):\n callable.__isabstractmethod__=True\n super().__init__(callable)\n \n \nclass abstractproperty(property):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n __isabstractmethod__=True\n \n \ntry :\n from _abc import (get_cache_token,_abc_init,_abc_register,\n _abc_instancecheck,_abc_subclasscheck,_get_dump,\n _reset_registry,_reset_caches)\nexcept ImportError:\n from _py_abc import ABCMeta,get_cache_token\n ABCMeta.__module__='abc'\nelse :\n class ABCMeta(type):\n 
''\n\n\n\n\n\n\n\n\n\n\n \n def __new__(mcls,name,bases,namespace,**kwargs):\n cls=super().__new__(mcls,name,bases,namespace,**kwargs)\n _abc_init(cls)\n return cls\n \n def register(cls,subclass):\n ''\n\n\n \n return _abc_register(cls,subclass)\n \n def __instancecheck__(cls,instance):\n ''\n return _abc_instancecheck(cls,instance)\n \n def __subclasscheck__(cls,subclass):\n ''\n return _abc_subclasscheck(cls,subclass)\n \n def _dump_registry(cls,file=None ):\n ''\n print(f\"Class: {cls.__module__}.{cls.__qualname__}\",file=file)\n print(f\"Inv. counter: {get_cache_token()}\",file=file)\n (_abc_registry,_abc_cache,_abc_negative_cache,\n _abc_negative_cache_version)=_get_dump(cls)\n print(f\"_abc_registry: {_abc_registry!r}\",file=file)\n print(f\"_abc_cache: {_abc_cache!r}\",file=file)\n print(f\"_abc_negative_cache: {_abc_negative_cache!r}\",file=file)\n print(f\"_abc_negative_cache_version: {_abc_negative_cache_version!r}\",\n file=file)\n \n def _abc_registry_clear(cls):\n ''\n _reset_registry(cls)\n \n def _abc_caches_clear(cls):\n ''\n _reset_caches(cls)\n \n \nclass ABC(metaclass=ABCMeta):\n ''\n\n \n __slots__=()\n", ["_abc", "_py_abc"]], "antigravity": [".py", "\nimport webbrowser\nimport hashlib\n\nwebbrowser.open(\"https://xkcd.com/353/\")\n\ndef geohash(latitude,longitude,datedow):\n ''\n\n\n\n\n \n \n h=hashlib.md5(datedow).hexdigest()\n p,q=[('%f'%float.fromhex('0.'+x))for x in (h[:16],h[16:32])]\n print('%d%s %d%s'%(latitude,p[1:],longitude,q[1:]))\n", ["hashlib", "webbrowser"]], "argparse": [".py", "\n\n\"\"\"Command-line parsing library\n\nThis module is an optparse-inspired command-line parsing library that:\n\n - handles both optional and positional arguments\n - produces highly informative usage messages\n - supports parsers that dispatch to sub-parsers\n\nThe following is a simple usage example that sums integers from the\ncommand-line and writes the result to a file::\n\n parser = argparse.ArgumentParser(\n description='sum the integers at the command line')\n parser.add_argument(\n 'integers', metavar='int', nargs='+', type=int,\n help='an integer to be summed')\n parser.add_argument(\n '--log', default=sys.stdout, type=argparse.FileType('w'),\n help='the file where the sum should be written')\n args = parser.parse_args()\n args.log.write('%s' % sum(args.integers))\n args.log.close()\n\nThe module contains the following public classes:\n\n - ArgumentParser -- The main entry point for command-line parsing. As the\n example above shows, the add_argument() method is used to populate\n the parser with actions for optional and positional arguments. Then\n the parse_args() method is invoked to convert the args at the\n command-line into an object with attributes.\n\n - ArgumentError -- The exception raised by ArgumentParser objects when\n there are errors with the parser's actions. Errors raised while\n parsing the command-line are caught by ArgumentParser and emitted\n as command-line messages.\n\n - FileType -- A factory for defining types of files to be created. As the\n example above shows, instances of FileType are typically passed as\n the type= argument of add_argument() calls.\n\n - Action -- The base class for parser actions. Typically actions are\n selected by passing strings like 'store_true' or 'append_const' to\n the action= argument of add_argument(). 
However, for greater\n customization of ArgumentParser actions, subclasses of Action may\n be defined and passed as the action= argument.\n\n - HelpFormatter, RawDescriptionHelpFormatter, RawTextHelpFormatter,\n ArgumentDefaultsHelpFormatter -- Formatter classes which\n may be passed as the formatter_class= argument to the\n ArgumentParser constructor. HelpFormatter is the default,\n RawDescriptionHelpFormatter and RawTextHelpFormatter tell the parser\n not to change the formatting for help text, and\n ArgumentDefaultsHelpFormatter adds information about argument defaults\n to the help.\n\nAll other classes in this module are considered implementation details.\n(Also note that HelpFormatter and RawDescriptionHelpFormatter are only\nconsidered public as object names -- the API of the formatter objects is\nstill considered an implementation detail.)\n\"\"\"\n\n__version__='1.1'\n__all__=[\n'ArgumentParser',\n'ArgumentError',\n'ArgumentTypeError',\n'FileType',\n'HelpFormatter',\n'ArgumentDefaultsHelpFormatter',\n'RawDescriptionHelpFormatter',\n'RawTextHelpFormatter',\n'MetavarTypeHelpFormatter',\n'Namespace',\n'Action',\n'ONE_OR_MORE',\n'OPTIONAL',\n'PARSER',\n'REMAINDER',\n'SUPPRESS',\n'ZERO_OR_MORE',\n]\n\n\nimport os as _os\nimport re as _re\nimport sys as _sys\n\nfrom gettext import gettext as _,ngettext\n\nSUPPRESS='==SUPPRESS=='\n\nOPTIONAL='?'\nZERO_OR_MORE='*'\nONE_OR_MORE='+'\nPARSER='A...'\nREMAINDER='...'\n_UNRECOGNIZED_ARGS_ATTR='_unrecognized_args'\n\n\n\n\n\nclass _AttributeHolder(object):\n ''\n\n\n\n\n\n \n \n def __repr__(self):\n type_name=type(self).__name__\n arg_strings=[]\n star_args={}\n for arg in self._get_args():\n arg_strings.append(repr(arg))\n for name,value in self._get_kwargs():\n if name.isidentifier():\n arg_strings.append('%s=%r'%(name,value))\n else :\n star_args[name]=value\n if star_args:\n arg_strings.append('**%s'%repr(star_args))\n return '%s(%s)'%(type_name,', '.join(arg_strings))\n \n def _get_kwargs(self):\n return sorted(self.__dict__.items())\n \n def _get_args(self):\n return []\n \n \ndef _copy_items(items):\n if items is None :\n return []\n \n \n \n if type(items)is list:\n return items[:]\n import copy\n return copy.copy(items)\n \n \n \n \n \n \nclass HelpFormatter(object):\n ''\n\n\n\n \n \n def __init__(self,\n prog,\n indent_increment=2,\n max_help_position=24,\n width=None ):\n \n \n if width is None :\n try :\n width=int(_os.environ['COLUMNS'])\n except (KeyError,ValueError):\n width=80\n width -=2\n \n self._prog=prog\n self._indent_increment=indent_increment\n self._max_help_position=max_help_position\n self._max_help_position=min(max_help_position,\n max(width -20,indent_increment *2))\n self._width=width\n \n self._current_indent=0\n self._level=0\n self._action_max_length=0\n \n self._root_section=self._Section(self,None )\n self._current_section=self._root_section\n \n self._whitespace_matcher=_re.compile(r'\\s+',_re.ASCII)\n self._long_break_matcher=_re.compile(r'\\n\\n\\n+')\n \n \n \n \n def _indent(self):\n self._current_indent +=self._indent_increment\n self._level +=1\n \n def _dedent(self):\n self._current_indent -=self._indent_increment\n assert self._current_indent >=0,'Indent decreased below 0.'\n self._level -=1\n \n class _Section(object):\n \n def __init__(self,formatter,parent,heading=None ):\n self.formatter=formatter\n self.parent=parent\n self.heading=heading\n self.items=[]\n \n def format_help(self):\n \n if self.parent is not None :\n self.formatter._indent()\n join=self.formatter._join_parts\n 
item_help=join([func(*args)for func,args in self.items])\n if self.parent is not None :\n self.formatter._dedent()\n \n \n if not item_help:\n return ''\n \n \n if self.heading is not SUPPRESS and self.heading is not None :\n current_indent=self.formatter._current_indent\n heading='%*s%s:\\n'%(current_indent,'',self.heading)\n else :\n heading=''\n \n \n return join(['\\n',heading,item_help,'\\n'])\n \n def _add_item(self,func,args):\n self._current_section.items.append((func,args))\n \n \n \n \n def start_section(self,heading):\n self._indent()\n section=self._Section(self,self._current_section,heading)\n self._add_item(section.format_help,[])\n self._current_section=section\n \n def end_section(self):\n self._current_section=self._current_section.parent\n self._dedent()\n \n def add_text(self,text):\n if text is not SUPPRESS and text is not None :\n self._add_item(self._format_text,[text])\n \n def add_usage(self,usage,actions,groups,prefix=None ):\n if usage is not SUPPRESS:\n args=usage,actions,groups,prefix\n self._add_item(self._format_usage,args)\n \n def add_argument(self,action):\n if action.help is not SUPPRESS:\n \n \n get_invocation=self._format_action_invocation\n invocations=[get_invocation(action)]\n for subaction in self._iter_indented_subactions(action):\n invocations.append(get_invocation(subaction))\n \n \n invocation_length=max([len(s)for s in invocations])\n action_length=invocation_length+self._current_indent\n self._action_max_length=max(self._action_max_length,\n action_length)\n \n \n self._add_item(self._format_action,[action])\n \n def add_arguments(self,actions):\n for action in actions:\n self.add_argument(action)\n \n \n \n \n def format_help(self):\n help=self._root_section.format_help()\n if help:\n help=self._long_break_matcher.sub('\\n\\n',help)\n help=help.strip('\\n')+'\\n'\n return help\n \n def _join_parts(self,part_strings):\n return ''.join([part\n for part in part_strings\n if part and part is not SUPPRESS])\n \n def _format_usage(self,usage,actions,groups,prefix):\n if prefix is None :\n prefix=_('usage: ')\n \n \n if usage is not None :\n usage=usage %dict(prog=self._prog)\n \n \n elif usage is None and not actions:\n usage='%(prog)s'%dict(prog=self._prog)\n \n \n elif usage is None :\n prog='%(prog)s'%dict(prog=self._prog)\n \n \n optionals=[]\n positionals=[]\n for action in actions:\n if action.option_strings:\n optionals.append(action)\n else :\n positionals.append(action)\n \n \n format=self._format_actions_usage\n action_usage=format(optionals+positionals,groups)\n usage=' '.join([s for s in [prog,action_usage]if s])\n \n \n text_width=self._width -self._current_indent\n if len(prefix)+len(usage)>text_width:\n \n \n part_regexp=(\n r'\\(.*?\\)+(?=\\s|$)|'\n r'\\[.*?\\]+(?=\\s|$)|'\n r'\\S+'\n )\n opt_usage=format(optionals,groups)\n pos_usage=format(positionals,groups)\n opt_parts=_re.findall(part_regexp,opt_usage)\n pos_parts=_re.findall(part_regexp,pos_usage)\n assert ' '.join(opt_parts)==opt_usage\n assert ' '.join(pos_parts)==pos_usage\n \n \n def get_lines(parts,indent,prefix=None ):\n lines=[]\n line=[]\n if prefix is not None :\n line_len=len(prefix)-1\n else :\n line_len=len(indent)-1\n for part in parts:\n if line_len+1+len(part)>text_width and line:\n lines.append(indent+' '.join(line))\n line=[]\n line_len=len(indent)-1\n line.append(part)\n line_len +=len(part)+1\n if line:\n lines.append(indent+' '.join(line))\n if prefix is not None :\n lines[0]=lines[0][len(indent):]\n return lines\n \n \n if len(prefix)+len(prog)<=0.75 
*text_width:\n indent=' '*(len(prefix)+len(prog)+1)\n if opt_parts:\n lines=get_lines([prog]+opt_parts,indent,prefix)\n lines.extend(get_lines(pos_parts,indent))\n elif pos_parts:\n lines=get_lines([prog]+pos_parts,indent,prefix)\n else :\n lines=[prog]\n \n \n else :\n indent=' '*len(prefix)\n parts=opt_parts+pos_parts\n lines=get_lines(parts,indent)\n if len(lines)>1:\n lines=[]\n lines.extend(get_lines(opt_parts,indent))\n lines.extend(get_lines(pos_parts,indent))\n lines=[prog]+lines\n \n \n usage='\\n'.join(lines)\n \n \n return '%s%s\\n\\n'%(prefix,usage)\n \n def _format_actions_usage(self,actions,groups):\n \n group_actions=set()\n inserts={}\n for group in groups:\n try :\n start=actions.index(group._group_actions[0])\n except ValueError:\n continue\n else :\n end=start+len(group._group_actions)\n if actions[start:end]==group._group_actions:\n for action in group._group_actions:\n group_actions.add(action)\n if not group.required:\n if start in inserts:\n inserts[start]+=' ['\n else :\n inserts[start]='['\n inserts[end]=']'\n else :\n if start in inserts:\n inserts[start]+=' ('\n else :\n inserts[start]='('\n inserts[end]=')'\n for i in range(start+1,end):\n inserts[i]='|'\n \n \n parts=[]\n for i,action in enumerate(actions):\n \n \n \n if action.help is SUPPRESS:\n parts.append(None )\n if inserts.get(i)=='|':\n inserts.pop(i)\n elif inserts.get(i+1)=='|':\n inserts.pop(i+1)\n \n \n elif not action.option_strings:\n default=self._get_default_metavar_for_positional(action)\n part=self._format_args(action,default)\n \n \n if action in group_actions:\n if part[0]=='['and part[-1]==']':\n part=part[1:-1]\n \n \n parts.append(part)\n \n \n else :\n option_string=action.option_strings[0]\n \n \n \n if action.nargs ==0:\n part='%s'%option_string\n \n \n \n else :\n default=self._get_default_metavar_for_optional(action)\n args_string=self._format_args(action,default)\n part='%s %s'%(option_string,args_string)\n \n \n if not action.required and action not in group_actions:\n part='[%s]'%part\n \n \n parts.append(part)\n \n \n for i in sorted(inserts,reverse=True ):\n parts[i:i]=[inserts[i]]\n \n \n text=' '.join([item for item in parts if item is not None ])\n \n \n open=r'[\\[(]'\n close=r'[\\])]'\n text=_re.sub(r'(%s) '%open,r'\\1',text)\n text=_re.sub(r' (%s)'%close,r'\\1',text)\n text=_re.sub(r'%s *%s'%(open,close),r'',text)\n text=_re.sub(r'\\(([^|]*)\\)',r'\\1',text)\n text=text.strip()\n \n \n return text\n \n def _format_text(self,text):\n if '%(prog)'in text:\n text=text %dict(prog=self._prog)\n text_width=max(self._width -self._current_indent,11)\n indent=' '*self._current_indent\n return self._fill_text(text,text_width,indent)+'\\n\\n'\n \n def _format_action(self,action):\n \n help_position=min(self._action_max_length+2,\n self._max_help_position)\n help_width=max(self._width -help_position,11)\n action_width=help_position -self._current_indent -2\n action_header=self._format_action_invocation(action)\n \n \n if not action.help:\n tup=self._current_indent,'',action_header\n action_header='%*s%s\\n'%tup\n \n \n elif len(action_header)<=action_width:\n tup=self._current_indent,'',action_width,action_header\n action_header='%*s%-*s '%tup\n indent_first=0\n \n \n else :\n tup=self._current_indent,'',action_header\n action_header='%*s%s\\n'%tup\n indent_first=help_position\n \n \n parts=[action_header]\n \n \n if action.help:\n help_text=self._expand_help(action)\n help_lines=self._split_lines(help_text,help_width)\n parts.append('%*s%s\\n'%(indent_first,'',help_lines[0]))\n for 
line in help_lines[1:]:\n parts.append('%*s%s\\n'%(help_position,'',line))\n \n \n elif not action_header.endswith('\\n'):\n parts.append('\\n')\n \n \n for subaction in self._iter_indented_subactions(action):\n parts.append(self._format_action(subaction))\n \n \n return self._join_parts(parts)\n \n def _format_action_invocation(self,action):\n if not action.option_strings:\n default=self._get_default_metavar_for_positional(action)\n metavar,=self._metavar_formatter(action,default)(1)\n return metavar\n \n else :\n parts=[]\n \n \n \n if action.nargs ==0:\n parts.extend(action.option_strings)\n \n \n \n else :\n default=self._get_default_metavar_for_optional(action)\n args_string=self._format_args(action,default)\n for option_string in action.option_strings:\n parts.append('%s %s'%(option_string,args_string))\n \n return ', '.join(parts)\n \n def _metavar_formatter(self,action,default_metavar):\n if action.metavar is not None :\n result=action.metavar\n elif action.choices is not None :\n choice_strs=[str(choice)for choice in action.choices]\n result='{%s}'%','.join(choice_strs)\n else :\n result=default_metavar\n \n def format(tuple_size):\n if isinstance(result,tuple):\n return result\n else :\n return (result,)*tuple_size\n return format\n \n def _format_args(self,action,default_metavar):\n get_metavar=self._metavar_formatter(action,default_metavar)\n if action.nargs is None :\n result='%s'%get_metavar(1)\n elif action.nargs ==OPTIONAL:\n result='[%s]'%get_metavar(1)\n elif action.nargs ==ZERO_OR_MORE:\n result='[%s [%s ...]]'%get_metavar(2)\n elif action.nargs ==ONE_OR_MORE:\n result='%s [%s ...]'%get_metavar(2)\n elif action.nargs ==REMAINDER:\n result='...'\n elif action.nargs ==PARSER:\n result='%s ...'%get_metavar(1)\n elif action.nargs ==SUPPRESS:\n result=''\n else :\n formats=['%s'for _ in range(action.nargs)]\n result=' '.join(formats)%get_metavar(action.nargs)\n return result\n \n def _expand_help(self,action):\n params=dict(vars(action),prog=self._prog)\n for name in list(params):\n if params[name]is SUPPRESS:\n del params[name]\n for name in list(params):\n if hasattr(params[name],'__name__'):\n params[name]=params[name].__name__\n if params.get('choices')is not None :\n choices_str=', '.join([str(c)for c in params['choices']])\n params['choices']=choices_str\n return self._get_help_string(action)%params\n \n def _iter_indented_subactions(self,action):\n try :\n get_subactions=action._get_subactions\n except AttributeError:\n pass\n else :\n self._indent()\n yield from get_subactions()\n self._dedent()\n \n def _split_lines(self,text,width):\n text=self._whitespace_matcher.sub(' ',text).strip()\n \n \n import textwrap\n return textwrap.wrap(text,width)\n \n def _fill_text(self,text,width,indent):\n text=self._whitespace_matcher.sub(' ',text).strip()\n import textwrap\n return textwrap.fill(text,width,\n initial_indent=indent,\n subsequent_indent=indent)\n \n def _get_help_string(self,action):\n return action.help\n \n def _get_default_metavar_for_optional(self,action):\n return action.dest.upper()\n \n def _get_default_metavar_for_positional(self,action):\n return action.dest\n \n \nclass RawDescriptionHelpFormatter(HelpFormatter):\n ''\n\n\n\n \n \n def _fill_text(self,text,width,indent):\n return ''.join(indent+line for line in text.splitlines(keepends=True ))\n \n \nclass RawTextHelpFormatter(RawDescriptionHelpFormatter):\n ''\n\n\n\n \n \n def _split_lines(self,text,width):\n return text.splitlines()\n \n \nclass ArgumentDefaultsHelpFormatter(HelpFormatter):\n ''\n\n\n\n 
\n \n def _get_help_string(self,action):\n help=action.help\n if '%(default)'not in action.help:\n if action.default is not SUPPRESS:\n defaulting_nargs=[OPTIONAL,ZERO_OR_MORE]\n if action.option_strings or action.nargs in defaulting_nargs:\n help +=' (default: %(default)s)'\n return help\n \n \nclass MetavarTypeHelpFormatter(HelpFormatter):\n ''\n\n\n\n\n \n \n def _get_default_metavar_for_optional(self,action):\n return action.type.__name__\n \n def _get_default_metavar_for_positional(self,action):\n return action.type.__name__\n \n \n \n \n \n \n \ndef _get_action_name(argument):\n if argument is None :\n return None\n elif argument.option_strings:\n return '/'.join(argument.option_strings)\n elif argument.metavar not in (None ,SUPPRESS):\n return argument.metavar\n elif argument.dest not in (None ,SUPPRESS):\n return argument.dest\n else :\n return None\n \n \nclass ArgumentError(Exception):\n ''\n\n\n\n \n \n def __init__(self,argument,message):\n self.argument_name=_get_action_name(argument)\n self.message=message\n \n def __str__(self):\n if self.argument_name is None :\n format='%(message)s'\n else :\n format='argument %(argument_name)s: %(message)s'\n return format %dict(message=self.message,\n argument_name=self.argument_name)\n \n \nclass ArgumentTypeError(Exception):\n ''\n pass\n \n \n \n \n \n \nclass Action(_AttributeHolder):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def __init__(self,\n option_strings,\n dest,\n nargs=None ,\n const=None ,\n default=None ,\n type=None ,\n choices=None ,\n required=False ,\n help=None ,\n metavar=None ):\n self.option_strings=option_strings\n self.dest=dest\n self.nargs=nargs\n self.const=const\n self.default=default\n self.type=type\n self.choices=choices\n self.required=required\n self.help=help\n self.metavar=metavar\n \n def _get_kwargs(self):\n names=[\n 'option_strings',\n 'dest',\n 'nargs',\n 'const',\n 'default',\n 'type',\n 'choices',\n 'help',\n 'metavar',\n ]\n return [(name,getattr(self,name))for name in names]\n \n def __call__(self,parser,namespace,values,option_string=None ):\n raise NotImplementedError(_('.__call__() not defined'))\n \n \nclass _StoreAction(Action):\n\n def __init__(self,\n option_strings,\n dest,\n nargs=None ,\n const=None ,\n default=None ,\n type=None ,\n choices=None ,\n required=False ,\n help=None ,\n metavar=None ):\n if nargs ==0:\n raise ValueError('nargs for store actions must be > 0; if you '\n 'have nothing to store, actions such as store '\n 'true or store const may be more appropriate')\n if const is not None and nargs !=OPTIONAL:\n raise ValueError('nargs must be %r to supply const'%OPTIONAL)\n super(_StoreAction,self).__init__(\n option_strings=option_strings,\n dest=dest,\n nargs=nargs,\n const=const,\n default=default,\n type=type,\n choices=choices,\n required=required,\n help=help,\n metavar=metavar)\n \n def __call__(self,parser,namespace,values,option_string=None ):\n setattr(namespace,self.dest,values)\n \n \nclass _StoreConstAction(Action):\n\n def __init__(self,\n option_strings,\n dest,\n const,\n default=None ,\n required=False ,\n help=None ,\n metavar=None ):\n super(_StoreConstAction,self).__init__(\n option_strings=option_strings,\n dest=dest,\n nargs=0,\n const=const,\n default=default,\n required=required,\n help=help)\n \n def __call__(self,parser,namespace,values,option_string=None ):\n setattr(namespace,self.dest,self.const)\n \n \nclass _StoreTrueAction(_StoreConstAction):\n\n def __init__(self,\n 
option_strings,\n dest,\n default=False ,\n required=False ,\n help=None ):\n super(_StoreTrueAction,self).__init__(\n option_strings=option_strings,\n dest=dest,\n const=True ,\n default=default,\n required=required,\n help=help)\n \n \nclass _StoreFalseAction(_StoreConstAction):\n\n def __init__(self,\n option_strings,\n dest,\n default=True ,\n required=False ,\n help=None ):\n super(_StoreFalseAction,self).__init__(\n option_strings=option_strings,\n dest=dest,\n const=False ,\n default=default,\n required=required,\n help=help)\n \n \nclass _AppendAction(Action):\n\n def __init__(self,\n option_strings,\n dest,\n nargs=None ,\n const=None ,\n default=None ,\n type=None ,\n choices=None ,\n required=False ,\n help=None ,\n metavar=None ):\n if nargs ==0:\n raise ValueError('nargs for append actions must be > 0; if arg '\n 'strings are not supplying the value to append, '\n 'the append const action may be more appropriate')\n if const is not None and nargs !=OPTIONAL:\n raise ValueError('nargs must be %r to supply const'%OPTIONAL)\n super(_AppendAction,self).__init__(\n option_strings=option_strings,\n dest=dest,\n nargs=nargs,\n const=const,\n default=default,\n type=type,\n choices=choices,\n required=required,\n help=help,\n metavar=metavar)\n \n def __call__(self,parser,namespace,values,option_string=None ):\n items=getattr(namespace,self.dest,None )\n items=_copy_items(items)\n items.append(values)\n setattr(namespace,self.dest,items)\n \n \nclass _AppendConstAction(Action):\n\n def __init__(self,\n option_strings,\n dest,\n const,\n default=None ,\n required=False ,\n help=None ,\n metavar=None ):\n super(_AppendConstAction,self).__init__(\n option_strings=option_strings,\n dest=dest,\n nargs=0,\n const=const,\n default=default,\n required=required,\n help=help,\n metavar=metavar)\n \n def __call__(self,parser,namespace,values,option_string=None ):\n items=getattr(namespace,self.dest,None )\n items=_copy_items(items)\n items.append(self.const)\n setattr(namespace,self.dest,items)\n \n \nclass _CountAction(Action):\n\n def __init__(self,\n option_strings,\n dest,\n default=None ,\n required=False ,\n help=None ):\n super(_CountAction,self).__init__(\n option_strings=option_strings,\n dest=dest,\n nargs=0,\n default=default,\n required=required,\n help=help)\n \n def __call__(self,parser,namespace,values,option_string=None ):\n count=getattr(namespace,self.dest,None )\n if count is None :\n count=0\n setattr(namespace,self.dest,count+1)\n \n \nclass _HelpAction(Action):\n\n def __init__(self,\n option_strings,\n dest=SUPPRESS,\n default=SUPPRESS,\n help=None ):\n super(_HelpAction,self).__init__(\n option_strings=option_strings,\n dest=dest,\n default=default,\n nargs=0,\n help=help)\n \n def __call__(self,parser,namespace,values,option_string=None ):\n parser.print_help()\n parser.exit()\n \n \nclass _VersionAction(Action):\n\n def __init__(self,\n option_strings,\n version=None ,\n dest=SUPPRESS,\n default=SUPPRESS,\n help=\"show program's version number and exit\"):\n super(_VersionAction,self).__init__(\n option_strings=option_strings,\n dest=dest,\n default=default,\n nargs=0,\n help=help)\n self.version=version\n \n def __call__(self,parser,namespace,values,option_string=None ):\n version=self.version\n if version is None :\n version=parser.version\n formatter=parser._get_formatter()\n formatter.add_text(version)\n parser._print_message(formatter.format_help(),_sys.stdout)\n parser.exit()\n \n \nclass _SubParsersAction(Action):\n\n class _ChoicesPseudoAction(Action):\n \n def 
__init__(self,name,aliases,help):\n metavar=dest=name\n if aliases:\n metavar +=' (%s)'%', '.join(aliases)\n sup=super(_SubParsersAction._ChoicesPseudoAction,self)\n sup.__init__(option_strings=[],dest=dest,help=help,\n metavar=metavar)\n \n def __init__(self,\n option_strings,\n prog,\n parser_class,\n dest=SUPPRESS,\n required=False ,\n help=None ,\n metavar=None ):\n \n self._prog_prefix=prog\n self._parser_class=parser_class\n self._name_parser_map={}\n self._choices_actions=[]\n \n super(_SubParsersAction,self).__init__(\n option_strings=option_strings,\n dest=dest,\n nargs=PARSER,\n choices=self._name_parser_map,\n required=required,\n help=help,\n metavar=metavar)\n \n def add_parser(self,name,**kwargs):\n \n if kwargs.get('prog')is None :\n kwargs['prog']='%s %s'%(self._prog_prefix,name)\n \n aliases=kwargs.pop('aliases',())\n \n \n if 'help'in kwargs:\n help=kwargs.pop('help')\n choice_action=self._ChoicesPseudoAction(name,aliases,help)\n self._choices_actions.append(choice_action)\n \n \n parser=self._parser_class(**kwargs)\n self._name_parser_map[name]=parser\n \n \n for alias in aliases:\n self._name_parser_map[alias]=parser\n \n return parser\n \n def _get_subactions(self):\n return self._choices_actions\n \n def __call__(self,parser,namespace,values,option_string=None ):\n parser_name=values[0]\n arg_strings=values[1:]\n \n \n if self.dest is not SUPPRESS:\n setattr(namespace,self.dest,parser_name)\n \n \n try :\n parser=self._name_parser_map[parser_name]\n except KeyError:\n args={'parser_name':parser_name,\n 'choices':', '.join(self._name_parser_map)}\n msg=_('unknown parser %(parser_name)r (choices: %(choices)s)')%args\n raise ArgumentError(self,msg)\n \n \n \n \n \n \n \n \n subnamespace,arg_strings=parser.parse_known_args(arg_strings,None )\n for key,value in vars(subnamespace).items():\n setattr(namespace,key,value)\n \n if arg_strings:\n vars(namespace).setdefault(_UNRECOGNIZED_ARGS_ATTR,[])\n getattr(namespace,_UNRECOGNIZED_ARGS_ATTR).extend(arg_strings)\n \n \n \n \n \n \nclass FileType(object):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def __init__(self,mode='r',bufsize=-1,encoding=None ,errors=None ):\n self._mode=mode\n self._bufsize=bufsize\n self._encoding=encoding\n self._errors=errors\n \n def __call__(self,string):\n \n if string =='-':\n if 'r'in self._mode:\n return _sys.stdin\n elif 'w'in self._mode:\n return _sys.stdout\n else :\n msg=_('argument \"-\" with mode %r')%self._mode\n raise ValueError(msg)\n \n \n try :\n return open(string,self._mode,self._bufsize,self._encoding,\n self._errors)\n except OSError as e:\n message=_(\"can't open '%s': %s\")\n raise ArgumentTypeError(message %(string,e))\n \n def __repr__(self):\n args=self._mode,self._bufsize\n kwargs=[('encoding',self._encoding),('errors',self._errors)]\n args_str=', '.join([repr(arg)for arg in args if arg !=-1]+\n ['%s=%r'%(kw,arg)for kw,arg in kwargs\n if arg is not None ])\n return '%s(%s)'%(type(self).__name__,args_str)\n \n \n \n \n \nclass Namespace(_AttributeHolder):\n ''\n\n\n\n \n \n def __init__(self,**kwargs):\n for name in kwargs:\n setattr(self,name,kwargs[name])\n \n def __eq__(self,other):\n if not isinstance(other,Namespace):\n return NotImplemented\n return vars(self)==vars(other)\n \n def __contains__(self,key):\n return key in self.__dict__\n \n \nclass _ActionsContainer(object):\n\n def __init__(self,\n description,\n prefix_chars,\n argument_default,\n conflict_handler):\n super(_ActionsContainer,self).__init__()\n \n self.description=description\n 
self.argument_default=argument_default\n self.prefix_chars=prefix_chars\n self.conflict_handler=conflict_handler\n \n \n self._registries={}\n \n \n self.register('action',None ,_StoreAction)\n self.register('action','store',_StoreAction)\n self.register('action','store_const',_StoreConstAction)\n self.register('action','store_true',_StoreTrueAction)\n self.register('action','store_false',_StoreFalseAction)\n self.register('action','append',_AppendAction)\n self.register('action','append_const',_AppendConstAction)\n self.register('action','count',_CountAction)\n self.register('action','help',_HelpAction)\n self.register('action','version',_VersionAction)\n self.register('action','parsers',_SubParsersAction)\n \n \n self._get_handler()\n \n \n self._actions=[]\n self._option_string_actions={}\n \n \n self._action_groups=[]\n self._mutually_exclusive_groups=[]\n \n \n self._defaults={}\n \n \n self._negative_number_matcher=_re.compile(r'^-\\d+$|^-\\d*\\.\\d+$')\n \n \n \n self._has_negative_number_optionals=[]\n \n \n \n \n def register(self,registry_name,value,object):\n registry=self._registries.setdefault(registry_name,{})\n registry[value]=object\n \n def _registry_get(self,registry_name,value,default=None ):\n return self._registries[registry_name].get(value,default)\n \n \n \n \n def set_defaults(self,**kwargs):\n self._defaults.update(kwargs)\n \n \n \n for action in self._actions:\n if action.dest in kwargs:\n action.default=kwargs[action.dest]\n \n def get_default(self,dest):\n for action in self._actions:\n if action.dest ==dest and action.default is not None :\n return action.default\n return self._defaults.get(dest,None )\n \n \n \n \n \n def add_argument(self,*args,**kwargs):\n ''\n\n\n \n \n \n \n \n chars=self.prefix_chars\n if not args or len(args)==1 and args[0][0]not in chars:\n if args and 'dest'in kwargs:\n raise ValueError('dest supplied twice for positional argument')\n kwargs=self._get_positional_kwargs(*args,**kwargs)\n \n \n else :\n kwargs=self._get_optional_kwargs(*args,**kwargs)\n \n \n if 'default'not in kwargs:\n dest=kwargs['dest']\n if dest in self._defaults:\n kwargs['default']=self._defaults[dest]\n elif self.argument_default is not None :\n kwargs['default']=self.argument_default\n \n \n action_class=self._pop_action_class(kwargs)\n if not callable(action_class):\n raise ValueError('unknown action \"%s\"'%(action_class,))\n action=action_class(**kwargs)\n \n \n type_func=self._registry_get('type',action.type,action.type)\n if not callable(type_func):\n raise ValueError('%r is not callable'%(type_func,))\n \n \n if hasattr(self,\"_get_formatter\"):\n try :\n self._get_formatter()._format_args(action,None )\n except TypeError:\n raise ValueError(\"length of metavar tuple does not match nargs\")\n \n return self._add_action(action)\n \n def add_argument_group(self,*args,**kwargs):\n group=_ArgumentGroup(self,*args,**kwargs)\n self._action_groups.append(group)\n return group\n \n def add_mutually_exclusive_group(self,**kwargs):\n group=_MutuallyExclusiveGroup(self,**kwargs)\n self._mutually_exclusive_groups.append(group)\n return group\n \n def _add_action(self,action):\n \n self._check_conflict(action)\n \n \n self._actions.append(action)\n action.container=self\n \n \n for option_string in action.option_strings:\n self._option_string_actions[option_string]=action\n \n \n for option_string in action.option_strings:\n if self._negative_number_matcher.match(option_string):\n if not self._has_negative_number_optionals:\n 
self._has_negative_number_optionals.append(True )\n \n \n return action\n \n def _remove_action(self,action):\n self._actions.remove(action)\n \n def _add_container_actions(self,container):\n \n title_group_map={}\n for group in self._action_groups:\n if group.title in title_group_map:\n msg=_('cannot merge actions - two groups are named %r')\n raise ValueError(msg %(group.title))\n title_group_map[group.title]=group\n \n \n group_map={}\n for group in container._action_groups:\n \n \n \n if group.title not in title_group_map:\n title_group_map[group.title]=self.add_argument_group(\n title=group.title,\n description=group.description,\n conflict_handler=group.conflict_handler)\n \n \n for action in group._group_actions:\n group_map[action]=title_group_map[group.title]\n \n \n \n \n for group in container._mutually_exclusive_groups:\n mutex_group=self.add_mutually_exclusive_group(\n required=group.required)\n \n \n for action in group._group_actions:\n group_map[action]=mutex_group\n \n \n for action in container._actions:\n group_map.get(action,self)._add_action(action)\n \n def _get_positional_kwargs(self,dest,**kwargs):\n \n if 'required'in kwargs:\n msg=_(\"'required' is an invalid argument for positionals\")\n raise TypeError(msg)\n \n \n \n if kwargs.get('nargs')not in [OPTIONAL,ZERO_OR_MORE]:\n kwargs['required']=True\n if kwargs.get('nargs')==ZERO_OR_MORE and 'default'not in kwargs:\n kwargs['required']=True\n \n \n return dict(kwargs,dest=dest,option_strings=[])\n \n def _get_optional_kwargs(self,*args,**kwargs):\n \n option_strings=[]\n long_option_strings=[]\n for option_string in args:\n \n if not option_string[0]in self.prefix_chars:\n args={'option':option_string,\n 'prefix_chars':self.prefix_chars}\n msg=_('invalid option string %(option)r: '\n 'must start with a character %(prefix_chars)r')\n raise ValueError(msg %args)\n \n \n option_strings.append(option_string)\n if option_string[0]in self.prefix_chars:\n if len(option_string)>1:\n if option_string[1]in self.prefix_chars:\n long_option_strings.append(option_string)\n \n \n dest=kwargs.pop('dest',None )\n if dest is None :\n if long_option_strings:\n dest_option_string=long_option_strings[0]\n else :\n dest_option_string=option_strings[0]\n dest=dest_option_string.lstrip(self.prefix_chars)\n if not dest:\n msg=_('dest= is required for options like %r')\n raise ValueError(msg %option_string)\n dest=dest.replace('-','_')\n \n \n return dict(kwargs,dest=dest,option_strings=option_strings)\n \n def _pop_action_class(self,kwargs,default=None ):\n action=kwargs.pop('action',default)\n return self._registry_get('action',action,action)\n \n def _get_handler(self):\n \n handler_func_name='_handle_conflict_%s'%self.conflict_handler\n try :\n return getattr(self,handler_func_name)\n except AttributeError:\n msg=_('invalid conflict_resolution value: %r')\n raise ValueError(msg %self.conflict_handler)\n \n def _check_conflict(self,action):\n \n \n confl_optionals=[]\n for option_string in action.option_strings:\n if option_string in self._option_string_actions:\n confl_optional=self._option_string_actions[option_string]\n confl_optionals.append((option_string,confl_optional))\n \n \n if confl_optionals:\n conflict_handler=self._get_handler()\n conflict_handler(action,confl_optionals)\n \n def _handle_conflict_error(self,action,conflicting_actions):\n message=ngettext('conflicting option string: %s',\n 'conflicting option strings: %s',\n len(conflicting_actions))\n conflict_string=', '.join([option_string\n for option_string,action\n in 
conflicting_actions])\n raise ArgumentError(action,message %conflict_string)\n \n def _handle_conflict_resolve(self,action,conflicting_actions):\n \n \n for option_string,action in conflicting_actions:\n \n \n action.option_strings.remove(option_string)\n self._option_string_actions.pop(option_string,None )\n \n \n \n if not action.option_strings:\n action.container._remove_action(action)\n \n \nclass _ArgumentGroup(_ActionsContainer):\n\n def __init__(self,container,title=None ,description=None ,**kwargs):\n \n update=kwargs.setdefault\n update('conflict_handler',container.conflict_handler)\n update('prefix_chars',container.prefix_chars)\n update('argument_default',container.argument_default)\n super_init=super(_ArgumentGroup,self).__init__\n super_init(description=description,**kwargs)\n \n \n self.title=title\n self._group_actions=[]\n \n \n self._registries=container._registries\n self._actions=container._actions\n self._option_string_actions=container._option_string_actions\n self._defaults=container._defaults\n self._has_negative_number_optionals=\\\n container._has_negative_number_optionals\n self._mutually_exclusive_groups=container._mutually_exclusive_groups\n \n def _add_action(self,action):\n action=super(_ArgumentGroup,self)._add_action(action)\n self._group_actions.append(action)\n return action\n \n def _remove_action(self,action):\n super(_ArgumentGroup,self)._remove_action(action)\n self._group_actions.remove(action)\n \n \nclass _MutuallyExclusiveGroup(_ArgumentGroup):\n\n def __init__(self,container,required=False ):\n super(_MutuallyExclusiveGroup,self).__init__(container)\n self.required=required\n self._container=container\n \n def _add_action(self,action):\n if action.required:\n msg=_('mutually exclusive arguments must be optional')\n raise ValueError(msg)\n action=self._container._add_action(action)\n self._group_actions.append(action)\n return action\n \n def _remove_action(self,action):\n self._container._remove_action(action)\n self._group_actions.remove(action)\n \n \nclass ArgumentParser(_AttributeHolder,_ActionsContainer):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def __init__(self,\n prog=None ,\n usage=None ,\n description=None ,\n epilog=None ,\n parents=[],\n formatter_class=HelpFormatter,\n prefix_chars='-',\n fromfile_prefix_chars=None ,\n argument_default=None ,\n conflict_handler='error',\n add_help=True ,\n allow_abbrev=True ):\n \n superinit=super(ArgumentParser,self).__init__\n superinit(description=description,\n prefix_chars=prefix_chars,\n argument_default=argument_default,\n conflict_handler=conflict_handler)\n \n \n if prog is None :\n prog=_os.path.basename(_sys.argv[0])\n \n self.prog=prog\n self.usage=usage\n self.epilog=epilog\n self.formatter_class=formatter_class\n self.fromfile_prefix_chars=fromfile_prefix_chars\n self.add_help=add_help\n self.allow_abbrev=allow_abbrev\n \n add_group=self.add_argument_group\n self._positionals=add_group(_('positional arguments'))\n self._optionals=add_group(_('optional arguments'))\n self._subparsers=None\n \n \n def identity(string):\n return string\n self.register('type',None ,identity)\n \n \n \n default_prefix='-'if '-'in prefix_chars else prefix_chars[0]\n if self.add_help:\n self.add_argument(\n default_prefix+'h',default_prefix *2+'help',\n action='help',default=SUPPRESS,\n help=_('show this help message and exit'))\n \n \n for parent in parents:\n self._add_container_actions(parent)\n try :\n defaults=parent._defaults\n except AttributeError:\n pass\n else :\n self._defaults.update(defaults)\n 
\n \n \n \n def _get_kwargs(self):\n names=[\n 'prog',\n 'usage',\n 'description',\n 'formatter_class',\n 'conflict_handler',\n 'add_help',\n ]\n return [(name,getattr(self,name))for name in names]\n \n \n \n \n def add_subparsers(self,**kwargs):\n if self._subparsers is not None :\n self.error(_('cannot have multiple subparser arguments'))\n \n \n kwargs.setdefault('parser_class',type(self))\n \n if 'title'in kwargs or 'description'in kwargs:\n title=_(kwargs.pop('title','subcommands'))\n description=_(kwargs.pop('description',None ))\n self._subparsers=self.add_argument_group(title,description)\n else :\n self._subparsers=self._positionals\n \n \n \n if kwargs.get('prog')is None :\n formatter=self._get_formatter()\n positionals=self._get_positional_actions()\n groups=self._mutually_exclusive_groups\n formatter.add_usage(self.usage,positionals,groups,'')\n kwargs['prog']=formatter.format_help().strip()\n \n \n parsers_class=self._pop_action_class(kwargs,'parsers')\n action=parsers_class(option_strings=[],**kwargs)\n self._subparsers._add_action(action)\n \n \n return action\n \n def _add_action(self,action):\n if action.option_strings:\n self._optionals._add_action(action)\n else :\n self._positionals._add_action(action)\n return action\n \n def _get_optional_actions(self):\n return [action\n for action in self._actions\n if action.option_strings]\n \n def _get_positional_actions(self):\n return [action\n for action in self._actions\n if not action.option_strings]\n \n \n \n \n def parse_args(self,args=None ,namespace=None ):\n args,argv=self.parse_known_args(args,namespace)\n if argv:\n msg=_('unrecognized arguments: %s')\n self.error(msg %' '.join(argv))\n return args\n \n def parse_known_args(self,args=None ,namespace=None ):\n if args is None :\n \n args=_sys.argv[1:]\n else :\n \n args=list(args)\n \n \n if namespace is None :\n namespace=Namespace()\n \n \n for action in self._actions:\n if action.dest is not SUPPRESS:\n if not hasattr(namespace,action.dest):\n if action.default is not SUPPRESS:\n setattr(namespace,action.dest,action.default)\n \n \n for dest in self._defaults:\n if not hasattr(namespace,dest):\n setattr(namespace,dest,self._defaults[dest])\n \n \n try :\n namespace,args=self._parse_known_args(args,namespace)\n if hasattr(namespace,_UNRECOGNIZED_ARGS_ATTR):\n args.extend(getattr(namespace,_UNRECOGNIZED_ARGS_ATTR))\n delattr(namespace,_UNRECOGNIZED_ARGS_ATTR)\n return namespace,args\n except ArgumentError:\n err=_sys.exc_info()[1]\n self.error(str(err))\n \n def _parse_known_args(self,arg_strings,namespace):\n \n if self.fromfile_prefix_chars is not None :\n arg_strings=self._read_args_from_files(arg_strings)\n \n \n \n action_conflicts={}\n for mutex_group in self._mutually_exclusive_groups:\n group_actions=mutex_group._group_actions\n for i,mutex_action in enumerate(mutex_group._group_actions):\n conflicts=action_conflicts.setdefault(mutex_action,[])\n conflicts.extend(group_actions[:i])\n conflicts.extend(group_actions[i+1:])\n \n \n \n \n option_string_indices={}\n arg_string_pattern_parts=[]\n arg_strings_iter=iter(arg_strings)\n for i,arg_string in enumerate(arg_strings_iter):\n \n \n if arg_string =='--':\n arg_string_pattern_parts.append('-')\n for arg_string in arg_strings_iter:\n arg_string_pattern_parts.append('A')\n \n \n \n else :\n option_tuple=self._parse_optional(arg_string)\n if option_tuple is None :\n pattern='A'\n else :\n option_string_indices[i]=option_tuple\n pattern='O'\n arg_string_pattern_parts.append(pattern)\n \n \n 
arg_strings_pattern=''.join(arg_string_pattern_parts)\n \n \n seen_actions=set()\n seen_non_default_actions=set()\n \n def take_action(action,argument_strings,option_string=None ):\n seen_actions.add(action)\n argument_values=self._get_values(action,argument_strings)\n \n \n \n \n if argument_values is not action.default:\n seen_non_default_actions.add(action)\n for conflict_action in action_conflicts.get(action,[]):\n if conflict_action in seen_non_default_actions:\n msg=_('not allowed with argument %s')\n action_name=_get_action_name(conflict_action)\n raise ArgumentError(action,msg %action_name)\n \n \n \n if argument_values is not SUPPRESS:\n action(self,namespace,argument_values,option_string)\n \n \n def consume_optional(start_index):\n \n \n option_tuple=option_string_indices[start_index]\n action,option_string,explicit_arg=option_tuple\n \n \n \n match_argument=self._match_argument\n action_tuples=[]\n while True :\n \n \n if action is None :\n extras.append(arg_strings[start_index])\n return start_index+1\n \n \n \n if explicit_arg is not None :\n arg_count=match_argument(action,'A')\n \n \n \n \n chars=self.prefix_chars\n if arg_count ==0 and option_string[1]not in chars:\n action_tuples.append((action,[],option_string))\n char=option_string[0]\n option_string=char+explicit_arg[0]\n new_explicit_arg=explicit_arg[1:]or None\n optionals_map=self._option_string_actions\n if option_string in optionals_map:\n action=optionals_map[option_string]\n explicit_arg=new_explicit_arg\n else :\n msg=_('ignored explicit argument %r')\n raise ArgumentError(action,msg %explicit_arg)\n \n \n \n elif arg_count ==1:\n stop=start_index+1\n args=[explicit_arg]\n action_tuples.append((action,args,option_string))\n break\n \n \n \n else :\n msg=_('ignored explicit argument %r')\n raise ArgumentError(action,msg %explicit_arg)\n \n \n \n \n else :\n start=start_index+1\n selected_patterns=arg_strings_pattern[start:]\n arg_count=match_argument(action,selected_patterns)\n stop=start+arg_count\n args=arg_strings[start:stop]\n action_tuples.append((action,args,option_string))\n break\n \n \n \n assert action_tuples\n for action,args,option_string in action_tuples:\n take_action(action,args,option_string)\n return stop\n \n \n \n positionals=self._get_positional_actions()\n \n \n def consume_positionals(start_index):\n \n match_partial=self._match_arguments_partial\n selected_pattern=arg_strings_pattern[start_index:]\n arg_counts=match_partial(positionals,selected_pattern)\n \n \n \n for action,arg_count in zip(positionals,arg_counts):\n args=arg_strings[start_index:start_index+arg_count]\n start_index +=arg_count\n take_action(action,args)\n \n \n \n positionals[:]=positionals[len(arg_counts):]\n return start_index\n \n \n \n extras=[]\n start_index=0\n if option_string_indices:\n max_option_string_index=max(option_string_indices)\n else :\n max_option_string_index=-1\n while start_index <=max_option_string_index:\n \n \n next_option_string_index=min([\n index\n for index in option_string_indices\n if index >=start_index])\n if start_index !=next_option_string_index:\n positionals_end_index=consume_positionals(start_index)\n \n \n \n if positionals_end_index >start_index:\n start_index=positionals_end_index\n continue\n else :\n start_index=positionals_end_index\n \n \n \n if start_index not in option_string_indices:\n strings=arg_strings[start_index:next_option_string_index]\n extras.extend(strings)\n start_index=next_option_string_index\n \n \n start_index=consume_optional(start_index)\n \n \n 
stop_index=consume_positionals(start_index)\n \n \n extras.extend(arg_strings[stop_index:])\n \n \n \n required_actions=[]\n for action in self._actions:\n if action not in seen_actions:\n if action.required:\n required_actions.append(_get_action_name(action))\n else :\n \n \n \n \n if (action.default is not None and\n isinstance(action.default,str)and\n hasattr(namespace,action.dest)and\n action.default is getattr(namespace,action.dest)):\n setattr(namespace,action.dest,\n self._get_value(action,action.default))\n \n if required_actions:\n self.error(_('the following arguments are required: %s')%\n ', '.join(required_actions))\n \n \n for group in self._mutually_exclusive_groups:\n if group.required:\n for action in group._group_actions:\n if action in seen_non_default_actions:\n break\n \n \n else :\n names=[_get_action_name(action)\n for action in group._group_actions\n if action.help is not SUPPRESS]\n msg=_('one of the arguments %s is required')\n self.error(msg %' '.join(names))\n \n \n return namespace,extras\n \n def _read_args_from_files(self,arg_strings):\n \n new_arg_strings=[]\n for arg_string in arg_strings:\n \n \n if not arg_string or arg_string[0]not in self.fromfile_prefix_chars:\n new_arg_strings.append(arg_string)\n \n \n else :\n try :\n with open(arg_string[1:])as args_file:\n arg_strings=[]\n for arg_line in args_file.read().splitlines():\n for arg in self.convert_arg_line_to_args(arg_line):\n arg_strings.append(arg)\n arg_strings=self._read_args_from_files(arg_strings)\n new_arg_strings.extend(arg_strings)\n except OSError:\n err=_sys.exc_info()[1]\n self.error(str(err))\n \n \n return new_arg_strings\n \n def convert_arg_line_to_args(self,arg_line):\n return [arg_line]\n \n def _match_argument(self,action,arg_strings_pattern):\n \n nargs_pattern=self._get_nargs_pattern(action)\n match=_re.match(nargs_pattern,arg_strings_pattern)\n \n \n if match is None :\n nargs_errors={\n None :_('expected one argument'),\n OPTIONAL:_('expected at most one argument'),\n ONE_OR_MORE:_('expected at least one argument'),\n }\n default=ngettext('expected %s argument',\n 'expected %s arguments',\n action.nargs)%action.nargs\n msg=nargs_errors.get(action.nargs,default)\n raise ArgumentError(action,msg)\n \n \n return len(match.group(1))\n \n def _match_arguments_partial(self,actions,arg_strings_pattern):\n \n \n result=[]\n for i in range(len(actions),0,-1):\n actions_slice=actions[:i]\n pattern=''.join([self._get_nargs_pattern(action)\n for action in actions_slice])\n match=_re.match(pattern,arg_strings_pattern)\n if match is not None :\n result.extend([len(string)for string in match.groups()])\n break\n \n \n return result\n \n def _parse_optional(self,arg_string):\n \n if not arg_string:\n return None\n \n \n if not arg_string[0]in self.prefix_chars:\n return None\n \n \n if arg_string in self._option_string_actions:\n action=self._option_string_actions[arg_string]\n return action,arg_string,None\n \n \n if len(arg_string)==1:\n return None\n \n \n if '='in arg_string:\n option_string,explicit_arg=arg_string.split('=',1)\n if option_string in self._option_string_actions:\n action=self._option_string_actions[option_string]\n return action,option_string,explicit_arg\n \n if self.allow_abbrev:\n \n \n option_tuples=self._get_option_tuples(arg_string)\n \n \n if len(option_tuples)>1:\n options=', '.join([option_string\n for action,option_string,explicit_arg in option_tuples])\n args={'option':arg_string,'matches':options}\n msg=_('ambiguous option: %(option)s could match %(matches)s')\n 
self.error(msg %args)\n \n \n \n elif len(option_tuples)==1:\n option_tuple,=option_tuples\n return option_tuple\n \n \n \n \n if self._negative_number_matcher.match(arg_string):\n if not self._has_negative_number_optionals:\n return None\n \n \n if ' 'in arg_string:\n return None\n \n \n \n return None ,arg_string,None\n \n def _get_option_tuples(self,option_string):\n result=[]\n \n \n \n chars=self.prefix_chars\n if option_string[0]in chars and option_string[1]in chars:\n if '='in option_string:\n option_prefix,explicit_arg=option_string.split('=',1)\n else :\n option_prefix=option_string\n explicit_arg=None\n for option_string in self._option_string_actions:\n if option_string.startswith(option_prefix):\n action=self._option_string_actions[option_string]\n tup=action,option_string,explicit_arg\n result.append(tup)\n \n \n \n \n elif option_string[0]in chars and option_string[1]not in chars:\n option_prefix=option_string\n explicit_arg=None\n short_option_prefix=option_string[:2]\n short_explicit_arg=option_string[2:]\n \n for option_string in self._option_string_actions:\n if option_string ==short_option_prefix:\n action=self._option_string_actions[option_string]\n tup=action,option_string,short_explicit_arg\n result.append(tup)\n elif option_string.startswith(option_prefix):\n action=self._option_string_actions[option_string]\n tup=action,option_string,explicit_arg\n result.append(tup)\n \n \n else :\n self.error(_('unexpected option string: %s')%option_string)\n \n \n return result\n \n def _get_nargs_pattern(self,action):\n \n \n nargs=action.nargs\n \n \n if nargs is None :\n nargs_pattern='(-*A-*)'\n \n \n elif nargs ==OPTIONAL:\n nargs_pattern='(-*A?-*)'\n \n \n elif nargs ==ZERO_OR_MORE:\n nargs_pattern='(-*[A-]*)'\n \n \n elif nargs ==ONE_OR_MORE:\n nargs_pattern='(-*A[A-]*)'\n \n \n elif nargs ==REMAINDER:\n nargs_pattern='([-AO]*)'\n \n \n elif nargs ==PARSER:\n nargs_pattern='(-*A[-AO]*)'\n \n \n elif nargs ==SUPPRESS:\n nargs_pattern='(-*-*)'\n \n \n else :\n nargs_pattern='(-*%s-*)'%'-*'.join('A'*nargs)\n \n \n if action.option_strings:\n nargs_pattern=nargs_pattern.replace('-*','')\n nargs_pattern=nargs_pattern.replace('-','')\n \n \n return nargs_pattern\n \n \n \n \n \n def parse_intermixed_args(self,args=None ,namespace=None ):\n args,argv=self.parse_known_intermixed_args(args,namespace)\n if argv:\n msg=_('unrecognized arguments: %s')\n self.error(msg %' '.join(argv))\n return args\n \n def parse_known_intermixed_args(self,args=None ,namespace=None ):\n \n \n \n \n \n \n \n \n \n \n \n \n positionals=self._get_positional_actions()\n a=[action for action in positionals\n if action.nargs in [PARSER,REMAINDER]]\n if a:\n raise TypeError('parse_intermixed_args: positional arg'\n ' with nargs=%s'%a[0].nargs)\n \n if [action.dest for group in self._mutually_exclusive_groups\n for action in group._group_actions if action in positionals]:\n raise TypeError('parse_intermixed_args: positional in'\n ' mutuallyExclusiveGroup')\n \n try :\n save_usage=self.usage\n try :\n if self.usage is None :\n \n self.usage=self.format_usage()[7:]\n for action in positionals:\n \n action.save_nargs=action.nargs\n \n action.nargs=SUPPRESS\n action.save_default=action.default\n action.default=SUPPRESS\n namespace,remaining_args=self.parse_known_args(args,\n namespace)\n for action in positionals:\n \n if (hasattr(namespace,action.dest)\n and getattr(namespace,action.dest)==[]):\n from warnings import warn\n warn('Do not expect %s in %s'%(action.dest,namespace))\n delattr(namespace,action.dest)\n 
finally :\n \n for action in positionals:\n action.nargs=action.save_nargs\n action.default=action.save_default\n optionals=self._get_optional_actions()\n try :\n \n \n for action in optionals:\n action.save_required=action.required\n action.required=False\n for group in self._mutually_exclusive_groups:\n group.save_required=group.required\n group.required=False\n namespace,extras=self.parse_known_args(remaining_args,\n namespace)\n finally :\n \n for action in optionals:\n action.required=action.save_required\n for group in self._mutually_exclusive_groups:\n group.required=group.save_required\n finally :\n self.usage=save_usage\n return namespace,extras\n \n \n \n \n def _get_values(self,action,arg_strings):\n \n if action.nargs not in [PARSER,REMAINDER]:\n try :\n arg_strings.remove('--')\n except ValueError:\n pass\n \n \n if not arg_strings and action.nargs ==OPTIONAL:\n if action.option_strings:\n value=action.const\n else :\n value=action.default\n if isinstance(value,str):\n value=self._get_value(action,value)\n self._check_value(action,value)\n \n \n \n elif (not arg_strings and action.nargs ==ZERO_OR_MORE and\n not action.option_strings):\n if action.default is not None :\n value=action.default\n else :\n value=arg_strings\n self._check_value(action,value)\n \n \n elif len(arg_strings)==1 and action.nargs in [None ,OPTIONAL]:\n arg_string,=arg_strings\n value=self._get_value(action,arg_string)\n self._check_value(action,value)\n \n \n elif action.nargs ==REMAINDER:\n value=[self._get_value(action,v)for v in arg_strings]\n \n \n elif action.nargs ==PARSER:\n value=[self._get_value(action,v)for v in arg_strings]\n self._check_value(action,value[0])\n \n \n elif action.nargs ==SUPPRESS:\n value=SUPPRESS\n \n \n else :\n value=[self._get_value(action,v)for v in arg_strings]\n for v in value:\n self._check_value(action,v)\n \n \n return value\n \n def _get_value(self,action,arg_string):\n type_func=self._registry_get('type',action.type,action.type)\n if not callable(type_func):\n msg=_('%r is not callable')\n raise ArgumentError(action,msg %type_func)\n \n \n try :\n result=type_func(arg_string)\n \n \n except ArgumentTypeError:\n name=getattr(action.type,'__name__',repr(action.type))\n msg=str(_sys.exc_info()[1])\n raise ArgumentError(action,msg)\n \n \n except (TypeError,ValueError):\n name=getattr(action.type,'__name__',repr(action.type))\n args={'type':name,'value':arg_string}\n msg=_('invalid %(type)s value: %(value)r')\n raise ArgumentError(action,msg %args)\n \n \n return result\n \n def _check_value(self,action,value):\n \n if action.choices is not None and value not in action.choices:\n args={'value':value,\n 'choices':', '.join(map(repr,action.choices))}\n msg=_('invalid choice: %(value)r (choose from %(choices)s)')\n raise ArgumentError(action,msg %args)\n \n \n \n \n def format_usage(self):\n formatter=self._get_formatter()\n formatter.add_usage(self.usage,self._actions,\n self._mutually_exclusive_groups)\n return formatter.format_help()\n \n def format_help(self):\n formatter=self._get_formatter()\n \n \n formatter.add_usage(self.usage,self._actions,\n self._mutually_exclusive_groups)\n \n \n formatter.add_text(self.description)\n \n \n for action_group in self._action_groups:\n formatter.start_section(action_group.title)\n formatter.add_text(action_group.description)\n formatter.add_arguments(action_group._group_actions)\n formatter.end_section()\n \n \n formatter.add_text(self.epilog)\n \n \n return formatter.format_help()\n \n def _get_formatter(self):\n return 
self.formatter_class(prog=self.prog)\n \n \n \n \n def print_usage(self,file=None ):\n if file is None :\n file=_sys.stdout\n self._print_message(self.format_usage(),file)\n \n def print_help(self,file=None ):\n if file is None :\n file=_sys.stdout\n self._print_message(self.format_help(),file)\n \n def _print_message(self,message,file=None ):\n if message:\n if file is None :\n file=_sys.stderr\n file.write(message)\n \n \n \n \n def exit(self,status=0,message=None ):\n if message:\n self._print_message(message,_sys.stderr)\n _sys.exit(status)\n \n def error(self,message):\n ''\n\n\n\n\n\n\n \n self.print_usage(_sys.stderr)\n args={'prog':self.prog,'message':message}\n self.exit(2,_('%(prog)s: error: %(message)s\\n')%args)\n", ["copy", "gettext", "os", "re", "sys", "textwrap", "warnings"]], "atexit": [".py", "''\n\n\n\n\n\nclass __loader__(object):\n pass\n \ndef _clear(*args,**kw):\n ''\n \n pass\n \ndef _run_exitfuncs(*args,**kw):\n ''\n \n pass\n \ndef register(*args,**kw):\n ''\n\n\n\n\n\n\n \n pass\n \ndef unregister(*args,**kw):\n ''\n\n\n\n \n pass\n", []], "base64": [".py", "#! /usr/bin/env python3\n\n\"\"\"Base16, Base32, Base64 (RFC 3548), Base85 and Ascii85 data encodings\"\"\"\n\n\n\n\n\nimport re\nimport struct\nimport binascii\n\n\n__all__=[\n\n'encode','decode','encodebytes','decodebytes',\n\n'b64encode','b64decode','b32encode','b32decode',\n'b16encode','b16decode',\n\n'b85encode','b85decode','a85encode','a85decode',\n\n'standard_b64encode','standard_b64decode',\n\n\n\n\n'urlsafe_b64encode','urlsafe_b64decode',\n]\n\n\nbytes_types=(bytes,bytearray)\n\ndef _bytes_from_decode_data(s):\n if isinstance(s,str):\n try :\n return s.encode('ascii')\n except UnicodeEncodeError:\n raise ValueError('string argument should contain only ASCII characters')\n if isinstance(s,bytes_types):\n return s\n try :\n return memoryview(s).tobytes()\n except TypeError:\n raise TypeError(\"argument should be a bytes-like object or ASCII \"\n \"string, not %r\"%s.__class__.__name__)from None\n \n \n \n \ndef b64encode(s,altchars=None ):\n ''\n\n\n\n\n \n encoded=binascii.b2a_base64(s,newline=False )\n if altchars is not None :\n assert len(altchars)==2,repr(altchars)\n return encoded.translate(bytes.maketrans(b'+/',altchars))\n return encoded\n \n \ndef b64decode(s,altchars=None ,validate=False ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n s=_bytes_from_decode_data(s)\n if altchars is not None :\n altchars=_bytes_from_decode_data(altchars)\n assert len(altchars)==2,repr(altchars)\n s=s.translate(bytes.maketrans(altchars,b'+/'))\n if validate and not re.match(b'^[A-Za-z0-9+/]*={0,2}$',s):\n raise binascii.Error('Non-base64 digit found')\n return binascii.a2b_base64(s)\n \n \ndef standard_b64encode(s):\n ''\n\n\n \n return b64encode(s)\n \ndef standard_b64decode(s):\n ''\n\n\n\n\n\n \n return b64decode(s)\n \n \n_urlsafe_encode_translation=bytes.maketrans(b'+/',b'-_')\n_urlsafe_decode_translation=bytes.maketrans(b'-_',b'+/')\n\ndef urlsafe_b64encode(s):\n ''\n\n\n\n\n \n return b64encode(s).translate(_urlsafe_encode_translation)\n \ndef urlsafe_b64decode(s):\n ''\n\n\n\n\n\n\n\n\n \n s=_bytes_from_decode_data(s)\n s=s.translate(_urlsafe_decode_translation)\n return b64decode(s)\n \n \n \n \n_b32alphabet=b'ABCDEFGHIJKLMNOPQRSTUVWXYZ234567'\n_b32tab2=None\n_b32rev=None\n\ndef b32encode(s):\n ''\n \n global _b32tab2\n \n \n if _b32tab2 is None :\n b32tab=[bytes((i,))for i in _b32alphabet]\n _b32tab2=[a+b for a in b32tab for b in b32tab]\n b32tab=None\n \n if not isinstance(s,bytes_types):\n 
s=memoryview(s).tobytes()\n leftover=len(s)%5\n \n if leftover:\n s=s+b'\\0'*(5 -leftover)\n encoded=bytearray()\n from_bytes=int.from_bytes\n b32tab2=_b32tab2\n for i in range(0,len(s),5):\n c=from_bytes(s[i:i+5],'big')\n encoded +=(b32tab2[c >>30]+\n b32tab2[(c >>20)&0x3ff]+\n b32tab2[(c >>10)&0x3ff]+\n b32tab2[c&0x3ff]\n )\n \n if leftover ==1:\n encoded[-6:]=b'======'\n elif leftover ==2:\n encoded[-4:]=b'===='\n elif leftover ==3:\n encoded[-3:]=b'==='\n elif leftover ==4:\n encoded[-1:]=b'='\n return bytes(encoded)\n \ndef b32decode(s,casefold=False ,map01=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n global _b32rev\n \n \n if _b32rev is None :\n _b32rev={v:k for k,v in enumerate(_b32alphabet)}\n s=_bytes_from_decode_data(s)\n if len(s)%8:\n raise binascii.Error('Incorrect padding')\n \n \n \n if map01 is not None :\n map01=_bytes_from_decode_data(map01)\n assert len(map01)==1,repr(map01)\n s=s.translate(bytes.maketrans(b'01',b'O'+map01))\n if casefold:\n s=s.upper()\n \n \n \n l=len(s)\n s=s.rstrip(b'=')\n padchars=l -len(s)\n \n decoded=bytearray()\n b32rev=_b32rev\n for i in range(0,len(s),8):\n quanta=s[i:i+8]\n acc=0\n try :\n for c in quanta:\n acc=(acc <<5)+b32rev[c]\n except KeyError:\n raise binascii.Error('Non-base32 digit found')from None\n decoded +=acc.to_bytes(5,'big')\n \n if l %8 or padchars not in {0,1,3,4,6}:\n raise binascii.Error('Incorrect padding')\n if padchars and decoded:\n acc <<=5 *padchars\n last=acc.to_bytes(5,'big')\n leftover=(43 -5 *padchars)//8\n decoded[-5:]=last[:leftover]\n return bytes(decoded)\n \n \n \n \n \ndef b16encode(s):\n ''\n \n return binascii.hexlify(s).upper()\n \n \ndef b16decode(s,casefold=False ):\n ''\n\n\n\n\n\n\n\n \n s=_bytes_from_decode_data(s)\n if casefold:\n s=s.upper()\n if re.search(b'[^0-9A-F]',s):\n raise binascii.Error('Non-base16 digit found')\n return binascii.unhexlify(s)\n \n \n \n \n \n_a85chars=None\n_a85chars2=None\n_A85START=b\"<~\"\n_A85END=b\"~>\"\n\ndef _85encode(b,chars,chars2,pad=False ,foldnuls=False ,foldspaces=False ):\n\n if not isinstance(b,bytes_types):\n b=memoryview(b).tobytes()\n \n padding=(-len(b))%4\n if padding:\n b=b+b'\\0'*padding\n words=struct.Struct('!%dI'%(len(b)//4)).unpack(b)\n \n chunks=[b'z'if foldnuls and not word else\n b'y'if foldspaces and word ==0x20202020 else\n (chars2[word //614125]+\n chars2[word //85 %7225]+\n chars[word %85])\n for word in words]\n \n if padding and not pad:\n if chunks[-1]==b'z':\n chunks[-1]=chars[0]*5\n chunks[-1]=chunks[-1][:-padding]\n \n return b''.join(chunks)\n \ndef a85encode(b,*,foldspaces=False ,wrapcol=0,pad=False ,adobe=False ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n global _a85chars,_a85chars2\n \n \n if _a85chars is None :\n _a85chars=[bytes((i,))for i in range(33,118)]\n _a85chars2=[(a+b)for a in _a85chars for b in _a85chars]\n \n result=_85encode(b,_a85chars,_a85chars2,pad,True ,foldspaces)\n \n if adobe:\n result=_A85START+result\n if wrapcol:\n wrapcol=max(2 if adobe else 1,wrapcol)\n chunks=[result[i:i+wrapcol]\n for i in range(0,len(result),wrapcol)]\n if adobe:\n if len(chunks[-1])+2 >wrapcol:\n chunks.append(b'')\n result=b'\\n'.join(chunks)\n if adobe:\n result +=_A85END\n \n return result\n \ndef a85decode(b,*,foldspaces=False ,adobe=False ,ignorechars=b' \\t\\n\\r\\v'):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n b=_bytes_from_decode_data(b)\n if adobe:\n if not b.endswith(_A85END):\n raise ValueError(\n \"Ascii85 encoded byte sequences must end \"\n \"with {!r}\".format(_A85END)\n )\n if b.startswith(_A85START):\n b=b[2:-2]\n else :\n 
b=b[:-2]\n \n \n \n \n packI=struct.Struct('!I').pack\n decoded=[]\n decoded_append=decoded.append\n curr=[]\n curr_append=curr.append\n curr_clear=curr.clear\n for x in b+b'u'*4:\n if b'!'[0]<=x <=b'u'[0]:\n curr_append(x)\n if len(curr)==5:\n acc=0\n for x in curr:\n acc=85 *acc+(x -33)\n try :\n decoded_append(packI(acc))\n except struct.error:\n raise ValueError('Ascii85 overflow')from None\n curr_clear()\n elif x ==b'z'[0]:\n if curr:\n raise ValueError('z inside Ascii85 5-tuple')\n decoded_append(b'\\0\\0\\0\\0')\n elif foldspaces and x ==b'y'[0]:\n if curr:\n raise ValueError('y inside Ascii85 5-tuple')\n decoded_append(b'\\x20\\x20\\x20\\x20')\n elif x in ignorechars:\n \n continue\n else :\n raise ValueError('Non-Ascii85 digit found: %c'%x)\n \n result=b''.join(decoded)\n padding=4 -len(curr)\n if padding:\n \n result=result[:-padding]\n return result\n \n \n \n_b85alphabet=(b\"0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ\"\nb\"abcdefghijklmnopqrstuvwxyz!#$%&()*+-;<=>?@^_`{|}~\")\n_b85chars=None\n_b85chars2=None\n_b85dec=None\n\ndef b85encode(b,pad=False ):\n ''\n\n\n\n \n global _b85chars,_b85chars2\n \n \n if _b85chars is None :\n _b85chars=[bytes((i,))for i in _b85alphabet]\n _b85chars2=[(a+b)for a in _b85chars for b in _b85chars]\n return _85encode(b,_b85chars,_b85chars2,pad)\n \ndef b85decode(b):\n ''\n\n\n \n global _b85dec\n \n \n if _b85dec is None :\n _b85dec=[None ]*256\n for i,c in enumerate(_b85alphabet):\n _b85dec[c]=i\n \n b=_bytes_from_decode_data(b)\n padding=(-len(b))%5\n b=b+b'~'*padding\n out=[]\n packI=struct.Struct('!I').pack\n for i in range(0,len(b),5):\n chunk=b[i:i+5]\n acc=0\n try :\n for c in chunk:\n acc=acc *85+_b85dec[c]\n except TypeError:\n for j,c in enumerate(chunk):\n if _b85dec[c]is None :\n raise ValueError('bad base85 character at position %d'\n %(i+j))from None\n raise\n try :\n out.append(packI(acc))\n except struct.error:\n raise ValueError('base85 overflow in hunk starting at byte %d'\n %i)from None\n \n result=b''.join(out)\n if padding:\n result=result[:-padding]\n return result\n \n \n \n \n \nMAXLINESIZE=76\nMAXBINSIZE=(MAXLINESIZE //4)*3\n\ndef encode(input,output):\n ''\n while True :\n s=input.read(MAXBINSIZE)\n if not s:\n break\n while len(s)\":\n return filename\n canonic=self.fncache.get(filename)\n if not canonic:\n canonic=os.path.abspath(filename)\n canonic=os.path.normcase(canonic)\n self.fncache[filename]=canonic\n return canonic\n \n def reset(self):\n ''\n import linecache\n linecache.checkcache()\n self.botframe=None\n self._set_stopinfo(None ,None )\n \n def trace_dispatch(self,frame,event,arg):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if self.quitting:\n return\n if event =='line':\n return self.dispatch_line(frame)\n if event =='call':\n return self.dispatch_call(frame,arg)\n if event =='return':\n return self.dispatch_return(frame,arg)\n if event =='exception':\n return self.dispatch_exception(frame,arg)\n if event =='c_call':\n return self.trace_dispatch\n if event =='c_exception':\n return self.trace_dispatch\n if event =='c_return':\n return self.trace_dispatch\n print('bdb.Bdb.dispatch: unknown debugging event:',repr(event))\n return self.trace_dispatch\n \n def dispatch_line(self,frame):\n ''\n\n\n\n\n \n if self.stop_here(frame)or self.break_here(frame):\n self.user_line(frame)\n if self.quitting:raise BdbQuit\n return self.trace_dispatch\n \n def dispatch_call(self,frame,arg):\n ''\n\n\n\n\n \n \n if self.botframe is None :\n \n self.botframe=frame.f_back\n return self.trace_dispatch\n if not 
(self.stop_here(frame)or self.break_anywhere(frame)):\n \n return\n \n if self.stopframe and frame.f_code.co_flags&GENERATOR_AND_COROUTINE_FLAGS:\n return self.trace_dispatch\n self.user_call(frame,arg)\n if self.quitting:raise BdbQuit\n return self.trace_dispatch\n \n def dispatch_return(self,frame,arg):\n ''\n\n\n\n\n \n if self.stop_here(frame)or frame ==self.returnframe:\n \n if self.stopframe and frame.f_code.co_flags&GENERATOR_AND_COROUTINE_FLAGS:\n return self.trace_dispatch\n try :\n self.frame_returning=frame\n self.user_return(frame,arg)\n finally :\n self.frame_returning=None\n if self.quitting:raise BdbQuit\n \n if self.stopframe is frame and self.stoplineno !=-1:\n self._set_stopinfo(None ,None )\n return self.trace_dispatch\n \n def dispatch_exception(self,frame,arg):\n ''\n\n\n\n\n \n if self.stop_here(frame):\n \n \n \n if not (frame.f_code.co_flags&GENERATOR_AND_COROUTINE_FLAGS\n and arg[0]is StopIteration and arg[2]is None ):\n self.user_exception(frame,arg)\n if self.quitting:raise BdbQuit\n \n \n \n \n elif (self.stopframe and frame is not self.stopframe\n and self.stopframe.f_code.co_flags&GENERATOR_AND_COROUTINE_FLAGS\n and arg[0]in (StopIteration,GeneratorExit)):\n self.user_exception(frame,arg)\n if self.quitting:raise BdbQuit\n \n return self.trace_dispatch\n \n \n \n \n \n def is_skipped_module(self,module_name):\n ''\n for pattern in self.skip:\n if fnmatch.fnmatch(module_name,pattern):\n return True\n return False\n \n def stop_here(self,frame):\n ''\n \n \n if self.skip and\\\n self.is_skipped_module(frame.f_globals.get('__name__')):\n return False\n if frame is self.stopframe:\n if self.stoplineno ==-1:\n return False\n return frame.f_lineno >=self.stoplineno\n if not self.stopframe:\n return True\n return False\n \n def break_here(self,frame):\n ''\n\n\n\n \n filename=self.canonic(frame.f_code.co_filename)\n if filename not in self.breaks:\n return False\n lineno=frame.f_lineno\n if lineno not in self.breaks[filename]:\n \n \n lineno=frame.f_code.co_firstlineno\n if lineno not in self.breaks[filename]:\n return False\n \n \n (bp,flag)=effective(filename,lineno,frame)\n if bp:\n self.currentbp=bp.number\n if (flag and bp.temporary):\n self.do_clear(str(bp.number))\n return True\n else :\n return False\n \n def do_clear(self,arg):\n ''\n\n\n \n raise NotImplementedError(\"subclass of bdb must implement do_clear()\")\n \n def break_anywhere(self,frame):\n ''\n \n return self.canonic(frame.f_code.co_filename)in self.breaks\n \n \n \n \n def user_call(self,frame,argument_list):\n ''\n pass\n \n def user_line(self,frame):\n ''\n pass\n \n def user_return(self,frame,return_value):\n ''\n pass\n \n def user_exception(self,frame,exc_info):\n ''\n pass\n \n def _set_stopinfo(self,stopframe,returnframe,stoplineno=0):\n ''\n\n\n\n\n \n self.stopframe=stopframe\n self.returnframe=returnframe\n self.quitting=False\n \n \n self.stoplineno=stoplineno\n \n \n \n \n def set_until(self,frame,lineno=None ):\n ''\n \n \n if lineno is None :\n lineno=frame.f_lineno+1\n self._set_stopinfo(frame,frame,lineno)\n \n def set_step(self):\n ''\n \n \n \n \n if self.frame_returning:\n caller_frame=self.frame_returning.f_back\n if caller_frame and not caller_frame.f_trace:\n caller_frame.f_trace=self.trace_dispatch\n self._set_stopinfo(None ,None )\n \n def set_next(self,frame):\n ''\n self._set_stopinfo(frame,None )\n \n def set_return(self,frame):\n ''\n if frame.f_code.co_flags&GENERATOR_AND_COROUTINE_FLAGS:\n self._set_stopinfo(frame,None ,-1)\n else :\n 
self._set_stopinfo(frame.f_back,frame)\n \n def set_trace(self,frame=None ):\n ''\n\n\n \n if frame is None :\n frame=sys._getframe().f_back\n self.reset()\n while frame:\n frame.f_trace=self.trace_dispatch\n self.botframe=frame\n frame=frame.f_back\n self.set_step()\n sys.settrace(self.trace_dispatch)\n \n def set_continue(self):\n ''\n\n\n \n \n self._set_stopinfo(self.botframe,None ,-1)\n if not self.breaks:\n \n sys.settrace(None )\n frame=sys._getframe().f_back\n while frame and frame is not self.botframe:\n del frame.f_trace\n frame=frame.f_back\n \n def set_quit(self):\n ''\n\n\n \n self.stopframe=self.botframe\n self.returnframe=None\n self.quitting=True\n sys.settrace(None )\n \n \n \n \n \n \n \n \n def set_break(self,filename,lineno,temporary=False ,cond=None ,\n funcname=None ):\n ''\n\n\n\n \n filename=self.canonic(filename)\n import linecache\n line=linecache.getline(filename,lineno)\n if not line:\n return 'Line %s:%d does not exist'%(filename,lineno)\n list=self.breaks.setdefault(filename,[])\n if lineno not in list:\n list.append(lineno)\n bp=Breakpoint(filename,lineno,temporary,cond,funcname)\n return None\n \n def _prune_breaks(self,filename,lineno):\n ''\n\n\n\n\n\n \n if (filename,lineno)not in Breakpoint.bplist:\n self.breaks[filename].remove(lineno)\n if not self.breaks[filename]:\n del self.breaks[filename]\n \n def clear_break(self,filename,lineno):\n ''\n\n\n \n filename=self.canonic(filename)\n if filename not in self.breaks:\n return 'There are no breakpoints in %s'%filename\n if lineno not in self.breaks[filename]:\n return 'There is no breakpoint at %s:%d'%(filename,lineno)\n \n \n for bp in Breakpoint.bplist[filename,lineno][:]:\n bp.deleteMe()\n self._prune_breaks(filename,lineno)\n return None\n \n def clear_bpbynumber(self,arg):\n ''\n\n\n \n try :\n bp=self.get_bpbynumber(arg)\n except ValueError as err:\n return str(err)\n bp.deleteMe()\n self._prune_breaks(bp.file,bp.line)\n return None\n \n def clear_all_file_breaks(self,filename):\n ''\n\n\n \n filename=self.canonic(filename)\n if filename not in self.breaks:\n return 'There are no breakpoints in %s'%filename\n for line in self.breaks[filename]:\n blist=Breakpoint.bplist[filename,line]\n for bp in blist:\n bp.deleteMe()\n del self.breaks[filename]\n return None\n \n def clear_all_breaks(self):\n ''\n\n\n \n if not self.breaks:\n return 'There are no breakpoints'\n for bp in Breakpoint.bpbynumber:\n if bp:\n bp.deleteMe()\n self.breaks={}\n return None\n \n def get_bpbynumber(self,arg):\n ''\n\n\n\n \n if not arg:\n raise ValueError('Breakpoint number expected')\n try :\n number=int(arg)\n except ValueError:\n raise ValueError('Non-numeric breakpoint number %s'%arg)from None\n try :\n bp=Breakpoint.bpbynumber[number]\n except IndexError:\n raise ValueError('Breakpoint number %d out of range'%number)from None\n if bp is None :\n raise ValueError('Breakpoint %d already deleted'%number)\n return bp\n \n def get_break(self,filename,lineno):\n ''\n filename=self.canonic(filename)\n return filename in self.breaks and\\\n lineno in self.breaks[filename]\n \n def get_breaks(self,filename,lineno):\n ''\n\n\n \n filename=self.canonic(filename)\n return filename in self.breaks and\\\n lineno in self.breaks[filename]and\\\n Breakpoint.bplist[filename,lineno]or []\n \n def get_file_breaks(self,filename):\n ''\n\n\n \n filename=self.canonic(filename)\n if filename in self.breaks:\n return self.breaks[filename]\n else :\n return []\n \n def get_all_breaks(self):\n ''\n return self.breaks\n \n \n \n \n def 
get_stack(self,f,t):\n ''\n\n\n\n \n stack=[]\n if t and t.tb_frame is f:\n t=t.tb_next\n while f is not None :\n stack.append((f,f.f_lineno))\n if f is self.botframe:\n break\n f=f.f_back\n stack.reverse()\n i=max(0,len(stack)-1)\n while t is not None :\n stack.append((t.tb_frame,t.tb_lineno))\n t=t.tb_next\n if f is None :\n i=max(0,len(stack)-1)\n return stack,i\n \n def format_stack_entry(self,frame_lineno,lprefix=': '):\n ''\n\n\n\n\n\n\n \n import linecache,reprlib\n frame,lineno=frame_lineno\n filename=self.canonic(frame.f_code.co_filename)\n s='%s(%r)'%(filename,lineno)\n if frame.f_code.co_name:\n s +=frame.f_code.co_name\n else :\n s +=\"\"\n if '__args__'in frame.f_locals:\n args=frame.f_locals['__args__']\n else :\n args=None\n if args:\n s +=reprlib.repr(args)\n else :\n s +='()'\n if '__return__'in frame.f_locals:\n rv=frame.f_locals['__return__']\n s +='->'\n s +=reprlib.repr(rv)\n line=linecache.getline(filename,lineno,frame.f_globals)\n if line:\n s +=lprefix+line.strip()\n return s\n \n \n \n \n \n def run(self,cmd,globals=None ,locals=None ):\n ''\n\n\n \n if globals is None :\n import __main__\n globals=__main__.__dict__\n if locals is None :\n locals=globals\n self.reset()\n if isinstance(cmd,str):\n cmd=compile(cmd,\"\",\"exec\")\n sys.settrace(self.trace_dispatch)\n try :\n exec(cmd,globals,locals)\n except BdbQuit:\n pass\n finally :\n self.quitting=True\n sys.settrace(None )\n \n def runeval(self,expr,globals=None ,locals=None ):\n ''\n\n\n \n if globals is None :\n import __main__\n globals=__main__.__dict__\n if locals is None :\n locals=globals\n self.reset()\n sys.settrace(self.trace_dispatch)\n try :\n return eval(expr,globals,locals)\n except BdbQuit:\n pass\n finally :\n self.quitting=True\n sys.settrace(None )\n \n def runctx(self,cmd,globals,locals):\n ''\n \n self.run(cmd,globals,locals)\n \n \n \n def runcall(self,func,*args,**kwds):\n ''\n\n\n \n self.reset()\n sys.settrace(self.trace_dispatch)\n res=None\n try :\n res=func(*args,**kwds)\n except BdbQuit:\n pass\n finally :\n self.quitting=True\n sys.settrace(None )\n return res\n \n \ndef set_trace():\n ''\n Bdb().set_trace()\n \n \nclass Breakpoint:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n \n \n \n next=1\n bplist={}\n bpbynumber=[None ]\n \n \n \n def __init__(self,file,line,temporary=False ,cond=None ,funcname=None ):\n self.funcname=funcname\n \n self.func_first_executable_line=None\n self.file=file\n self.line=line\n self.temporary=temporary\n self.cond=cond\n self.enabled=True\n self.ignore=0\n self.hits=0\n self.number=Breakpoint.next\n Breakpoint.next +=1\n \n self.bpbynumber.append(self)\n if (file,line)in self.bplist:\n self.bplist[file,line].append(self)\n else :\n self.bplist[file,line]=[self]\n \n def deleteMe(self):\n ''\n\n\n\n \n \n index=(self.file,self.line)\n self.bpbynumber[self.number]=None\n self.bplist[index].remove(self)\n if not self.bplist[index]:\n \n del self.bplist[index]\n \n def enable(self):\n ''\n self.enabled=True\n \n def disable(self):\n ''\n self.enabled=False\n \n def bpprint(self,out=None ):\n ''\n\n\n\n \n if out is None :\n out=sys.stdout\n print(self.bpformat(),file=out)\n \n def bpformat(self):\n ''\n\n\n\n\n\n \n if self.temporary:\n disp='del '\n else :\n disp='keep '\n if self.enabled:\n disp=disp+'yes '\n else :\n disp=disp+'no '\n ret='%-4dbreakpoint %s at %s:%d'%(self.number,disp,\n self.file,self.line)\n if self.cond:\n ret +='\\n\\tstop only if %s'%(self.cond,)\n if self.ignore:\n ret +='\\n\\tignore next %d hits'%(self.ignore,)\n if self.hits:\n if 
self.hits >1:\n ss='s'\n else :\n ss=''\n ret +='\\n\\tbreakpoint already hit %d time%s'%(self.hits,ss)\n return ret\n \n def __str__(self):\n ''\n return 'breakpoint %s at %s:%s'%(self.number,self.file,self.line)\n \n \n \n \ndef checkfuncname(b,frame):\n ''\n\n\n\n\n\n \n if not b.funcname:\n \n if b.line !=frame.f_lineno:\n \n \n return False\n return True\n \n \n if frame.f_code.co_name !=b.funcname:\n \n return False\n \n \n if not b.func_first_executable_line:\n \n b.func_first_executable_line=frame.f_lineno\n \n if b.func_first_executable_line !=frame.f_lineno:\n \n return False\n return True\n \n \n \n \ndef effective(file,line,frame):\n ''\n\n\n\n\n\n \n possibles=Breakpoint.bplist[file,line]\n for b in possibles:\n if not b.enabled:\n continue\n if not checkfuncname(b,frame):\n continue\n \n b.hits +=1\n if not b.cond:\n \n if b.ignore >0:\n b.ignore -=1\n continue\n else :\n \n return (b,True )\n else :\n \n \n \n try :\n val=eval(b.cond,frame.f_globals,frame.f_locals)\n if val:\n if b.ignore >0:\n b.ignore -=1\n \n else :\n return (b,True )\n \n \n except :\n \n \n \n return (b,False )\n return (None ,None )\n \n \n \n \nclass Tdb(Bdb):\n def user_call(self,frame,args):\n name=frame.f_code.co_name\n if not name:name='???'\n print('+++ call',name,args)\n def user_line(self,frame):\n import linecache\n name=frame.f_code.co_name\n if not name:name='???'\n fn=self.canonic(frame.f_code.co_filename)\n line=linecache.getline(fn,frame.f_lineno,frame.f_globals)\n print('+++',fn,frame.f_lineno,name,':',line.strip())\n def user_return(self,frame,retval):\n print('+++ return',retval)\n def user_exception(self,frame,exc_stuff):\n print('+++ exception',exc_stuff)\n self.set_continue()\n \ndef foo(n):\n print('foo(',n,')')\n x=bar(n *10)\n print('bar returned',x)\n \ndef bar(a):\n print('bar(',a,')')\n return a /2\n \ndef test():\n t=Tdb()\n t.run('import bdb; bdb.foo(10)')\n", ["__main__", "fnmatch", "inspect", "linecache", "os", "reprlib", "sys"]], "binascii": [".py", "''\n\n\n\n\n\n\n\nimport _base64\n\nfrom _binascii import *\n\nclass Error(ValueError):\n def __init__(self,msg=''):\n self._msg=msg\n \n def __str__(self):\n return \" binascii.Error: \"+self._msg\n \n \nclass Done(Exception):\n pass\n \nclass Incomplete(Error):\n pass\n \ndef a2b_uu(s):\n if not s:\n return ''\n \n length=(ord(s[0])-0x20)%64\n \n def quadruplets_gen(s):\n while s:\n try :\n yield ord(s[0]),ord(s[1]),ord(s[2]),ord(s[3])\n except IndexError:\n s +=' '\n yield ord(s[0]),ord(s[1]),ord(s[2]),ord(s[3])\n return\n s=s[4:]\n \n try :\n result=[''.join(\n [chr((A -0x20)<<2 |(((B -0x20)>>4)&0x3)),\n chr(((B -0x20)&0xf)<<4 |(((C -0x20)>>2)&0xf)),\n chr(((C -0x20)&0x3)<<6 |((D -0x20)&0x3f))\n ])for A,B,C,D in quadruplets_gen(s[1:].rstrip())]\n except ValueError:\n raise Error('Illegal char')\n result=''.join(result)\n trailingdata=result[length:]\n if trailingdata.strip('\\x00'):\n raise Error('Trailing garbage')\n result=result[:length]\n if len(result)>2)&0x3F],\n table_b2a_base64[((A <<4)|((B >>4)&0xF))&0x3F],\n table_b2a_base64[((B <<2)|((C >>6)&0x3))&0x3F],\n table_b2a_base64[(C)&0x3F]])\n for A,B,C in a]\n \n final=s[length -final_length:]\n if final_length ==0:\n snippet=''\n elif final_length ==1:\n a=final[0]\n snippet=table_b2a_base64[(a >>2)&0x3F]+\\\n table_b2a_base64[(a <<4)&0x3F]+'=='\n else :\n a=final[0]\n b=final[1]\n snippet=table_b2a_base64[(a >>2)&0x3F]+\\\n table_b2a_base64[((a <<4)|(b >>4)&0xF)&0x3F]+\\\n table_b2a_base64[(b <<2)&0x3F]+'='\n \n result=''.join(result)+snippet\n if newline:\n result 
+='\\n'\n return bytes(result,__BRYTHON__.charset)\n \ndef a2b_qp(s,header=False ):\n inp=0\n odata=[]\n while inp =len(s):\n break\n \n if (s[inp]=='\\n')or (s[inp]=='\\r'):\n if s[inp]!='\\n':\n while inp 0 and data[lf -1]=='\\r'\n \n inp=0\n linelen=0\n odata=[]\n while inp '~'or\n c =='='or\n (header and c =='_')or\n (c =='.'and linelen ==0 and (inp+1 ==len(data)or\n data[inp+1]=='\\n'or\n data[inp+1]=='\\r'))or\n (not istext and (c =='\\r'or c =='\\n'))or\n ((c =='\\t'or c ==' ')and (inp+1 ==len(data)))or\n (c <=' 'and c !='\\r'and c !='\\n'and\n (quotetabs or (not quotetabs and (c !='\\t'and c !=' '))))):\n linelen +=3\n if linelen >=MAXLINESIZE:\n odata.append('=')\n if crlf:odata.append('\\r')\n odata.append('\\n')\n linelen=3\n odata.append('='+two_hex_digits(ord(c)))\n inp +=1\n else :\n if (istext and\n (c =='\\n'or (inp+1 0 and\n (odata[-1]==' 'or odata[-1]=='\\t')):\n ch=ord(odata[-1])\n odata[-1]='='\n odata.append(two_hex_digits(ch))\n \n if crlf:odata.append('\\r')\n odata.append('\\n')\n if c =='\\r':\n inp +=2\n else :\n inp +=1\n else :\n if (inp+1 =MAXLINESIZE):\n odata.append('=')\n if crlf:odata.append('\\r')\n odata.append('\\n')\n linelen=0\n \n linelen +=1\n if header and c ==' ':\n c='_'\n odata.append(c)\n inp +=1\n return ''.join(odata)\n \nhex_numbers='0123456789ABCDEF'\ndef hex(n):\n if n ==0:\n return '0'\n \n if n <0:\n n=-n\n sign='-'\n else :\n sign=''\n arr=[]\n \n def hex_gen(n):\n ''\n while n:\n yield n %0x10\n n=n /0x10\n \n for nibble in hex_gen(n):\n arr=[hex_numbers[nibble]]+arr\n return sign+''.join(arr)\n \ndef two_hex_digits(n):\n return hex_numbers[n /0x10]+hex_numbers[n %0x10]\n \n \ndef strhex_to_int(s):\n i=0\n for c in s:\n i=i *0x10+hex_numbers.index(c)\n return i\n \nhqx_encoding='!\"#$%&\\'()*+,-012345689@ABCDEFGHIJKLMNPQRSTUVXYZ[`abcdefhijklmpqr'\n\nDONE=0x7f\nSKIP=0x7e\nFAIL=0x7d\n\ntable_a2b_hqx=[\n\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\n\nFAIL,FAIL,SKIP,FAIL,FAIL,SKIP,FAIL,FAIL,\n\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\n\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\n\nFAIL,0x00,0x01,0x02,0x03,0x04,0x05,0x06,\n\n0x07,0x08,0x09,0x0A,0x0B,0x0C,FAIL,FAIL,\n\n0x0D,0x0E,0x0F,0x10,0x11,0x12,0x13,FAIL,\n\n0x14,0x15,DONE,FAIL,FAIL,FAIL,FAIL,FAIL,\n\n0x16,0x17,0x18,0x19,0x1A,0x1B,0x1C,0x1D,\n\n0x1E,0x1F,0x20,0x21,0x22,0x23,0x24,FAIL,\n\n0x25,0x26,0x27,0x28,0x29,0x2A,0x2B,FAIL,\n\n0x2C,0x2D,0x2E,0x2F,FAIL,FAIL,FAIL,FAIL,\n\n0x30,0x31,0x32,0x33,0x34,0x35,0x36,FAIL,\n\n0x37,0x38,0x39,0x3A,0x3B,0x3C,FAIL,FAIL,\n\n0x3D,0x3E,0x3F,FAIL,FAIL,FAIL,FAIL,FAIL,\n\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\n]\n\ndef a2b_hqx(s):\n result=[]\n \n def quadruples_gen(s):\n t=[]\n for c in s:\n res=table_a2b_hqx[ord(c)]\n if res ==SKIP:\n continue\n elif res ==FAIL:\n raise Error('Illegal character')\n elif res ==DONE:\n yield t\n raise Done\n else :\n t.append(res)\n if len(t)==4:\n yield t\n 
t=[]\n yield t\n \n done=0\n try :\n for snippet in quadruples_gen(s):\n length=len(snippet)\n if length ==4:\n result.append(chr(((snippet[0]&0x3f)<<2)|(snippet[1]>>4)))\n result.append(chr(((snippet[1]&0x0f)<<4)|(snippet[2]>>2)))\n result.append(chr(((snippet[2]&0x03)<<6)|(snippet[3])))\n elif length ==3:\n result.append(chr(((snippet[0]&0x3f)<<2)|(snippet[1]>>4)))\n result.append(chr(((snippet[1]&0x0f)<<4)|(snippet[2]>>2)))\n elif length ==2:\n result.append(chr(((snippet[0]&0x3f)<<2)|(snippet[1]>>4)))\n except Done:\n done=1\n except Error:\n raise\n return (''.join(result),done)\n \n \n \ndef b2a_hqx(s):\n result=[]\n \n def triples_gen(s):\n while s:\n try :\n yield ord(s[0]),ord(s[1]),ord(s[2])\n except IndexError:\n yield tuple([ord(c)for c in s])\n s=s[3:]\n \n for snippet in triples_gen(s):\n length=len(snippet)\n if length ==3:\n result.append(\n hqx_encoding[(snippet[0]&0xfc)>>2])\n result.append(hqx_encoding[\n ((snippet[0]&0x03)<<4)|((snippet[1]&0xf0)>>4)])\n result.append(hqx_encoding[\n (snippet[1]&0x0f)<<2 |((snippet[2]&0xc0)>>6)])\n result.append(hqx_encoding[snippet[2]&0x3f])\n elif length ==2:\n result.append(\n hqx_encoding[(snippet[0]&0xfc)>>2])\n result.append(hqx_encoding[\n ((snippet[0]&0x03)<<4)|((snippet[1]&0xf0)>>4)])\n result.append(hqx_encoding[\n (snippet[1]&0x0f)<<2])\n elif length ==1:\n result.append(\n hqx_encoding[(snippet[0]&0xfc)>>2])\n result.append(hqx_encoding[\n ((snippet[0]&0x03)<<4)])\n return ''.join(result)\n \ncrctab_hqx=[\n0x0000,0x1021,0x2042,0x3063,0x4084,0x50a5,0x60c6,0x70e7,\n0x8108,0x9129,0xa14a,0xb16b,0xc18c,0xd1ad,0xe1ce,0xf1ef,\n0x1231,0x0210,0x3273,0x2252,0x52b5,0x4294,0x72f7,0x62d6,\n0x9339,0x8318,0xb37b,0xa35a,0xd3bd,0xc39c,0xf3ff,0xe3de,\n0x2462,0x3443,0x0420,0x1401,0x64e6,0x74c7,0x44a4,0x5485,\n0xa56a,0xb54b,0x8528,0x9509,0xe5ee,0xf5cf,0xc5ac,0xd58d,\n0x3653,0x2672,0x1611,0x0630,0x76d7,0x66f6,0x5695,0x46b4,\n0xb75b,0xa77a,0x9719,0x8738,0xf7df,0xe7fe,0xd79d,0xc7bc,\n0x48c4,0x58e5,0x6886,0x78a7,0x0840,0x1861,0x2802,0x3823,\n0xc9cc,0xd9ed,0xe98e,0xf9af,0x8948,0x9969,0xa90a,0xb92b,\n0x5af5,0x4ad4,0x7ab7,0x6a96,0x1a71,0x0a50,0x3a33,0x2a12,\n0xdbfd,0xcbdc,0xfbbf,0xeb9e,0x9b79,0x8b58,0xbb3b,0xab1a,\n0x6ca6,0x7c87,0x4ce4,0x5cc5,0x2c22,0x3c03,0x0c60,0x1c41,\n0xedae,0xfd8f,0xcdec,0xddcd,0xad2a,0xbd0b,0x8d68,0x9d49,\n0x7e97,0x6eb6,0x5ed5,0x4ef4,0x3e13,0x2e32,0x1e51,0x0e70,\n0xff9f,0xefbe,0xdfdd,0xcffc,0xbf1b,0xaf3a,0x9f59,0x8f78,\n0x9188,0x81a9,0xb1ca,0xa1eb,0xd10c,0xc12d,0xf14e,0xe16f,\n0x1080,0x00a1,0x30c2,0x20e3,0x5004,0x4025,0x7046,0x6067,\n0x83b9,0x9398,0xa3fb,0xb3da,0xc33d,0xd31c,0xe37f,0xf35e,\n0x02b1,0x1290,0x22f3,0x32d2,0x4235,0x5214,0x6277,0x7256,\n0xb5ea,0xa5cb,0x95a8,0x8589,0xf56e,0xe54f,0xd52c,0xc50d,\n0x34e2,0x24c3,0x14a0,0x0481,0x7466,0x6447,0x5424,0x4405,\n0xa7db,0xb7fa,0x8799,0x97b8,0xe75f,0xf77e,0xc71d,0xd73c,\n0x26d3,0x36f2,0x0691,0x16b0,0x6657,0x7676,0x4615,0x5634,\n0xd94c,0xc96d,0xf90e,0xe92f,0x99c8,0x89e9,0xb98a,0xa9ab,\n0x5844,0x4865,0x7806,0x6827,0x18c0,0x08e1,0x3882,0x28a3,\n0xcb7d,0xdb5c,0xeb3f,0xfb1e,0x8bf9,0x9bd8,0xabbb,0xbb9a,\n0x4a75,0x5a54,0x6a37,0x7a16,0x0af1,0x1ad0,0x2ab3,0x3a92,\n0xfd2e,0xed0f,0xdd6c,0xcd4d,0xbdaa,0xad8b,0x9de8,0x8dc9,\n0x7c26,0x6c07,0x5c64,0x4c45,0x3ca2,0x2c83,0x1ce0,0x0cc1,\n0xef1f,0xff3e,0xcf5d,0xdf7c,0xaf9b,0xbfba,0x8fd9,0x9ff8,\n0x6e17,0x7e36,0x4e55,0x5e74,0x2e93,0x3eb2,0x0ed1,0x1ef0,\n]\n\ndef crc_hqx(s,crc):\n for c in s:\n crc=((crc <<8)&0xff00)^crctab_hqx[((crc >>8)&0xff)^ord(c)]\n \n return crc\n \ndef rlecode_hqx(s):\n ''\n\n\n\n \n if not s:\n return ''\n result=[]\n prev=s[0]\n 
count=1\n \n \n \n \n if s[-1]=='!':\n s=s[1:]+'?'\n else :\n s=s[1:]+'!'\n \n for c in s:\n if c ==prev and count <255:\n count +=1\n else :\n if count ==1:\n if prev !='\\x90':\n result.append(prev)\n else :\n result.extend(['\\x90','\\x00'])\n elif count <4:\n if prev !='\\x90':\n result.extend([prev]*count)\n else :\n result.extend(['\\x90','\\x00']*count)\n else :\n if prev !='\\x90':\n result.extend([prev,'\\x90',chr(count)])\n else :\n result.extend(['\\x90','\\x00','\\x90',chr(count)])\n count=1\n prev=c\n \n return ''.join(result)\n \ndef rledecode_hqx(s):\n s=s.split('\\x90')\n result=[s[0]]\n prev=s[0]\n for snippet in s[1:]:\n count=ord(snippet[0])\n if count >0:\n result.append(prev[-1]*(count -1))\n prev=snippet\n else :\n result.append('\\x90')\n prev='\\x90'\n result.append(snippet[1:])\n \n return ''.join(result)\n \ncrc_32_tab=[\n0x00000000,0x77073096,0xee0e612c,0x990951ba,0x076dc419,\n0x706af48f,0xe963a535,0x9e6495a3,0x0edb8832,0x79dcb8a4,\n0xe0d5e91e,0x97d2d988,0x09b64c2b,0x7eb17cbd,0xe7b82d07,\n0x90bf1d91,0x1db71064,0x6ab020f2,0xf3b97148,0x84be41de,\n0x1adad47d,0x6ddde4eb,0xf4d4b551,0x83d385c7,0x136c9856,\n0x646ba8c0,0xfd62f97a,0x8a65c9ec,0x14015c4f,0x63066cd9,\n0xfa0f3d63,0x8d080df5,0x3b6e20c8,0x4c69105e,0xd56041e4,\n0xa2677172,0x3c03e4d1,0x4b04d447,0xd20d85fd,0xa50ab56b,\n0x35b5a8fa,0x42b2986c,0xdbbbc9d6,0xacbcf940,0x32d86ce3,\n0x45df5c75,0xdcd60dcf,0xabd13d59,0x26d930ac,0x51de003a,\n0xc8d75180,0xbfd06116,0x21b4f4b5,0x56b3c423,0xcfba9599,\n0xb8bda50f,0x2802b89e,0x5f058808,0xc60cd9b2,0xb10be924,\n0x2f6f7c87,0x58684c11,0xc1611dab,0xb6662d3d,0x76dc4190,\n0x01db7106,0x98d220bc,0xefd5102a,0x71b18589,0x06b6b51f,\n0x9fbfe4a5,0xe8b8d433,0x7807c9a2,0x0f00f934,0x9609a88e,\n0xe10e9818,0x7f6a0dbb,0x086d3d2d,0x91646c97,0xe6635c01,\n0x6b6b51f4,0x1c6c6162,0x856530d8,0xf262004e,0x6c0695ed,\n0x1b01a57b,0x8208f4c1,0xf50fc457,0x65b0d9c6,0x12b7e950,\n0x8bbeb8ea,0xfcb9887c,0x62dd1ddf,0x15da2d49,0x8cd37cf3,\n0xfbd44c65,0x4db26158,0x3ab551ce,0xa3bc0074,0xd4bb30e2,\n0x4adfa541,0x3dd895d7,0xa4d1c46d,0xd3d6f4fb,0x4369e96a,\n0x346ed9fc,0xad678846,0xda60b8d0,0x44042d73,0x33031de5,\n0xaa0a4c5f,0xdd0d7cc9,0x5005713c,0x270241aa,0xbe0b1010,\n0xc90c2086,0x5768b525,0x206f85b3,0xb966d409,0xce61e49f,\n0x5edef90e,0x29d9c998,0xb0d09822,0xc7d7a8b4,0x59b33d17,\n0x2eb40d81,0xb7bd5c3b,0xc0ba6cad,0xedb88320,0x9abfb3b6,\n0x03b6e20c,0x74b1d29a,0xead54739,0x9dd277af,0x04db2615,\n0x73dc1683,0xe3630b12,0x94643b84,0x0d6d6a3e,0x7a6a5aa8,\n0xe40ecf0b,0x9309ff9d,0x0a00ae27,0x7d079eb1,0xf00f9344,\n0x8708a3d2,0x1e01f268,0x6906c2fe,0xf762575d,0x806567cb,\n0x196c3671,0x6e6b06e7,0xfed41b76,0x89d32be0,0x10da7a5a,\n0x67dd4acc,0xf9b9df6f,0x8ebeeff9,0x17b7be43,0x60b08ed5,\n0xd6d6a3e8,0xa1d1937e,0x38d8c2c4,0x4fdff252,0xd1bb67f1,\n0xa6bc5767,0x3fb506dd,0x48b2364b,0xd80d2bda,0xaf0a1b4c,\n0x36034af6,0x41047a60,0xdf60efc3,0xa867df55,0x316e8eef,\n0x4669be79,0xcb61b38c,0xbc66831a,0x256fd2a0,0x5268e236,\n0xcc0c7795,0xbb0b4703,0x220216b9,0x5505262f,0xc5ba3bbe,\n0xb2bd0b28,0x2bb45a92,0x5cb36a04,0xc2d7ffa7,0xb5d0cf31,\n0x2cd99e8b,0x5bdeae1d,0x9b64c2b0,0xec63f226,0x756aa39c,\n0x026d930a,0x9c0906a9,0xeb0e363f,0x72076785,0x05005713,\n0x95bf4a82,0xe2b87a14,0x7bb12bae,0x0cb61b38,0x92d28e9b,\n0xe5d5be0d,0x7cdcefb7,0x0bdbdf21,0x86d3d2d4,0xf1d4e242,\n0x68ddb3f8,0x1fda836e,0x81be16cd,0xf6b9265b,0x6fb077e1,\n0x18b74777,0x88085ae6,0xff0f6a70,0x66063bca,0x11010b5c,\n0x8f659eff,0xf862ae69,0x616bffd3,0x166ccf45,0xa00ae278,\n0xd70dd2ee,0x4e048354,0x3903b3c2,0xa7672661,0xd06016f7,\n0x4969474d,0x3e6e77db,0xaed16a4a,0xd9d65adc,0x40df0b66,\n0x37d83bf0,0xa9b
cae53,0xdebb9ec5,0x47b2cf7f,0x30b5ffe9,\n0xbdbdf21c,0xcabac28a,0x53b39330,0x24b4a3a6,0xbad03605,\n0xcdd70693,0x54de5729,0x23d967bf,0xb3667a2e,0xc4614ab8,\n0x5d681b02,0x2a6f2b94,0xb40bbe37,0xc30c8ea1,0x5a05df1b,\n0x2d02ef8d\n]\n\ndef crc32(s,crc=0):\n result=0\n crc=~int(crc)&0xffffffff\n \n for c in s:\n crc=crc_32_tab[(crc ^int(ord(c)))&0xff]^(crc >>8)\n \n \n \n result=crc ^0xffffffff\n \n if result >2 **31:\n result=((result+2 **31)%2 **32)-2 **31\n \n return result\n \ntable_hex=[\n-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,\n-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,\n-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,\n0,1,2,3,4,5,6,7,8,9,-1,-1,-1,-1,-1,-1,\n-1,10,11,12,13,14,15,-1,-1,-1,-1,-1,-1,-1,-1,-1,\n-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,\n-1,10,11,12,13,14,15,-1,-1,-1,-1,-1,-1,-1,-1,-1,\n-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1,-1\n]\n\n\ndef a2b_hex(t):\n result=[]\n \n def pairs_gen(s):\n if isinstance(s,bytes)or isinstance(s,bytearray):\n conv=lambda x:x\n else :\n conv=lambda x:ord(x)\n while s:\n try :\n yield table_hex[conv(s[0])],table_hex[conv(s[1])]\n except IndexError:\n if len(s):\n raise TypeError('Odd-length string')\n return\n s=s[2:]\n \n for a,b in pairs_gen(t):\n if a <0 or b <0:\n raise TypeError('Non-hexadecimal digit found')\n result.append(chr((a <<4)+b))\n return bytes(''.join(result),__BRYTHON__.charset)\n \n \nunhexlify=a2b_hex\n", ["_base64", "_binascii"]], "bisect": [".py", "''\n\ndef insort_right(a,x,lo=0,hi=None ):\n ''\n\n\n\n\n\n \n \n if lo <0:\n raise ValueError('lo must be non-negative')\n if hi is None :\n hi=len(a)\n while lo =9:\n names=day_name\n else :\n names=day_abbr\n return names[day][:width].center(width)\n \n def formatweekheader(self,width):\n ''\n\n \n return ' '.join(self.formatweekday(i,width)for i in self.iterweekdays())\n \n def formatmonthname(self,theyear,themonth,width,withyear=True ):\n ''\n\n \n s=month_name[themonth]\n if withyear:\n s=\"%s %r\"%(s,theyear)\n return s.center(width)\n \n def prmonth(self,theyear,themonth,w=0,l=0):\n ''\n\n \n print(self.formatmonth(theyear,themonth,w,l),end='')\n \n def formatmonth(self,theyear,themonth,w=0,l=0):\n ''\n\n \n w=max(2,w)\n l=max(1,l)\n s=self.formatmonthname(theyear,themonth,7 *(w+1)-1)\n s=s.rstrip()\n s +='\\n'*l\n s +=self.formatweekheader(w).rstrip()\n s +='\\n'*l\n for week in self.monthdays2calendar(theyear,themonth):\n s +=self.formatweek(week,w).rstrip()\n s +='\\n'*l\n return s\n \n def formatyear(self,theyear,w=2,l=1,c=6,m=3):\n ''\n\n \n w=max(2,w)\n l=max(1,l)\n c=max(2,c)\n colwidth=(w+1)*7 -1\n v=[]\n a=v.append\n a(repr(theyear).center(colwidth *m+c *(m -1)).rstrip())\n a('\\n'*l)\n header=self.formatweekheader(w)\n for (i,row)in enumerate(self.yeardays2calendar(theyear,m)):\n \n months=range(m *i+1,min(m *(i+1)+1,13))\n a('\\n'*l)\n names=(self.formatmonthname(theyear,k,colwidth,False )\n for k in months)\n a(formatstring(names,colwidth,c).rstrip())\n a('\\n'*l)\n headers=(header for k in months)\n a(formatstring(headers,colwidth,c).rstrip())\n a('\\n'*l)\n \n height=max(len(cal)for cal in row)\n for j in range(height):\n weeks=[]\n for cal in row:\n if j >=len(cal):\n weeks.append('')\n else :\n weeks.append(self.formatweek(cal[j],w))\n a(formatstring(weeks,colwidth,c).rstrip())\n a('\\n'*l)\n return ''.join(v)\n \n def pryear(self,theyear,w=0,l=0,c=6,m=3):\n ''\n print(self.formatyear(theyear,w,l,c,m),end='')\n \n \nclass HTMLCalendar(Calendar):\n ''\n\n \n \n \n cssclasses=[\"mon\",\"tue\",\"wed\",\"thu\",\"fri\",\"sat\",\"sun\"]\n 
\n \n cssclasses_weekday_head=cssclasses\n \n \n cssclass_noday=\"noday\"\n \n \n cssclass_month_head=\"month\"\n \n \n cssclass_month=\"month\"\n \n \n cssclass_year_head=\"year\"\n \n \n cssclass_year=\"year\"\n \n def formatday(self,day,weekday):\n ''\n\n \n if day ==0:\n \n return ' '%self.cssclass_noday\n else :\n return '%d'%(self.cssclasses[weekday],day)\n \n def formatweek(self,theweek):\n ''\n\n \n s=''.join(self.formatday(d,wd)for (d,wd)in theweek)\n return '%s'%s\n \n def formatweekday(self,day):\n ''\n\n \n return '%s'%(\n self.cssclasses_weekday_head[day],day_abbr[day])\n \n def formatweekheader(self):\n ''\n\n \n s=''.join(self.formatweekday(i)for i in self.iterweekdays())\n return '%s'%s\n \n def formatmonthname(self,theyear,themonth,withyear=True ):\n ''\n\n \n if withyear:\n s='%s %s'%(month_name[themonth],theyear)\n else :\n s='%s'%month_name[themonth]\n return '%s'%(\n self.cssclass_month_head,s)\n \n def formatmonth(self,theyear,themonth,withyear=True ):\n ''\n\n \n v=[]\n a=v.append\n a(''%(\n self.cssclass_month))\n a('\\n')\n a(self.formatmonthname(theyear,themonth,withyear=withyear))\n a('\\n')\n a(self.formatweekheader())\n a('\\n')\n for week in self.monthdays2calendar(theyear,themonth):\n a(self.formatweek(week))\n a('\\n')\n a('
')\n a('\\n')\n return ''.join(v)\n \n def formatyear(self,theyear,width=3):\n ''\n\n \n v=[]\n a=v.append\n width=max(width,1)\n a(''%\n self.cssclass_year)\n a('\\n')\n a(''%(\n width,self.cssclass_year_head,theyear))\n for i in range(January,January+12,width):\n \n months=range(i,min(i+width,13))\n a('')\n for m in months:\n a('')\n a('')\n a('
%s
')\n a(self.formatmonth(theyear,m,withyear=False ))\n a('
')\n return ''.join(v)\n \n def formatyearpage(self,theyear,width=3,css='calendar.css',encoding=None ):\n ''\n\n \n if encoding is None :\n encoding=sys.getdefaultencoding()\n v=[]\n a=v.append\n a('\\n'%encoding)\n a('\\n')\n a('\\n')\n a('\\n')\n a('\\n'%encoding)\n if css is not None :\n a('\\n'%css)\n a('Calendar for %d\\n'%theyear)\n a('\\n')\n a('\\n')\n a(self.formatyear(theyear,width))\n a('\\n')\n a('\\n')\n return ''.join(v).encode(encoding,\"xmlcharrefreplace\")\n \n \nclass different_locale:\n def __init__(self,locale):\n self.locale=locale\n \n def __enter__(self):\n self.oldlocale=_locale.getlocale(_locale.LC_TIME)\n _locale.setlocale(_locale.LC_TIME,self.locale)\n \n def __exit__(self,*args):\n _locale.setlocale(_locale.LC_TIME,self.oldlocale)\n \n \nclass LocaleTextCalendar(TextCalendar):\n ''\n\n\n\n\n \n \n def __init__(self,firstweekday=0,locale=None ):\n TextCalendar.__init__(self,firstweekday)\n if locale is None :\n locale=_locale.getdefaultlocale()\n self.locale=locale\n \n def formatweekday(self,day,width):\n with different_locale(self.locale):\n if width >=9:\n names=day_name\n else :\n names=day_abbr\n name=names[day]\n return name[:width].center(width)\n \n def formatmonthname(self,theyear,themonth,width,withyear=True ):\n with different_locale(self.locale):\n s=month_name[themonth]\n if withyear:\n s=\"%s %r\"%(s,theyear)\n return s.center(width)\n \n \nclass LocaleHTMLCalendar(HTMLCalendar):\n ''\n\n\n\n\n \n def __init__(self,firstweekday=0,locale=None ):\n HTMLCalendar.__init__(self,firstweekday)\n if locale is None :\n locale=_locale.getdefaultlocale()\n self.locale=locale\n \n def formatweekday(self,day):\n with different_locale(self.locale):\n s=day_abbr[day]\n return '%s'%(self.cssclasses[day],s)\n \n def formatmonthname(self,theyear,themonth,withyear=True ):\n with different_locale(self.locale):\n s=month_name[themonth]\n if withyear:\n s='%s %s'%(s,theyear)\n return '%s'%s\n \n \n \nc=TextCalendar()\n\nfirstweekday=c.getfirstweekday\n\ndef setfirstweekday(firstweekday):\n if not MONDAY <=firstweekday <=SUNDAY:\n raise IllegalWeekdayError(firstweekday)\n c.firstweekday=firstweekday\n \nmonthcalendar=c.monthdayscalendar\nprweek=c.prweek\nweek=c.formatweek\nweekheader=c.formatweekheader\nprmonth=c.prmonth\nmonth=c.formatmonth\ncalendar=c.formatyear\nprcal=c.pryear\n\n\n\n_colwidth=7 *3 -1\n_spacing=6\n\n\ndef format(cols,colwidth=_colwidth,spacing=_spacing):\n ''\n print(formatstring(cols,colwidth,spacing))\n \n \ndef formatstring(cols,colwidth=_colwidth,spacing=_spacing):\n ''\n spacing *=' '\n return spacing.join(c.center(colwidth)for c in cols)\n \n \nEPOCH=1970\n_EPOCH_ORD=datetime.date(EPOCH,1,1).toordinal()\n\n\ndef timegm(tuple):\n ''\n year,month,day,hour,minute,second=tuple[:6]\n days=datetime.date(year,month,1).toordinal()-_EPOCH_ORD+day -1\n hours=days *24+hour\n minutes=hours *60+minute\n seconds=minutes *60+second\n return seconds\n \n \ndef main(args):\n import argparse\n parser=argparse.ArgumentParser()\n textgroup=parser.add_argument_group('text only arguments')\n htmlgroup=parser.add_argument_group('html only arguments')\n textgroup.add_argument(\n \"-w\",\"--width\",\n type=int,default=2,\n help=\"width of date column (default 2)\"\n )\n textgroup.add_argument(\n \"-l\",\"--lines\",\n type=int,default=1,\n help=\"number of lines for each week (default 1)\"\n )\n textgroup.add_argument(\n \"-s\",\"--spacing\",\n type=int,default=6,\n help=\"spacing between months (default 6)\"\n )\n textgroup.add_argument(\n \"-m\",\"--months\",\n 
type=int,default=3,\n help=\"months per row (default 3)\"\n )\n htmlgroup.add_argument(\n \"-c\",\"--css\",\n default=\"calendar.css\",\n help=\"CSS to use for page\"\n )\n parser.add_argument(\n \"-L\",\"--locale\",\n default=None ,\n help=\"locale to be used from month and weekday names\"\n )\n parser.add_argument(\n \"-e\",\"--encoding\",\n default=None ,\n help=\"encoding to use for output\"\n )\n parser.add_argument(\n \"-t\",\"--type\",\n default=\"text\",\n choices=(\"text\",\"html\"),\n help=\"output type (text or html)\"\n )\n parser.add_argument(\n \"year\",\n nargs='?',type=int,\n help=\"year number (1-9999)\"\n )\n parser.add_argument(\n \"month\",\n nargs='?',type=int,\n help=\"month number (1-12, text only)\"\n )\n \n options=parser.parse_args(args[1:])\n \n if options.locale and not options.encoding:\n parser.error(\"if --locale is specified --encoding is required\")\n sys.exit(1)\n \n locale=options.locale,options.encoding\n \n if options.type ==\"html\":\n if options.locale:\n cal=LocaleHTMLCalendar(locale=locale)\n else :\n cal=HTMLCalendar()\n encoding=options.encoding\n if encoding is None :\n encoding=sys.getdefaultencoding()\n optdict=dict(encoding=encoding,css=options.css)\n write=sys.stdout.buffer.write\n if options.year is None :\n write(cal.formatyearpage(datetime.date.today().year,**optdict))\n elif options.month is None :\n write(cal.formatyearpage(options.year,**optdict))\n else :\n parser.error(\"incorrect number of arguments\")\n sys.exit(1)\n else :\n if options.locale:\n cal=LocaleTextCalendar(locale=locale)\n else :\n cal=TextCalendar()\n optdict=dict(w=options.width,l=options.lines)\n if options.month is None :\n optdict[\"c\"]=options.spacing\n optdict[\"m\"]=options.months\n if options.year is None :\n result=cal.formatyear(datetime.date.today().year,**optdict)\n elif options.month is None :\n result=cal.formatyear(options.year,**optdict)\n else :\n result=cal.formatmonth(options.year,options.month,**optdict)\n write=sys.stdout.write\n if options.encoding:\n result=result.encode(options.encoding)\n write=sys.stdout.buffer.write\n write(result)\n \n \nif __name__ ==\"__main__\":\n main(sys.argv)\n", ["argparse", "datetime", "itertools", "locale", "sys"]], "cmath": [".py", "\n\n\n\n\n\n\n\n\n\nimport math\nimport sys\n\ndef takes_complex(func):\n def decorated(x):\n if isinstance(x,complex):\n return func(x)\n elif type(x)in [int,float]:\n return func(complex(x))\n elif hasattr(x,'__complex__'):\n c=x.__complex__()\n if not isinstance(c,complex):\n raise TypeError(\"A complex number is required\")\n else :\n return func(c)\n elif hasattr(x,'__float__'):\n try :\n c=complex(x.__float__(),0)\n except :\n raise TypeError(\"A complex number is required\")\n return func(c)\n else :\n raise TypeError(\"A complex number is required\")\n if hasattr(func,'__doc__'):\n decorated.__doc__=func.__doc__\n if hasattr(func,'__name__'):\n decorated.__name__=func.__name__\n return decorated\n \n@takes_complex\ndef isfinite(x):\n return math.isfinite(x.imag)and math.isfinite(x.real)\n \n@takes_complex\ndef phase(x):\n ''\n return math.atan2(x.imag,x.real)\n \n@takes_complex\ndef polar(x):\n ''\n\n\n\n\n \n phi=math.atan2(x.imag,x.real)\n r=math.sqrt(x.real **2+x.imag **2)\n return r,phi\n \ndef rect(r,phi):\n ''\n\n \n if math.isinf(r)or math.isinf(phi):\n \n \n if math.isinf(phi)and r !=.0 and not math.isnan(r):\n raise ValueError(\"math domain error\")\n \n \n \n \n if -_INF 0:\n _real=math.copysign(_INF,math.cos(phi))\n _imag=math.copysign(_INF,math.sin(phi))\n else :\n 
_real=-math.copysign(_INF,cos(phi));\n _imag=-math.copysign(_INF,sin(phi));\n return complex(_real,_imag)\n return _SPECIAL_VALUE(complex(r,phi),_rect_special_values)\n \n else :\n if phi ==.0:\n \n \n \n return complex(r,phi *r)\n else :\n return complex(r *math.cos(phi),r *math.sin(phi))\n \n@takes_complex\ndef sqrt(x):\n ''\n\n\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n s,d,ax,ay=.0,.0,math.fabs(x.real),math.fabs(x.imag)\n \n ret=_SPECIAL_VALUE(x,_sqrt_special_values)\n if ret is not None :\n return ret\n \n if x.real ==.0 and x.imag ==.0:\n _real=.0\n _imag=x.imag\n return complex(_real,_imag)\n \n if ax 0. or ay >0.):\n \n ax=math.ldexp(ax,_CM_SCALE_UP);\n s=math.ldexp(math.sqrt(ax+math.hypot(ax,math.ldexp(ay,_CM_SCALE_UP))),_CM_SCALE_DOWN)\n else :\n ax /=8.0 ;\n s=2.0 *math.sqrt(ax+math.hypot(ax,ay /8.0));\n \n d=ay /(2.0 *s)\n \n if x.real >=.0:\n _real=s ;\n _imag=math.copysign(d,x.imag)\n else :\n _real=d ;\n _imag=math.copysign(s,x.imag)\n \n return complex(_real,_imag)\n \n@takes_complex\ndef acos(x):\n ''\n\n\n\n\n \n \n ret=_SPECIAL_VALUE(x,_acos_special_values)\n if ret is not None :\n return ret\n \n if math.fabs(x.real)>_CM_LARGE_DOUBLE or math.fabs(x.imag)>_CM_LARGE_DOUBLE:\n \n \n _real=math.atan2(math.fabs(x.imag),x.real)\n \n \n \n if x.real <0:\n _imag=-math.copysign(math.log(math.hypot(x.real /2.,x.imag /2.))+_M_LN2 *2.,x.imag);\n else :\n _imag=math.copysign(math.log(math.hypot(x.real /2.,x.imag /2.))+_M_LN2 *2.,-x.imag);\n else :\n s1=complex(float(1 -x.real),-x.imag)\n s1=sqrt(s1)\n s2=complex(1.0+x.real,x.imag)\n s2=sqrt(s2)\n _real=2.0 *math.atan2(s1.real,s2.real);\n _imag=math.asinh(s2.real *s1.imag -s2.imag *s1.real)\n \n return complex(_real,_imag)\n \n@takes_complex\ndef acosh(x):\n ''\n\n\n\n \n ret=_SPECIAL_VALUE(x,_acosh_special_values)\n if ret is not None :\n return ret\n \n if math.fabs(x.real)>_CM_LARGE_DOUBLE or math.fabs(x.imag)>_CM_LARGE_DOUBLE:\n \n _real=math.log(math.hypot(x.real /2.0,x.imag /2.0))+_M_LN2 *2.0\n _imag=math.atan2(x.imag,x.real);\n else :\n s1=sqrt(complex(x.real -1.0,x.imag))\n s2=sqrt(complex(x.real+1.0,x.imag))\n _real=math.asinh(s1.real *s2.real+s1.imag *s2.imag)\n _imag=2. *math.atan2(s1.imag,s2.real)\n \n return complex(_real,_imag)\n \n@takes_complex\ndef asin(x):\n ''\n\n\n\n \n \n s=complex(-x.imag,x.real)\n s=asinh(s)\n return complex(s.imag,-s.real)\n \n@takes_complex\ndef asinh(x):\n ''\n\n\n\n\n \n ret=_SPECIAL_VALUE(x,_asinh_special_values)\n if ret is not None :\n return ret\n \n if math.fabs(x.real)>_CM_LARGE_DOUBLE or math.fabs(x.imag)>_CM_LARGE_DOUBLE:\n if x.imag >=.0:\n _real=math.copysign(math.log(math.hypot(x.real /2.,x.imag /2.))+_M_LN2 *2.,x.real)\n else :\n _real=-math.copysign(math.log(math.hypot(x.real /2.,x.imag /2.))+_M_LN2 *2.,-x.real)\n _imag=math.atan2(x.imag,math.fabs(x.real))\n else :\n s1=sqrt(complex(1.0+x.imag,-x.real))\n s2=sqrt(complex(1.0 -x.imag,x.real))\n _real=math.asinh(s1.real *s2.imag -s2.real *s1.imag)\n _imag=math.atan2(x.imag,s1.real *s2.real -s1.imag *s2.imag)\n return complex(_real,_imag)\n \n@takes_complex\ndef atan(x):\n ''\n\n\n\n\n \n s=atanh(complex(-x.imag,x.real))\n return complex(s.imag,-s.real)\n \n@takes_complex\ndef atanh(x):\n ''\n\n\n\n\n \n \n ret=_SPECIAL_VALUE(x,_atanh_special_values)\n if ret is not None :\n return ret\n \n \n if x.real <.0:\n return -(atanh(-x))\n \n ay=math.fabs(x.imag)\n \n if x.real >_CM_SQRT_LARGE_DOUBLE or ay >_CM_SQRT_LARGE_DOUBLE:\n \n \n \n \n \n h=math.hypot(x.real /2.,x.imag /2.)\n _real=x.real /4. 
/h /h\n \n \n \n \n \n \n _imag=-math.copysign(math.pi /2.,-x.imag)\n \n elif x.real ==1.0 and ay <_CM_SQRT_DBL_MIN:\n \n \n if (ay ==.0):\n raise ValueError(\"math domain error\")\n else :\n _real=-math.log(math.sqrt(ay)/math.sqrt(math.hypot(ay,2.)))\n _imag=math.copysign(math.atan2(2.0,-ay)/2,x.imag)\n \n else :\n \n _real=math.log1p(4. *x.real /((1 -x.real)*(1 -x.real)+ay *ay))/4.\n _imag=-math.atan2(-2. *x.imag,(1 -x.real)*(1+x.real)-ay *ay)/2.\n errno=0\n \n return complex(_real,_imag)\n \n@takes_complex\ndef cos(x):\n ''\n return cosh(complex(-x.imag,x.real))\n \n@takes_complex\ndef cosh(x):\n ''\n \n \n if isinf(x):\n if -_INF 0:\n _real=math.copysign(_INF,math.cos(x.imag))\n _imag=math.copysign(_INF,math.sin(x.imag))\n else :\n _real=math.copysign(_INF,math.cos(x.imag))\n _imag=-math.copysign(_INF,math.sin(x.imag))\n return complex(_real,_imag)\n else :\n \n if x.imag !=.0 and not math.isnan(x.real):\n raise ValueError(\"math domain error\")\n return _SPECIAL_VALUE(x,_cosh_special_values)\n \n if math.fabs(x.real)>_CM_LOG_LARGE_DOUBLE:\n \n \n x_minus_one=x.real -math.copysign(1.0,x.real)\n _real=cos(x.imag)*math.cosh(x_minus_one)*math.e\n _imag=sin(x.imag)*math.sinh(x_minus_one)*math.e\n else :\n _real=math.cos(x.imag)*math.cosh(x.real)\n _imag=math.sin(x.imag)*math.sinh(x.real)\n \n ret=complex(_real,_imag)\n \n if isinf(ret):\n raise OverflowError()\n return ret\n \n@takes_complex\ndef exp(x):\n ''\n if math.isinf(x.real)or math.isinf(x.imag):\n \n if math.isinf(x.imag)and (-_INF 0):\n raise ValueError(\"math domain error\")\n \n if math.isinf(x.real)and -_INF 0:\n _real=math.copysign(_INF,cos(x.imag))\n _imag=math.copysign(_INF,sin(x.imag))\n else :\n _real=math.copysign(.0,cos(x.imag))\n _imag=math.copysign(.0,sin(x.imag))\n return complex(_real,_imag)\n \n return _SPECIAL_VALUE(x,_exp_special_values)\n \n \n if x.real >_CM_LOG_LARGE_DOUBLE:\n l=math.exp(x.real -1.);\n _real=l *math.cos(x.imag)*math.e\n _imag=l *math.sin(x.imag)*math.e\n else :\n l=math.exp(x.real);\n _real=l *math.cos(x.imag)\n _imag=l *math.sin(x.imag)\n \n if math.isinf(_real)or math.isinf(_imag):\n raise OverflowError()\n \n return complex(_real,_imag)\n \ndef isclose(x,y,*,rel_tol=1e-09,abs_tol=0.0):\n rel_tol=float(rel_tol)\n abs_tol=float(abs_tol)\n return abs(x -y)<=max(rel_tol *max(abs(x),abs(y)),abs_tol)\n \n@takes_complex\ndef isinf(x):\n ''\n return math.isinf(x.real)or math.isinf(x.imag)\n \n@takes_complex\ndef isnan(x):\n ''\n return math.isnan(x.real)or math.isnan(x.imag)\n \n \n@takes_complex\ndef _to_complex(x):\n return x\n \ndef log(x,base=None ):\n ''\n\n\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n x=_to_complex(x)\n \n \n \n \n \n \n if base is not None :\n x=log(x)\n base=log(base)\n x=x /base\n \n ret=_SPECIAL_VALUE(x,_log_special_values)\n if ret is not None :\n return ret\n \n ax=math.fabs(x.real)\n ay=math.fabs(x.imag)\n \n if ax >_CM_LARGE_DOUBLE or ay >_CM_LARGE_DOUBLE:\n _real=math.log(math.hypot(ax /2.0,ay /2.0))+_M_LN2\n elif ax .0 or ay >.0:\n \n _real=math.log(math.hypot(math.ldexp(ax,sys.float_info.mant_dig),math.ldexp(ay,sys.float_info.mant_dig)))-sys.float_info.mant_dig *_M_LN2\n else :\n \n raise ValueError(\"math domain error\")\n _real=-_INF\n _imag=math.atan2(x.imag,x.real)\n else :\n h=math.hypot(ax,ay)\n if 0.71 <=h and h <=1.73:\n am=max(ax,ay)\n an=min(ax,ay)\n _real=math.log1p((am -1)*(am+1)+an *an)/2.\n else :\n _real=math.log(h)\n _imag=math.atan2(x.imag,x.real)\n return complex(_real,_imag)\n \n@takes_complex\ndef log10(x):\n 
''\n\n\n\n \n ret=log(x);\n _real=ret.real /_M_LN10\n _imag=ret.imag /_M_LN10\n return complex(_real,_imag)\n \n@takes_complex\ndef sin(x):\n ''\n \n s=complex(-x.imag,x.real)\n s=sinh(s)\n return complex(s.imag,-s.real)\n \n@takes_complex\ndef sinh(x):\n ''\n \n if math.isinf(x.real)or math.isinf(x.imag):\n \n \n if math.isinf(x.imag)and not math.isnan(x.real):\n raise ValueError(\"math domain error\")\n \n if math.isinf(x.real)and -_INF 0:\n _real=math.copysign(_INF,cos(x.imag))\n _imag=math.copysign(_INF,sin(x.imag))\n else :\n _real=-math.copysign(_INF,cos(x.imag))\n _imag=math.copysign(_INF,sin(x.imag))\n return complex(_real,_imag)\n \n return _SPECIAL_VALUE(x,_sinh_special_values)\n \n if math.fabs(x.real)>_CM_LOG_LARGE_DOUBLE:\n x_minus_one=x.real -math.copysign(1.0,x.real)\n _real=math.cos(x.imag)*math.sinh(x.imag)*math.e\n _imag=math.sin(x.imag)*math.cosh(x.imag)*math.e\n else :\n _real=math.cos(x.imag)*math.sinh(x.real)\n _imag=math.sin(x.imag)*math.cosh(x.real)\n \n if math.isinf(_real)or math.isinf(_imag):\n raise OverflowError()\n \n return complex(_real,_imag)\n \n@takes_complex\ndef tan(x):\n ''\n s=atanh(complex(-x.imag,x.real))\n return complex(s.imag,-s.real)\n \n@takes_complex\ndef tanh(x):\n ''\n ''\n\n\n \n \n \n \n \n \n \n \n \n \n \n if isinf(x):\n if math.isinf(x.imag)and -_INF 0:\n _real=1.0\n _imag=math.copysign(.0,2.0 *math.sin(x.imag)*math.cos(x.imag))\n else :\n _real=-1.0\n _imag=math.copysign(.0,2. *math.sin(x.imag)*math.cos(x.imag))\n return complex(_real,_imag)\n return _SPECIAL_VALUE(x,_tanh_special_values)\n \n \n if math.fabs(x.real)>_CM_LOG_LARGE_DOUBLE:\n _real=math.copysign(1.,x.real)\n _imag=4. *math.sin(x.imag)*math.cos(x.imag)*math.exp(-2. *math.fabs(x.real))\n else :\n tx=math.tanh(x.real)\n ty=math.tan(x.imag)\n cx=1.0 /math.cosh(x.real)\n txty=tx *ty\n denom=1.+txty *txty\n _real=tx *(1.+ty *ty)/denom\n _imag=((ty /denom)*cx)*cx\n return complex(_real,_imag)\n \n \npi=math.pi\ne=math.e\n\n_CM_LARGE_DOUBLE=sys.float_info.max /4\n_CM_SQRT_LARGE_DOUBLE=math.sqrt(_CM_LARGE_DOUBLE)\n_CM_LOG_LARGE_DOUBLE=math.log(_CM_LARGE_DOUBLE)\n_CM_SQRT_DBL_MIN=math.sqrt(sys.float_info.min)\n_M_LN2=0.6931471805599453094\n_M_LN10=2.302585092994045684\n\nif sys.float_info.radix ==2:\n _CM_SCALE_UP=int((2 *(sys.float_info.mant_dig /2)+1))\nelif sys.float_info.radix ==16:\n _CM_SCALE_UP=int((4 *sys.float_info.mant_dig+1))\nelse :\n raise (\"cmath implementation expects the float base to be either 2 or 16, got \"+str(sys.float_info.radix)+\" instead.\")\n_CM_SCALE_DOWN=int((-(_CM_SCALE_UP+1)/2))\n\n_INF=float('inf')\n_NAN=float('nan')\n_PI=math.pi\n_P14=0.25 *math.pi\n_P12=0.5 *math.pi\n_P34=0.75 *math.pi\n_U=-9.5426319407711027e33\n\n\n_ST_NINF=0\n_ST_NEG=1\n_ST_NZERO=2\n_ST_PZERO=3\n_ST_POS=4\n_ST_PINF=5\n_ST_NAN=6\n\n\ndef _SPECIAL_VALUE(z,table):\n if not math.isfinite(z.real)or not math.isfinite(z.imag):\n return table[_special_type(z.real)][_special_type(z.imag)]\n else :\n return None\n \ndef _special_type(x):\n if -_INF ):\"\n misc_header=\"Miscellaneous help topics:\"\n undoc_header=\"Undocumented commands:\"\n nohelp=\"*** No help on %s\"\n use_rawinput=1\n \n def __init__(self,completekey='tab',stdin=None ,stdout=None ):\n ''\n\n\n\n\n\n\n\n\n \n if stdin is not None :\n self.stdin=stdin\n else :\n self.stdin=sys.stdin\n if stdout is not None :\n self.stdout=stdout\n else :\n self.stdout=sys.stdout\n self.cmdqueue=[]\n self.completekey=completekey\n \n def cmdloop(self,intro=None ):\n ''\n\n\n\n \n \n self.preloop()\n if self.use_rawinput and 
self.completekey:\n try :\n import readline\n self.old_completer=readline.get_completer()\n readline.set_completer(self.complete)\n readline.parse_and_bind(self.completekey+\": complete\")\n except ImportError:\n pass\n try :\n if intro is not None :\n self.intro=intro\n if self.intro:\n self.stdout.write(str(self.intro)+\"\\n\")\n stop=None\n while not stop:\n if self.cmdqueue:\n line=self.cmdqueue.pop(0)\n else :\n if self.use_rawinput:\n try :\n line=input(self.prompt)\n except EOFError:\n line='EOF'\n else :\n self.stdout.write(self.prompt)\n self.stdout.flush()\n line=self.stdin.readline()\n if not len(line):\n line='EOF'\n else :\n line=line.rstrip('\\r\\n')\n line=self.precmd(line)\n stop=self.onecmd(line)\n stop=self.postcmd(stop,line)\n self.postloop()\n finally :\n if self.use_rawinput and self.completekey:\n try :\n import readline\n readline.set_completer(self.old_completer)\n except ImportError:\n pass\n \n \n def precmd(self,line):\n ''\n\n\n \n return line\n \n def postcmd(self,stop,line):\n ''\n return stop\n \n def preloop(self):\n ''\n pass\n \n def postloop(self):\n ''\n\n\n \n pass\n \n def parseline(self,line):\n ''\n\n\n \n line=line.strip()\n if not line:\n return None ,None ,line\n elif line[0]=='?':\n line='help '+line[1:]\n elif line[0]=='!':\n if hasattr(self,'do_shell'):\n line='shell '+line[1:]\n else :\n return None ,None ,line\n i,n=0,len(line)\n while i 0:\n cmd,args,foo=self.parseline(line)\n if cmd =='':\n compfunc=self.completedefault\n else :\n try :\n compfunc=getattr(self,'complete_'+cmd)\n except AttributeError:\n compfunc=self.completedefault\n else :\n compfunc=self.completenames\n self.completion_matches=compfunc(text,line,begidx,endidx)\n try :\n return self.completion_matches[state]\n except IndexError:\n return None\n \n def get_names(self):\n \n \n return dir(self.__class__)\n \n def complete_help(self,*args):\n commands=set(self.completenames(*args))\n topics=set(a[5:]for a in self.get_names()\n if a.startswith('help_'+args[0]))\n return list(commands |topics)\n \n def do_help(self,arg):\n ''\n if arg:\n \n try :\n func=getattr(self,'help_'+arg)\n except AttributeError:\n try :\n doc=getattr(self,'do_'+arg).__doc__\n if doc:\n self.stdout.write(\"%s\\n\"%str(doc))\n return\n except AttributeError:\n pass\n self.stdout.write(\"%s\\n\"%str(self.nohelp %(arg,)))\n return\n func()\n else :\n names=self.get_names()\n cmds_doc=[]\n cmds_undoc=[]\n help={}\n for name in names:\n if name[:5]=='help_':\n help[name[5:]]=1\n names.sort()\n \n prevname=''\n for name in names:\n if name[:3]=='do_':\n if name ==prevname:\n continue\n prevname=name\n cmd=name[3:]\n if cmd in help:\n cmds_doc.append(cmd)\n del help[cmd]\n elif getattr(self,name).__doc__:\n cmds_doc.append(cmd)\n else :\n cmds_undoc.append(cmd)\n self.stdout.write(\"%s\\n\"%str(self.doc_leader))\n self.print_topics(self.doc_header,cmds_doc,15,80)\n self.print_topics(self.misc_header,list(help.keys()),15,80)\n self.print_topics(self.undoc_header,cmds_undoc,15,80)\n \n def print_topics(self,header,cmds,cmdlen,maxcol):\n if cmds:\n self.stdout.write(\"%s\\n\"%str(header))\n if self.ruler:\n self.stdout.write(\"%s\\n\"%str(self.ruler *len(header)))\n self.columnize(cmds,maxcol -1)\n self.stdout.write(\"\\n\")\n \n def columnize(self,list,displaywidth=80):\n ''\n\n\n\n \n if not list:\n self.stdout.write(\"\\n\")\n return\n \n nonstrings=[i for i in range(len(list))\n if not isinstance(list[i],str)]\n if nonstrings:\n raise TypeError(\"list[i] not a string for i in %s\"\n %\", 
\".join(map(str,nonstrings)))\n size=len(list)\n if size ==1:\n self.stdout.write('%s\\n'%str(list[0]))\n return\n \n for nrows in range(1,len(list)):\n ncols=(size+nrows -1)//nrows\n colwidths=[]\n totwidth=-2\n for col in range(ncols):\n colwidth=0\n for row in range(nrows):\n i=row+nrows *col\n if i >=size:\n break\n x=list[i]\n colwidth=max(colwidth,len(x))\n colwidths.append(colwidth)\n totwidth +=colwidth+2\n if totwidth >displaywidth:\n break\n if totwidth <=displaywidth:\n break\n else :\n nrows=len(list)\n ncols=1\n colwidths=[0]\n for row in range(nrows):\n texts=[]\n for col in range(ncols):\n i=row+nrows *col\n if i >=size:\n x=\"\"\n else :\n x=list[i]\n texts.append(x)\n while texts and not texts[-1]:\n del texts[-1]\n for col in range(len(texts)):\n texts[col]=texts[col].ljust(colwidths[col])\n self.stdout.write(\"%s\\n\"%str(\" \".join(texts)))\n", ["readline", "string", "sys"]], "code": [".py", "''\n\n\n\n\n\n\nimport sys\nimport traceback\nfrom codeop import CommandCompiler,compile_command\n\n__all__=[\"InteractiveInterpreter\",\"InteractiveConsole\",\"interact\",\n\"compile_command\"]\n\nclass InteractiveInterpreter:\n ''\n\n\n\n\n\n \n \n def __init__(self,locals=None ):\n ''\n\n\n\n\n\n\n \n if locals is None :\n locals={\"__name__\":\"__console__\",\"__doc__\":None }\n self.locals=locals\n self.compile=CommandCompiler()\n \n def runsource(self,source,filename=\"\",symbol=\"single\"):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n try :\n code=self.compile(source,filename,symbol)\n except (OverflowError,SyntaxError,ValueError):\n \n self.showsyntaxerror(filename)\n return False\n \n if code is None :\n \n return True\n \n \n self.runcode(code)\n return False\n \n def runcode(self,code):\n ''\n\n\n\n\n\n\n\n\n\n \n try :\n exec(code,self.locals)\n except SystemExit:\n raise\n except :\n self.showtraceback()\n \n def showsyntaxerror(self,filename=None ):\n ''\n\n\n\n\n\n\n\n\n\n \n type,value,tb=sys.exc_info()\n sys.last_type=type\n sys.last_value=value\n sys.last_traceback=tb\n if filename and type is SyntaxError:\n \n try :\n msg,(dummy_filename,lineno,offset,line)=value.args\n except ValueError:\n \n pass\n else :\n \n value=SyntaxError(msg,(filename,lineno,offset,line))\n sys.last_value=value\n if sys.excepthook is sys.__excepthook__:\n lines=traceback.format_exception_only(type,value)\n self.write(''.join(lines))\n else :\n \n \n sys.excepthook(type,value,tb)\n \n def showtraceback(self):\n ''\n\n\n\n\n\n \n sys.last_type,sys.last_value,last_tb=ei=sys.exc_info()\n sys.last_traceback=last_tb\n try :\n lines=traceback.format_exception(ei[0],ei[1],last_tb.tb_next)\n if sys.excepthook is sys.__excepthook__:\n self.write(''.join(lines))\n else :\n \n \n sys.excepthook(ei[0],ei[1],last_tb)\n finally :\n last_tb=ei=None\n \n def write(self,data):\n ''\n\n\n\n\n \n sys.stderr.write(data)\n \n \nclass InteractiveConsole(InteractiveInterpreter):\n ''\n\n\n\n\n \n \n def __init__(self,locals=None ,filename=\"\"):\n ''\n\n\n\n\n\n\n\n \n InteractiveInterpreter.__init__(self,locals)\n self.filename=filename\n self.resetbuffer()\n \n def resetbuffer(self):\n ''\n self.buffer=[]\n \n def interact(self,banner=None ,exitmsg=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n try :\n sys.ps1\n except AttributeError:\n sys.ps1=\">>> \"\n try :\n sys.ps2\n except AttributeError:\n sys.ps2=\"... 
\"\n cprt='Type \"help\", \"copyright\", \"credits\" or \"license\" for more information.'\n if banner is None :\n self.write(\"Python %s on %s\\n%s\\n(%s)\\n\"%\n (sys.version,sys.platform,cprt,\n self.__class__.__name__))\n elif banner:\n self.write(\"%s\\n\"%str(banner))\n more=0\n while 1:\n try :\n if more:\n prompt=sys.ps2\n else :\n prompt=sys.ps1\n try :\n line=self.raw_input(prompt)\n except EOFError:\n self.write(\"\\n\")\n break\n else :\n more=self.push(line)\n except KeyboardInterrupt:\n self.write(\"\\nKeyboardInterrupt\\n\")\n self.resetbuffer()\n more=0\n if exitmsg is None :\n self.write('now exiting %s...\\n'%self.__class__.__name__)\n elif exitmsg !='':\n self.write('%s\\n'%exitmsg)\n \n def push(self,line):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n self.buffer.append(line)\n source=\"\\n\".join(self.buffer)\n more=self.runsource(source,self.filename)\n if not more:\n self.resetbuffer()\n return more\n \n def raw_input(self,prompt=\"\"):\n ''\n\n\n\n\n\n\n\n\n \n return input(prompt)\n \n \n \ndef interact(banner=None ,readfunc=None ,local=None ,exitmsg=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n console=InteractiveConsole(local)\n if readfunc is not None :\n console.raw_input=readfunc\n else :\n try :\n import readline\n except ImportError:\n pass\n console.interact(banner,exitmsg)\n \n \nif __name__ ==\"__main__\":\n import argparse\n \n parser=argparse.ArgumentParser()\n parser.add_argument('-q',action='store_true',\n help=\"don't print version and copyright messages\")\n args=parser.parse_args()\n if args.q or sys.flags.quiet:\n banner=''\n else :\n banner=None\n interact(banner)\n", ["argparse", "codeop", "readline", "sys", "traceback"]], "codecs": [".py", "''\n\n\n\n\n\n\n\n\nimport builtins\nimport sys\n\n\n\ntry :\n from _codecs import *\nexcept ImportError as why:\n raise SystemError('Failed to load the builtin codecs: %s'%why)\n \n__all__=[\"register\",\"lookup\",\"open\",\"EncodedFile\",\"BOM\",\"BOM_BE\",\n\"BOM_LE\",\"BOM32_BE\",\"BOM32_LE\",\"BOM64_BE\",\"BOM64_LE\",\n\"BOM_UTF8\",\"BOM_UTF16\",\"BOM_UTF16_LE\",\"BOM_UTF16_BE\",\n\"BOM_UTF32\",\"BOM_UTF32_LE\",\"BOM_UTF32_BE\",\n\"CodecInfo\",\"Codec\",\"IncrementalEncoder\",\"IncrementalDecoder\",\n\"StreamReader\",\"StreamWriter\",\n\"StreamReaderWriter\",\"StreamRecoder\",\n\"getencoder\",\"getdecoder\",\"getincrementalencoder\",\n\"getincrementaldecoder\",\"getreader\",\"getwriter\",\n\"encode\",\"decode\",\"iterencode\",\"iterdecode\",\n\"strict_errors\",\"ignore_errors\",\"replace_errors\",\n\"xmlcharrefreplace_errors\",\n\"backslashreplace_errors\",\"namereplace_errors\",\n\"register_error\",\"lookup_error\"]\n\n\n\n\n\n\n\n\n\n\nBOM_UTF8=b'\\xef\\xbb\\xbf'\n\n\nBOM_LE=BOM_UTF16_LE=b'\\xff\\xfe'\n\n\nBOM_BE=BOM_UTF16_BE=b'\\xfe\\xff'\n\n\nBOM_UTF32_LE=b'\\xff\\xfe\\x00\\x00'\n\n\nBOM_UTF32_BE=b'\\x00\\x00\\xfe\\xff'\n\nif sys.byteorder =='little':\n\n\n BOM=BOM_UTF16=BOM_UTF16_LE\n \n \n BOM_UTF32=BOM_UTF32_LE\n \nelse :\n\n\n BOM=BOM_UTF16=BOM_UTF16_BE\n \n \n BOM_UTF32=BOM_UTF32_BE\n \n \nBOM32_LE=BOM_UTF16_LE\nBOM32_BE=BOM_UTF16_BE\nBOM64_LE=BOM_UTF32_LE\nBOM64_BE=BOM_UTF32_BE\n\n\n\n\nclass CodecInfo(tuple):\n ''\n \n \n \n \n \n \n \n _is_text_encoding=True\n \n def __new__(cls,encode,decode,streamreader=None ,streamwriter=None ,\n incrementalencoder=None ,incrementaldecoder=None ,name=None ,\n *,_is_text_encoding=None ):\n self=tuple.__new__(cls,(encode,decode,streamreader,streamwriter))\n self.name=name\n self.encode=encode\n self.decode=decode\n self.incrementalencoder=incrementalencoder\n 
self.incrementaldecoder=incrementaldecoder\n self.streamwriter=streamwriter\n self.streamreader=streamreader\n if _is_text_encoding is not None :\n self._is_text_encoding=_is_text_encoding\n return self\n \n def __repr__(self):\n return \"<%s.%s object for encoding %s at %#x>\"%\\\n (self.__class__.__module__,self.__class__.__qualname__,\n self.name,id(self))\n \nclass Codec:\n\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n def encode(self,input,errors='strict'):\n \n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n raise NotImplementedError\n \n def decode(self,input,errors='strict'):\n \n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n raise NotImplementedError\n \nclass IncrementalEncoder(object):\n ''\n\n\n\n \n def __init__(self,errors='strict'):\n ''\n\n\n\n\n\n \n self.errors=errors\n self.buffer=\"\"\n \n def encode(self,input,final=False ):\n ''\n\n \n raise NotImplementedError\n \n def reset(self):\n ''\n\n \n \n def getstate(self):\n ''\n\n \n return 0\n \n def setstate(self,state):\n ''\n\n\n \n \nclass BufferedIncrementalEncoder(IncrementalEncoder):\n ''\n\n\n\n \n def __init__(self,errors='strict'):\n IncrementalEncoder.__init__(self,errors)\n \n self.buffer=\"\"\n \n def _buffer_encode(self,input,errors,final):\n \n \n raise NotImplementedError\n \n def encode(self,input,final=False ):\n \n data=self.buffer+input\n (result,consumed)=self._buffer_encode(data,self.errors,final)\n \n self.buffer=data[consumed:]\n return result\n \n def reset(self):\n IncrementalEncoder.reset(self)\n self.buffer=\"\"\n \n def getstate(self):\n return self.buffer or 0\n \n def setstate(self,state):\n self.buffer=state or \"\"\n \nclass IncrementalDecoder(object):\n ''\n\n\n\n \n def __init__(self,errors='strict'):\n ''\n\n\n\n\n\n \n self.errors=errors\n \n def decode(self,input,final=False ):\n ''\n\n \n raise NotImplementedError\n \n def reset(self):\n ''\n\n \n \n def getstate(self):\n ''\n\n\n\n\n\n\n\n\n\n \n return (b\"\",0)\n \n def setstate(self,state):\n ''\n\n\n\n\n \n \nclass BufferedIncrementalDecoder(IncrementalDecoder):\n ''\n\n\n\n \n def __init__(self,errors='strict'):\n IncrementalDecoder.__init__(self,errors)\n \n self.buffer=b\"\"\n \n def _buffer_decode(self,input,errors,final):\n \n \n raise NotImplementedError\n \n def decode(self,input,final=False ):\n \n data=self.buffer+input\n (result,consumed)=self._buffer_decode(data,self.errors,final)\n \n self.buffer=data[consumed:]\n return result\n \n def reset(self):\n IncrementalDecoder.reset(self)\n self.buffer=b\"\"\n \n def getstate(self):\n \n return (self.buffer,0)\n \n def setstate(self,state):\n \n self.buffer=state[0]\n \n \n \n \n \n \n \n \nclass StreamWriter(Codec):\n\n def __init__(self,stream,errors='strict'):\n \n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n self.stream=stream\n self.errors=errors\n \n def write(self,object):\n \n ''\n \n data,consumed=self.encode(object,self.errors)\n self.stream.write(data)\n \n def writelines(self,list):\n \n ''\n\n \n self.write(''.join(list))\n \n def reset(self):\n \n ''\n\n\n\n\n\n\n \n pass\n \n def seek(self,offset,whence=0):\n self.stream.seek(offset,whence)\n if whence ==0 and offset ==0:\n self.reset()\n \n def __getattr__(self,name,\n getattr=getattr):\n \n ''\n \n return getattr(self.stream,name)\n \n def __enter__(self):\n return self\n \n def __exit__(self,type,value,tb):\n self.stream.close()\n \n \n \nclass StreamReader(Codec):\n\n charbuffertype=str\n \n def __init__(self,stream,errors='strict'):\n \n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n self.stream=stream\n self.errors=errors\n 
self.bytebuffer=b\"\"\n self._empty_charbuffer=self.charbuffertype()\n self.charbuffer=self._empty_charbuffer\n self.linebuffer=None\n \n def decode(self,input,errors='strict'):\n raise NotImplementedError\n \n def read(self,size=-1,chars=-1,firstline=False ):\n \n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n if self.linebuffer:\n self.charbuffer=self._empty_charbuffer.join(self.linebuffer)\n self.linebuffer=None\n \n if chars <0:\n \n \n chars=size\n \n \n while True :\n \n if chars >=0:\n if len(self.charbuffer)>=chars:\n break\n \n if size <0:\n newdata=self.stream.read()\n else :\n newdata=self.stream.read(size)\n \n data=self.bytebuffer+newdata\n if not data:\n break\n try :\n newchars,decodedbytes=self.decode(data,self.errors)\n except UnicodeDecodeError as exc:\n if firstline:\n newchars,decodedbytes=\\\n self.decode(data[:exc.start],self.errors)\n lines=newchars.splitlines(keepends=True )\n if len(lines)<=1:\n raise\n else :\n raise\n \n self.bytebuffer=data[decodedbytes:]\n \n self.charbuffer +=newchars\n \n if not newdata:\n break\n if chars <0:\n \n result=self.charbuffer\n self.charbuffer=self._empty_charbuffer\n else :\n \n result=self.charbuffer[:chars]\n self.charbuffer=self.charbuffer[chars:]\n return result\n \n def readline(self,size=None ,keepends=True ):\n \n ''\n\n\n\n\n\n \n \n \n if self.linebuffer:\n line=self.linebuffer[0]\n del self.linebuffer[0]\n if len(self.linebuffer)==1:\n \n \n self.charbuffer=self.linebuffer[0]\n self.linebuffer=None\n if not keepends:\n line=line.splitlines(keepends=False )[0]\n return line\n \n readsize=size or 72\n line=self._empty_charbuffer\n \n while True :\n data=self.read(readsize,firstline=True )\n if data:\n \n \n \n if (isinstance(data,str)and data.endswith(\"\\r\"))or\\\n (isinstance(data,bytes)and data.endswith(b\"\\r\")):\n data +=self.read(size=1,chars=1)\n \n line +=data\n lines=line.splitlines(keepends=True )\n if lines:\n if len(lines)>1:\n \n \n line=lines[0]\n del lines[0]\n if len(lines)>1:\n \n lines[-1]+=self.charbuffer\n self.linebuffer=lines\n self.charbuffer=None\n else :\n \n self.charbuffer=lines[0]+self.charbuffer\n if not keepends:\n line=line.splitlines(keepends=False )[0]\n break\n line0withend=lines[0]\n line0withoutend=lines[0].splitlines(keepends=False )[0]\n if line0withend !=line0withoutend:\n \n self.charbuffer=self._empty_charbuffer.join(lines[1:])+\\\n self.charbuffer\n if keepends:\n line=line0withend\n else :\n line=line0withoutend\n break\n \n if not data or size is not None :\n if line and not keepends:\n line=line.splitlines(keepends=False )[0]\n break\n if readsize <8000:\n readsize *=2\n return line\n \n def readlines(self,sizehint=None ,keepends=True ):\n \n ''\n\n\n\n\n\n\n\n\n \n data=self.read()\n return data.splitlines(keepends)\n \n def reset(self):\n \n ''\n\n\n\n\n\n \n self.bytebuffer=b\"\"\n self.charbuffer=self._empty_charbuffer\n self.linebuffer=None\n \n def seek(self,offset,whence=0):\n ''\n\n\n \n self.stream.seek(offset,whence)\n self.reset()\n \n def __next__(self):\n \n ''\n line=self.readline()\n if line:\n return line\n raise StopIteration\n \n def __iter__(self):\n return self\n \n def __getattr__(self,name,\n getattr=getattr):\n \n ''\n \n return getattr(self.stream,name)\n \n def __enter__(self):\n return self\n \n def __exit__(self,type,value,tb):\n self.stream.close()\n \n \n \nclass StreamReaderWriter:\n\n ''\n\n\n\n\n\n\n \n \n encoding='unknown'\n \n def __init__(self,stream,Reader,Writer,errors='strict'):\n \n ''\n\n\n\n\n\n\n\n\n\n \n 
self.stream=stream\n self.reader=Reader(stream,errors)\n self.writer=Writer(stream,errors)\n self.errors=errors\n \n def read(self,size=-1):\n \n return self.reader.read(size)\n \n def readline(self,size=None ):\n \n return self.reader.readline(size)\n \n def readlines(self,sizehint=None ):\n \n return self.reader.readlines(sizehint)\n \n def __next__(self):\n \n ''\n return next(self.reader)\n \n def __iter__(self):\n return self\n \n def write(self,data):\n \n return self.writer.write(data)\n \n def writelines(self,list):\n \n return self.writer.writelines(list)\n \n def reset(self):\n \n self.reader.reset()\n self.writer.reset()\n \n def seek(self,offset,whence=0):\n self.stream.seek(offset,whence)\n self.reader.reset()\n if whence ==0 and offset ==0:\n self.writer.reset()\n \n def __getattr__(self,name,\n getattr=getattr):\n \n ''\n \n return getattr(self.stream,name)\n \n \n \n def __enter__(self):\n return self\n \n def __exit__(self,type,value,tb):\n self.stream.close()\n \n \n \nclass StreamRecoder:\n\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n data_encoding='unknown'\n file_encoding='unknown'\n \n def __init__(self,stream,encode,decode,Reader,Writer,\n errors='strict'):\n \n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n self.stream=stream\n self.encode=encode\n self.decode=decode\n self.reader=Reader(stream,errors)\n self.writer=Writer(stream,errors)\n self.errors=errors\n \n def read(self,size=-1):\n \n data=self.reader.read(size)\n data,bytesencoded=self.encode(data,self.errors)\n return data\n \n def readline(self,size=None ):\n \n if size is None :\n data=self.reader.readline()\n else :\n data=self.reader.readline(size)\n data,bytesencoded=self.encode(data,self.errors)\n return data\n \n def readlines(self,sizehint=None ):\n \n data=self.reader.read()\n data,bytesencoded=self.encode(data,self.errors)\n return data.splitlines(keepends=True )\n \n def __next__(self):\n \n ''\n data=next(self.reader)\n data,bytesencoded=self.encode(data,self.errors)\n return data\n \n def __iter__(self):\n return self\n \n def write(self,data):\n \n data,bytesdecoded=self.decode(data,self.errors)\n return self.writer.write(data)\n \n def writelines(self,list):\n \n data=''.join(list)\n data,bytesdecoded=self.decode(data,self.errors)\n return self.writer.write(data)\n \n def reset(self):\n \n self.reader.reset()\n self.writer.reset()\n \n def __getattr__(self,name,\n getattr=getattr):\n \n ''\n \n return getattr(self.stream,name)\n \n def __enter__(self):\n return self\n \n def __exit__(self,type,value,tb):\n self.stream.close()\n \n \n \ndef open(filename,mode='r',encoding=None ,errors='strict',buffering=1):\n\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if encoding is not None and\\\n 'b'not in mode:\n \n mode=mode+'b'\n file=builtins.open(filename,mode,buffering)\n if encoding is None :\n return file\n info=lookup(encoding)\n srw=StreamReaderWriter(file,info.streamreader,info.streamwriter,errors)\n \n srw.encoding=encoding\n return srw\n \ndef EncodedFile(file,data_encoding,file_encoding=None ,errors='strict'):\n\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if file_encoding is None :\n file_encoding=data_encoding\n data_info=lookup(data_encoding)\n file_info=lookup(file_encoding)\n sr=StreamRecoder(file,data_info.encode,data_info.decode,\n file_info.streamreader,file_info.streamwriter,errors)\n \n sr.data_encoding=data_encoding\n sr.file_encoding=file_encoding\n return sr\n \n \n \ndef getencoder(encoding):\n\n ''\n\n\n\n\n \n return lookup(encoding).encode\n \ndef 
getdecoder(encoding):\n\n ''\n\n\n\n\n \n return lookup(encoding).decode\n \ndef getincrementalencoder(encoding):\n\n ''\n\n\n\n\n\n \n encoder=lookup(encoding).incrementalencoder\n if encoder is None :\n raise LookupError(encoding)\n return encoder\n \ndef getincrementaldecoder(encoding):\n\n ''\n\n\n\n\n\n \n decoder=lookup(encoding).incrementaldecoder\n if decoder is None :\n raise LookupError(encoding)\n return decoder\n \ndef getreader(encoding):\n\n ''\n\n\n\n\n \n return lookup(encoding).streamreader\n \ndef getwriter(encoding):\n\n ''\n\n\n\n\n \n return lookup(encoding).streamwriter\n \ndef iterencode(iterator,encoding,errors='strict',**kwargs):\n ''\n\n\n\n\n\n\n \n encoder=getincrementalencoder(encoding)(errors,**kwargs)\n for input in iterator:\n output=encoder.encode(input)\n if output:\n yield output\n output=encoder.encode(\"\",True )\n if output:\n yield output\n \ndef iterdecode(iterator,encoding,errors='strict',**kwargs):\n ''\n\n\n\n\n\n\n \n decoder=getincrementaldecoder(encoding)(errors,**kwargs)\n for input in iterator:\n output=decoder.decode(input)\n if output:\n yield output\n output=decoder.decode(b\"\",True )\n if output:\n yield output\n \n \n \ndef make_identity_dict(rng):\n\n ''\n\n\n\n\n \n return {i:i for i in rng}\n \ndef make_encoding_map(decoding_map):\n\n ''\n\n\n\n\n\n\n\n\n\n \n m={}\n for k,v in decoding_map.items():\n if not v in m:\n m[v]=k\n else :\n m[v]=None\n return m\n \n \n \ntry :\n strict_errors=lookup_error(\"strict\")\n ignore_errors=lookup_error(\"ignore\")\n replace_errors=lookup_error(\"replace\")\n xmlcharrefreplace_errors=lookup_error(\"xmlcharrefreplace\")\n backslashreplace_errors=lookup_error(\"backslashreplace\")\n namereplace_errors=lookup_error(\"namereplace\")\nexcept LookupError:\n\n strict_errors=None\n ignore_errors=None\n replace_errors=None\n xmlcharrefreplace_errors=None\n backslashreplace_errors=None\n namereplace_errors=None\n \n \n \n_false=0\nif _false:\n import encodings\n \n \n \nif __name__ =='__main__':\n\n\n sys.stdout=EncodedFile(sys.stdout,'latin-1','utf-8')\n \n \n sys.stdin=EncodedFile(sys.stdin,'utf-8','latin-1')\n", ["_codecs", "builtins", "encodings", "sys"]], "codeop": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nimport __future__\n\n_features=[getattr(__future__,fname)\nfor fname in __future__.all_feature_names]\n\n__all__=[\"compile_command\",\"Compile\",\"CommandCompiler\"]\n\nPyCF_DONT_IMPLY_DEDENT=0x200\n\ndef _maybe_compile(compiler,source,filename,symbol):\n\n for line in source.split(\"\\n\"):\n line=line.strip()\n if line and line[0]!='#':\n break\n else :\n if symbol !=\"eval\":\n source=\"pass\"\n \n err=err1=err2=None\n code=code1=code2=None\n \n try :\n code=compiler(source,filename,symbol)\n except SyntaxError as err:\n pass\n \n try :\n code1=compiler(source+\"\\n\",filename,symbol)\n except SyntaxError as e:\n err1=e\n \n try :\n code2=compiler(source+\"\\n\\n\",filename,symbol)\n except SyntaxError as e:\n err2=e\n \n if code:\n return code\n if not code1 and repr(err1)==repr(err2):\n raise err1\n \ndef _compile(source,filename,symbol):\n return compile(source,filename,symbol,PyCF_DONT_IMPLY_DEDENT)\n \ndef compile_command(source,filename=\"\",symbol=\"single\"):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n return _maybe_compile(_compile,source,filename,symbol)\n \nclass Compile:\n ''\n\n\n \n def __init__(self):\n self.flags=PyCF_DONT_IMPLY_DEDENT\n \n def __call__(self,source,filename,symbol):\n 
codeob=compile(source,filename,symbol,self.flags,1)\n for feature in _features:\n if codeob.co_flags&feature.compiler_flag:\n self.flags |=feature.compiler_flag\n return codeob\n \nclass CommandCompiler:\n ''\n\n\n\n \n \n def __init__(self,):\n self.compiler=Compile()\n \n def __call__(self,source,filename=\"\",symbol=\"single\"):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n return _maybe_compile(self.compiler,source,filename,symbol)\n", ["__future__"]], "colorsys": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n__all__=[\"rgb_to_yiq\",\"yiq_to_rgb\",\"rgb_to_hls\",\"hls_to_rgb\",\n\"rgb_to_hsv\",\"hsv_to_rgb\"]\n\n\n\nONE_THIRD=1.0 /3.0\nONE_SIXTH=1.0 /6.0\nTWO_THIRD=2.0 /3.0\n\n\n\n\n\n\n\n\ndef rgb_to_yiq(r,g,b):\n y=0.30 *r+0.59 *g+0.11 *b\n i=0.74 *(r -y)-0.27 *(b -y)\n q=0.48 *(r -y)+0.41 *(b -y)\n return (y,i,q)\n \ndef yiq_to_rgb(y,i,q):\n\n\n\n\n r=y+0.9468822170900693 *i+0.6235565819861433 *q\n g=y -0.27478764629897834 *i -0.6356910791873801 *q\n b=y -1.1085450346420322 *i+1.7090069284064666 *q\n \n if r <0.0:\n r=0.0\n if g <0.0:\n g=0.0\n if b <0.0:\n b=0.0\n if r >1.0:\n r=1.0\n if g >1.0:\n g=1.0\n if b >1.0:\n b=1.0\n return (r,g,b)\n \n \n \n \n \n \n \ndef rgb_to_hls(r,g,b):\n maxc=max(r,g,b)\n minc=min(r,g,b)\n \n l=(minc+maxc)/2.0\n if minc ==maxc:\n return 0.0,l,0.0\n if l <=0.5:\n s=(maxc -minc)/(maxc+minc)\n else :\n s=(maxc -minc)/(2.0 -maxc -minc)\n rc=(maxc -r)/(maxc -minc)\n gc=(maxc -g)/(maxc -minc)\n bc=(maxc -b)/(maxc -minc)\n if r ==maxc:\n h=bc -gc\n elif g ==maxc:\n h=2.0+rc -bc\n else :\n h=4.0+gc -rc\n h=(h /6.0)%1.0\n return h,l,s\n \ndef hls_to_rgb(h,l,s):\n if s ==0.0:\n return l,l,l\n if l <=0.5:\n m2=l *(1.0+s)\n else :\n m2=l+s -(l *s)\n m1=2.0 *l -m2\n return (_v(m1,m2,h+ONE_THIRD),_v(m1,m2,h),_v(m1,m2,h -ONE_THIRD))\n \ndef _v(m1,m2,hue):\n hue=hue %1.0\n if hue MAX_INTERPOLATION_DEPTH:\n raise InterpolationDepthError(option,section,rawval)\n while rest:\n p=rest.find(\"%\")\n if p <0:\n accum.append(rest)\n return\n if p >0:\n accum.append(rest[:p])\n rest=rest[p:]\n \n c=rest[1:2]\n if c ==\"%\":\n accum.append(\"%\")\n rest=rest[2:]\n elif c ==\"(\":\n m=self._KEYCRE.match(rest)\n if m is None :\n raise InterpolationSyntaxError(option,section,\n \"bad interpolation variable reference %r\"%rest)\n var=parser.optionxform(m.group(1))\n rest=rest[m.end():]\n try :\n v=map[var]\n except KeyError:\n raise InterpolationMissingOptionError(\n option,section,rawval,var)from None\n if \"%\"in v:\n self._interpolate_some(parser,option,accum,v,\n section,map,depth+1)\n else :\n accum.append(v)\n else :\n raise InterpolationSyntaxError(\n option,section,\n \"'%%' must be followed by '%%' or '(', \"\n \"found: %r\"%(rest,))\n \n \nclass ExtendedInterpolation(Interpolation):\n ''\n \n \n _KEYCRE=re.compile(r\"\\$\\{([^}]+)\\}\")\n \n def before_get(self,parser,section,option,value,defaults):\n L=[]\n self._interpolate_some(parser,option,L,value,section,defaults,1)\n return ''.join(L)\n \n def before_set(self,parser,section,option,value):\n tmp_value=value.replace('$$','')\n tmp_value=self._KEYCRE.sub('',tmp_value)\n if '$'in tmp_value:\n raise ValueError(\"invalid interpolation syntax in %r at \"\n \"position %d\"%(value,tmp_value.find('$')))\n return value\n \n def _interpolate_some(self,parser,option,accum,rest,section,map,\n depth):\n rawval=parser.get(section,option,raw=True ,fallback=rest)\n if depth >MAX_INTERPOLATION_DEPTH:\n raise InterpolationDepthError(option,section,rawval)\n while rest:\n p=rest.find(\"$\")\n if p <0:\n accum.append(rest)\n 
return\n if p >0:\n accum.append(rest[:p])\n rest=rest[p:]\n \n c=rest[1:2]\n if c ==\"$\":\n accum.append(\"$\")\n rest=rest[2:]\n elif c ==\"{\":\n m=self._KEYCRE.match(rest)\n if m is None :\n raise InterpolationSyntaxError(option,section,\n \"bad interpolation variable reference %r\"%rest)\n path=m.group(1).split(':')\n rest=rest[m.end():]\n sect=section\n opt=option\n try :\n if len(path)==1:\n opt=parser.optionxform(path[0])\n v=map[opt]\n elif len(path)==2:\n sect=path[0]\n opt=parser.optionxform(path[1])\n v=parser.get(sect,opt,raw=True )\n else :\n raise InterpolationSyntaxError(\n option,section,\n \"More than one ':' found: %r\"%(rest,))\n except (KeyError,NoSectionError,NoOptionError):\n raise InterpolationMissingOptionError(\n option,section,rawval,\":\".join(path))from None\n if \"$\"in v:\n self._interpolate_some(parser,opt,accum,v,sect,\n dict(parser.items(sect,raw=True )),\n depth+1)\n else :\n accum.append(v)\n else :\n raise InterpolationSyntaxError(\n option,section,\n \"'$' must be followed by '$' or '{', \"\n \"found: %r\"%(rest,))\n \n \nclass LegacyInterpolation(Interpolation):\n ''\n \n \n _KEYCRE=re.compile(r\"%\\(([^)]*)\\)s|.\")\n \n def before_get(self,parser,section,option,value,vars):\n rawval=value\n depth=MAX_INTERPOLATION_DEPTH\n while depth:\n depth -=1\n if value and \"%(\"in value:\n replace=functools.partial(self._interpolation_replace,\n parser=parser)\n value=self._KEYCRE.sub(replace,value)\n try :\n value=value %vars\n except KeyError as e:\n raise InterpolationMissingOptionError(\n option,section,rawval,e.args[0])from None\n else :\n break\n if value and \"%(\"in value:\n raise InterpolationDepthError(option,section,rawval)\n return value\n \n def before_set(self,parser,section,option,value):\n return value\n \n @staticmethod\n def _interpolation_replace(match,parser):\n s=match.group(1)\n if s is None :\n return match.group()\n else :\n return \"%%(%s)s\"%parser.optionxform(s)\n \n \nclass RawConfigParser(MutableMapping):\n ''\n \n \n _SECT_TMPL=r\"\"\"\n \\[ # [\n (?P
<header>
[^]]+) # very permissive!\n \\] # ]\n \"\"\"\n _OPT_TMPL=r\"\"\"\n (?Pn'%(\n toprefix,num_chg)\n else :\n in_change=False\n \n if not flaglist:\n flaglist=[False ]\n next_id=['']\n next_href=['']\n last=0\n if context:\n fromlist=[' No Differences Found ']\n tolist=fromlist\n else :\n fromlist=tolist=[' Empty File ']\n \n if not flaglist[0]:\n next_href[0]='f'%toprefix\n \n next_href[last]='t'%(toprefix)\n \n return fromlist,tolist,flaglist,next_href,next_id\n \n def make_table(self,fromlines,tolines,fromdesc='',todesc='',context=False ,\n numlines=5):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n \n \n self._make_prefix()\n \n \n \n fromlines,tolines=self._tab_newline_replace(fromlines,tolines)\n \n \n if context:\n context_lines=numlines\n else :\n context_lines=None\n diffs=_mdiff(fromlines,tolines,context_lines,linejunk=self._linejunk,\n charjunk=self._charjunk)\n \n \n if self._wrapcolumn:\n diffs=self._line_wrapper(diffs)\n \n \n fromlist,tolist,flaglist=self._collect_lines(diffs)\n \n \n fromlist,tolist,flaglist,next_href,next_id=self._convert_flags(\n fromlist,tolist,flaglist,context,numlines)\n \n s=[]\n fmt=' %s%s'+\\\n '%s%s\\n'\n for i in range(len(flaglist)):\n if flaglist[i]is None :\n \n \n if i >0:\n s.append(' \\n \\n')\n else :\n s.append(fmt %(next_id[i],next_href[i],fromlist[i],\n next_href[i],tolist[i]))\n if fromdesc or todesc:\n header_row='%s%s%s%s'%(\n '
',\n '%s'%fromdesc,\n '
',\n '%s'%todesc)\n else :\n header_row=''\n \n table=self._table_template %dict(\n data_rows=''.join(s),\n header_row=header_row,\n prefix=self._prefix[1])\n \n return table.replace('\\0+','').\\\n replace('\\0-','').\\\n replace('\\0^','').\\\n replace('\\1','').\\\n replace('\\t',' ')\n \ndel re\n\ndef restore(delta,which):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n try :\n tag={1:\"- \",2:\"+ \"}[int(which)]\n except KeyError:\n raise ValueError('unknown delta choice (must be 1 or 2): %r'\n %which)from None\n prefixes=(\" \",tag)\n for line in delta:\n if line[:2]in prefixes:\n yield line[2:]\n \ndef _test():\n import doctest,difflib\n return doctest.testmod(difflib)\n \nif __name__ ==\"__main__\":\n _test()\n", ["collections", "difflib", "doctest", "heapq", "re"]], "doctest": [".py", "\n\n\n\n\n\n\n\nr\"\"\"Module doctest -- a framework for running examples in docstrings.\n\nIn simplest use, end each module M to be tested with:\n\ndef _test():\n import doctest\n doctest.testmod()\n\nif __name__ == \"__main__\":\n _test()\n\nThen running the module as a script will cause the examples in the\ndocstrings to get executed and verified:\n\npython M.py\n\nThis won't display anything unless an example fails, in which case the\nfailing example(s) and the cause(s) of the failure(s) are printed to stdout\n(why not stderr? because stderr is a lame hack <0.2 wink>), and the final\nline of output is \"Test failed.\".\n\nRun it with the -v switch instead:\n\npython M.py -v\n\nand a detailed report of all examples tried is printed to stdout, along\nwith assorted summaries at the end.\n\nYou can force verbose mode by passing \"verbose=True\" to testmod, or prohibit\nit by passing \"verbose=False\". In either of those cases, sys.argv is not\nexamined by testmod.\n\nThere are a variety of other ways to run doctests, including integration\nwith the unittest framework, and support for running non-Python text\nfiles containing doctests. There are also many ways to override parts\nof doctest's default behaviors. 
See the Library Reference Manual for\ndetails.\n\"\"\"\n\n__docformat__='reStructuredText en'\n\n__all__=[\n\n'register_optionflag',\n'DONT_ACCEPT_TRUE_FOR_1',\n'DONT_ACCEPT_BLANKLINE',\n'NORMALIZE_WHITESPACE',\n'ELLIPSIS',\n'SKIP',\n'IGNORE_EXCEPTION_DETAIL',\n'COMPARISON_FLAGS',\n'REPORT_UDIFF',\n'REPORT_CDIFF',\n'REPORT_NDIFF',\n'REPORT_ONLY_FIRST_FAILURE',\n'REPORTING_FLAGS',\n'FAIL_FAST',\n\n\n'Example',\n'DocTest',\n\n'DocTestParser',\n\n'DocTestFinder',\n\n'DocTestRunner',\n'OutputChecker',\n'DocTestFailure',\n'UnexpectedException',\n'DebugRunner',\n\n'testmod',\n'testfile',\n'run_docstring_examples',\n\n'DocTestSuite',\n'DocFileSuite',\n'set_unittest_reportflags',\n\n'script_from_examples',\n'testsource',\n'debug_src',\n'debug',\n]\n\nimport __future__\nimport difflib\nimport inspect\nimport linecache\nimport os\nimport pdb\nimport re\nimport sys\nimport traceback\nimport unittest\nfrom io import StringIO\nfrom collections import namedtuple\n\nTestResults=namedtuple('TestResults','failed attempted')\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nOPTIONFLAGS_BY_NAME={}\ndef register_optionflag(name):\n\n return OPTIONFLAGS_BY_NAME.setdefault(name,1 <=2\n \n \n startpos,endpos=0,len(got)\n w=ws[0]\n if w:\n if got.startswith(w):\n startpos=len(w)\n del ws[0]\n else :\n return False\n w=ws[-1]\n if w:\n if got.endswith(w):\n endpos -=len(w)\n del ws[-1]\n else :\n return False\n \n if startpos >endpos:\n \n \n return False\n \n \n \n \n for w in ws:\n \n \n \n startpos=got.find(w,startpos,endpos)\n if startpos <0:\n return False\n startpos +=len(w)\n \n return True\n \ndef _comment_line(line):\n ''\n line=line.rstrip()\n if line:\n return '# '+line\n else :\n return '#'\n \ndef _strip_exception_details(msg):\n\n\n\n\n\n\n\n\n\n\n start,end=0,len(msg)\n \n i=msg.find(\"\\n\")\n if i >=0:\n end=i\n \n i=msg.find(':',0,end)\n if i >=0:\n end=i\n \n i=msg.rfind('.',0,end)\n if i >=0:\n start=i+1\n return msg[start:end]\n \nclass _OutputRedirectingPdb(pdb.Pdb):\n ''\n\n\n\n \n def __init__(self,out):\n self.__out=out\n self.__debugger_used=False\n \n pdb.Pdb.__init__(self,stdout=out,nosigint=True )\n \n self.use_rawinput=1\n \n def set_trace(self,frame=None ):\n self.__debugger_used=True\n if frame is None :\n frame=sys._getframe().f_back\n pdb.Pdb.set_trace(self,frame)\n \n def set_continue(self):\n \n \n if self.__debugger_used:\n pdb.Pdb.set_continue(self)\n \n def trace_dispatch(self,*args):\n \n save_stdout=sys.stdout\n sys.stdout=self.__out\n \n try :\n return pdb.Pdb.trace_dispatch(self,*args)\n finally :\n sys.stdout=save_stdout\n \n \ndef _module_relative_path(module,test_path):\n if not inspect.ismodule(module):\n raise TypeError('Expected a module: %r'%module)\n if test_path.startswith('/'):\n raise ValueError('Module-relative files may not have absolute paths')\n \n \n test_path=os.path.join(*(test_path.split('/')))\n \n \n if hasattr(module,'__file__'):\n \n basedir=os.path.split(module.__file__)[0]\n elif module.__name__ =='__main__':\n \n if len(sys.argv)>0 and sys.argv[0]!='':\n basedir=os.path.split(sys.argv[0])[0]\n else :\n basedir=os.curdir\n else :\n if hasattr(module,'__path__'):\n for directory in module.__path__:\n fullpath=os.path.join(directory,test_path)\n if os.path.exists(fullpath):\n return fullpath\n \n \n raise ValueError(\"Can't resolve paths relative to the module \"\n \"%r (it has no __file__)\"\n %module.__name__)\n \n \n return os.path.join(basedir,test_path)\n \n \n \n \n \n \n \n \n \n \n \n \n \nclass Example:\n 
''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n def __init__(self,source,want,exc_msg=None ,lineno=0,indent=0,\n options=None ):\n \n if not source.endswith('\\n'):\n source +='\\n'\n if want and not want.endswith('\\n'):\n want +='\\n'\n if exc_msg is not None and not exc_msg.endswith('\\n'):\n exc_msg +='\\n'\n \n self.source=source\n self.want=want\n self.lineno=lineno\n self.indent=indent\n if options is None :options={}\n self.options=options\n self.exc_msg=exc_msg\n \n def __eq__(self,other):\n if type(self)is not type(other):\n return NotImplemented\n \n return self.source ==other.source and\\\n self.want ==other.want and\\\n self.lineno ==other.lineno and\\\n self.indent ==other.indent and\\\n self.options ==other.options and\\\n self.exc_msg ==other.exc_msg\n \n def __hash__(self):\n return hash((self.source,self.want,self.lineno,self.indent,\n self.exc_msg))\n \nclass DocTest:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n def __init__(self,examples,globs,name,filename,lineno,docstring):\n ''\n\n\n \n assert not isinstance(examples,str),\\\n \"DocTest no longer accepts str; use DocTestParser instead\"\n self.examples=examples\n self.docstring=docstring\n self.globs=globs.copy()\n self.name=name\n self.filename=filename\n self.lineno=lineno\n \n def __repr__(self):\n if len(self.examples)==0:\n examples='no examples'\n elif len(self.examples)==1:\n examples='1 example'\n else :\n examples='%d examples'%len(self.examples)\n return ('<%s %s from %s:%s (%s)>'%\n (self.__class__.__name__,\n self.name,self.filename,self.lineno,examples))\n \n def __eq__(self,other):\n if type(self)is not type(other):\n return NotImplemented\n \n return self.examples ==other.examples and\\\n self.docstring ==other.docstring and\\\n self.globs ==other.globs and\\\n self.name ==other.name and\\\n self.filename ==other.filename and\\\n self.lineno ==other.lineno\n \n def __hash__(self):\n return hash((self.docstring,self.name,self.filename,self.lineno))\n \n \n def __lt__(self,other):\n if not isinstance(other,DocTest):\n return NotImplemented\n return ((self.name,self.filename,self.lineno,id(self))\n <\n (other.name,other.filename,other.lineno,id(other)))\n \n \n \n \n \nclass DocTestParser:\n ''\n\n \n \n \n \n \n \n _EXAMPLE_RE=re.compile(r'''\n # Source consists of a PS1 line followed by zero or more PS2 lines.\n (?P\n (?:^(?P [ ]*) >>> .*) # PS1 line\n (?:\\n [ ]* \\.\\.\\. .*)*) # PS2 lines\n \\n?\n # Want consists of any non-blank lines that do not start with PS1.\n (?P (?:(?![ ]*$) # Not a blank line\n (?![ ]*>>>) # Not a line starting with PS1\n .+$\\n? # But any other line\n )*)\n ''',re.MULTILINE |re.VERBOSE)\n \n \n \n \n \n \n \n \n \n \n _EXCEPTION_RE=re.compile(r\"\"\"\n # Grab the traceback header. Different versions of Python have\n # said different things on the first traceback line.\n ^(?P Traceback\\ \\(\n (?: most\\ recent\\ call\\ last\n | innermost\\ last\n ) \\) :\n )\n \\s* $ # toss trailing whitespace on the header.\n (?P .*?) 
# don't blink: absorb stuff until...\n ^ (?P \\w+ .*) # a line *starts* with alphanum.\n \"\"\",re.VERBOSE |re.MULTILINE |re.DOTALL)\n \n \n \n _IS_BLANK_OR_COMMENT=re.compile(r'^[ ]*(#.*)?$').match\n \n def parse(self,string,name=''):\n ''\n\n\n\n\n\n \n string=string.expandtabs()\n \n min_indent=self._min_indent(string)\n if min_indent >0:\n string='\\n'.join([l[min_indent:]for l in string.split('\\n')])\n \n output=[]\n charno,lineno=0,0\n \n for m in self._EXAMPLE_RE.finditer(string):\n \n output.append(string[charno:m.start()])\n \n lineno +=string.count('\\n',charno,m.start())\n \n (source,options,want,exc_msg)=\\\n self._parse_example(m,name,lineno)\n \n if not self._IS_BLANK_OR_COMMENT(source):\n output.append(Example(source,want,exc_msg,\n lineno=lineno,\n indent=min_indent+len(m.group('indent')),\n options=options))\n \n lineno +=string.count('\\n',m.start(),m.end())\n \n charno=m.end()\n \n output.append(string[charno:])\n return output\n \n def get_doctest(self,string,globs,name,filename,lineno):\n ''\n\n\n\n\n\n\n \n return DocTest(self.get_examples(string,name),globs,\n name,filename,lineno,string)\n \n def get_examples(self,string,name=''):\n ''\n\n\n\n\n\n\n\n\n \n return [x for x in self.parse(string,name)\n if isinstance(x,Example)]\n \n def _parse_example(self,m,name,lineno):\n ''\n\n\n\n\n\n\n\n\n \n \n indent=len(m.group('indent'))\n \n \n \n source_lines=m.group('source').split('\\n')\n self._check_prompt_blank(source_lines,indent,name,lineno)\n self._check_prefix(source_lines[1:],' '*indent+'.',name,lineno)\n source='\\n'.join([sl[indent+4:]for sl in source_lines])\n \n \n \n \n want=m.group('want')\n want_lines=want.split('\\n')\n if len(want_lines)>1 and re.match(r' *$',want_lines[-1]):\n del want_lines[-1]\n self._check_prefix(want_lines,' '*indent,name,\n lineno+len(source_lines))\n want='\\n'.join([wl[indent:]for wl in want_lines])\n \n \n m=self._EXCEPTION_RE.match(want)\n if m:\n exc_msg=m.group('msg')\n else :\n exc_msg=None\n \n \n options=self._find_options(source,name,lineno)\n \n return source,options,want,exc_msg\n \n \n \n \n \n \n \n \n _OPTION_DIRECTIVE_RE=re.compile(r'#\\s*doctest:\\s*([^\\n\\'\"]*)$',\n re.MULTILINE)\n \n def _find_options(self,source,name,lineno):\n ''\n\n\n\n\n\n \n options={}\n \n for m in self._OPTION_DIRECTIVE_RE.finditer(source):\n option_strings=m.group(1).replace(',',' ').split()\n for option in option_strings:\n if (option[0]not in '+-'or\n option[1:]not in OPTIONFLAGS_BY_NAME):\n raise ValueError('line %r of the doctest for %s '\n 'has an invalid option: %r'%\n (lineno+1,name,option))\n flag=OPTIONFLAGS_BY_NAME[option[1:]]\n options[flag]=(option[0]=='+')\n if options and self._IS_BLANK_OR_COMMENT(source):\n raise ValueError('line %r of the doctest for %s has an option '\n 'directive on a line with no example: %r'%\n (lineno,name,source))\n return options\n \n \n \n _INDENT_RE=re.compile(r'^([ ]*)(?=\\S)',re.MULTILINE)\n \n def _min_indent(self,s):\n ''\n indents=[len(indent)for indent in self._INDENT_RE.findall(s)]\n if len(indents)>0:\n return min(indents)\n else :\n return 0\n \n def _check_prompt_blank(self,lines,indent,name,lineno):\n ''\n\n\n\n\n \n for i,line in enumerate(lines):\n if len(line)>=indent+4 and line[indent+3]!=' ':\n raise ValueError('line %r of the docstring for %s '\n 'lacks blank after %s: %r'%\n (lineno+i+1,name,\n line[indent:indent+3],line))\n \n def _check_prefix(self,lines,prefix,name,lineno):\n ''\n\n\n \n for i,line in enumerate(lines):\n if line and not line.startswith(prefix):\n raise 
ValueError('line %r of the docstring for %s has '\n 'inconsistent leading whitespace: %r'%\n (lineno+i+1,name,line))\n \n \n \n \n \n \nclass DocTestFinder:\n ''\n\n\n\n\n\n \n \n def __init__(self,verbose=False ,parser=DocTestParser(),\n recurse=True ,exclude_empty=True ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n self._parser=parser\n self._verbose=verbose\n self._recurse=recurse\n self._exclude_empty=exclude_empty\n \n def find(self,obj,name=None ,module=None ,globs=None ,extraglobs=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n if name is None :\n name=getattr(obj,'__name__',None )\n if name is None :\n raise ValueError(\"DocTestFinder.find: name must be given \"\n \"when obj.__name__ doesn't exist: %r\"%\n (type(obj),))\n \n \n \n \n if module is False :\n module=None\n elif module is None :\n module=inspect.getmodule(obj)\n \n \n \n \n try :\n file=inspect.getsourcefile(obj)\n except TypeError:\n source_lines=None\n else :\n if not file:\n \n \n file=inspect.getfile(obj)\n if not file[0]+file[-2:]=='<]>':file=None\n if file is None :\n source_lines=None\n else :\n if module is not None :\n \n \n \n source_lines=linecache.getlines(file,module.__dict__)\n else :\n \n \n source_lines=linecache.getlines(file)\n if not source_lines:\n source_lines=None\n \n \n if globs is None :\n if module is None :\n globs={}\n else :\n globs=module.__dict__.copy()\n else :\n globs=globs.copy()\n if extraglobs is not None :\n globs.update(extraglobs)\n if '__name__'not in globs:\n globs['__name__']='__main__'\n \n \n tests=[]\n self._find(tests,obj,name,module,source_lines,globs,{})\n \n \n \n \n tests.sort()\n return tests\n \n def _from_module(self,module,object):\n ''\n\n\n \n if module is None :\n return True\n elif inspect.getmodule(object)is not None :\n return module is inspect.getmodule(object)\n elif inspect.isfunction(object):\n return module.__dict__ is object.__globals__\n elif inspect.ismethoddescriptor(object):\n if hasattr(object,'__objclass__'):\n obj_mod=object.__objclass__.__module__\n elif hasattr(object,'__module__'):\n obj_mod=object.__module__\n else :\n return True\n return module.__name__ ==obj_mod\n elif inspect.isclass(object):\n return module.__name__ ==object.__module__\n elif hasattr(object,'__module__'):\n return module.__name__ ==object.__module__\n elif isinstance(object,property):\n return True\n else :\n raise ValueError(\"object must be a class or function\")\n \n def _find(self,tests,obj,name,module,source_lines,globs,seen):\n ''\n\n\n \n if self._verbose:\n print('Finding tests in %s'%name)\n \n \n if id(obj)in seen:\n return\n seen[id(obj)]=1\n \n \n test=self._get_test(obj,name,module,globs,source_lines)\n if test is not None :\n tests.append(test)\n \n \n if inspect.ismodule(obj)and self._recurse:\n for valname,val in obj.__dict__.items():\n valname='%s.%s'%(name,valname)\n \n if ((inspect.isroutine(inspect.unwrap(val))\n or inspect.isclass(val))and\n self._from_module(module,val)):\n self._find(tests,val,valname,module,source_lines,\n globs,seen)\n \n \n if inspect.ismodule(obj)and self._recurse:\n for valname,val in getattr(obj,'__test__',{}).items():\n if not isinstance(valname,str):\n raise ValueError(\"DocTestFinder.find: __test__ keys \"\n \"must be strings: %r\"%\n (type(valname),))\n if not (inspect.isroutine(val)or inspect.isclass(val)or\n inspect.ismodule(val)or isinstance(val,str)):\n raise ValueError(\"DocTestFinder.find: __test__ values \"\n \"must be strings, functions, methods, \"\n \"classes, or modules: %r\"%\n 
(type(val),))\n valname='%s.__test__.%s'%(name,valname)\n self._find(tests,val,valname,module,source_lines,\n globs,seen)\n \n \n if inspect.isclass(obj)and self._recurse:\n for valname,val in obj.__dict__.items():\n \n if isinstance(val,staticmethod):\n val=getattr(obj,valname)\n if isinstance(val,classmethod):\n val=getattr(obj,valname).__func__\n \n \n if ((inspect.isroutine(val)or inspect.isclass(val)or\n isinstance(val,property))and\n self._from_module(module,val)):\n valname='%s.%s'%(name,valname)\n self._find(tests,val,valname,module,source_lines,\n globs,seen)\n \n def _get_test(self,obj,name,module,globs,source_lines):\n ''\n\n\n \n \n \n if isinstance(obj,str):\n docstring=obj\n else :\n try :\n if obj.__doc__ is None :\n docstring=''\n else :\n docstring=obj.__doc__\n if not isinstance(docstring,str):\n docstring=str(docstring)\n except (TypeError,AttributeError):\n docstring=''\n \n \n lineno=self._find_lineno(obj,source_lines)\n \n \n if self._exclude_empty and not docstring:\n return None\n \n \n if module is None :\n filename=None\n else :\n filename=getattr(module,'__file__',module.__name__)\n if filename[-4:]==\".pyc\":\n filename=filename[:-1]\n return self._parser.get_doctest(docstring,globs,name,\n filename,lineno)\n \n def _find_lineno(self,obj,source_lines):\n ''\n\n\n \n lineno=None\n \n \n if inspect.ismodule(obj):\n lineno=0\n \n \n \n \n if inspect.isclass(obj):\n if source_lines is None :\n return None\n pat=re.compile(r'^\\s*class\\s*%s\\b'%\n getattr(obj,'__name__','-'))\n for i,line in enumerate(source_lines):\n if pat.match(line):\n lineno=i\n break\n \n \n if inspect.ismethod(obj):obj=obj.__func__\n if inspect.isfunction(obj):obj=obj.__code__\n if inspect.istraceback(obj):obj=obj.tb_frame\n if inspect.isframe(obj):obj=obj.f_code\n if inspect.iscode(obj):\n lineno=getattr(obj,'co_firstlineno',None )-1\n \n \n \n \n \n \n if lineno is not None :\n if source_lines is None :\n return lineno+1\n pat=re.compile(r'(^|.*:)\\s*\\w*(\"|\\')')\n for lineno in range(lineno,len(source_lines)):\n if pat.match(source_lines[lineno]):\n return lineno\n \n \n return None\n \n \n \n \n \nclass DocTestRunner:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n \n DIVIDER=\"*\"*70\n \n def __init__(self,checker=None ,verbose=None ,optionflags=0):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n self._checker=checker or OutputChecker()\n if verbose is None :\n verbose='-v'in sys.argv\n self._verbose=verbose\n self.optionflags=optionflags\n self.original_optionflags=optionflags\n \n \n self.tries=0\n self.failures=0\n self._name2ft={}\n \n \n self._fakeout=_SpoofOut()\n \n \n \n \n \n def report_start(self,out,test,example):\n ''\n\n\n \n if self._verbose:\n if example.want:\n out('Trying:\\n'+_indent(example.source)+\n 'Expecting:\\n'+_indent(example.want))\n else :\n out('Trying:\\n'+_indent(example.source)+\n 'Expecting nothing\\n')\n \n def report_success(self,out,test,example,got):\n ''\n\n\n \n if self._verbose:\n out(\"ok\\n\")\n \n def report_failure(self,out,test,example,got):\n ''\n\n \n out(self._failure_header(test,example)+\n self._checker.output_difference(example,got,self.optionflags))\n \n def report_unexpected_exception(self,out,test,example,exc_info):\n ''\n\n \n out(self._failure_header(test,example)+\n 'Exception raised:\\n'+_indent(_exception_traceback(exc_info)))\n \n def _failure_header(self,test,example):\n out=[self.DIVIDER]\n if test.filename:\n if test.lineno is not None and example.lineno is not 
None :\n lineno=test.lineno+example.lineno+1\n else :\n lineno='?'\n out.append('File \"%s\", line %s, in %s'%\n (test.filename,lineno,test.name))\n else :\n out.append('Line %s, in %s'%(example.lineno+1,test.name))\n out.append('Failed example:')\n source=example.source\n out.append(_indent(source))\n return '\\n'.join(out)\n \n \n \n \n \n def __run(self,test,compileflags,out):\n ''\n\n\n\n\n\n\n\n \n \n failures=tries=0\n \n \n \n original_optionflags=self.optionflags\n \n SUCCESS,FAILURE,BOOM=range(3)\n \n check=self._checker.check_output\n \n \n for examplenum,example in enumerate(test.examples):\n \n \n \n quiet=(self.optionflags&REPORT_ONLY_FIRST_FAILURE and\n failures >0)\n \n \n self.optionflags=original_optionflags\n if example.options:\n for (optionflag,val)in example.options.items():\n if val:\n self.optionflags |=optionflag\n else :\n self.optionflags &=~optionflag\n \n \n if self.optionflags&SKIP:\n continue\n \n \n tries +=1\n if not quiet:\n self.report_start(out,test,example)\n \n \n \n \n filename=''%(test.name,examplenum)\n \n \n \n \n try :\n \n exec(compile(example.source,filename,\"single\",\n compileflags,1),test.globs)\n self.debugger.set_continue()\n exception=None\n except KeyboardInterrupt:\n raise\n except :\n exception=sys.exc_info()\n self.debugger.set_continue()\n \n got=self._fakeout.getvalue()\n self._fakeout.truncate(0)\n outcome=FAILURE\n \n \n \n if exception is None :\n if check(example.want,got,self.optionflags):\n outcome=SUCCESS\n \n \n else :\n exc_msg=traceback.format_exception_only(*exception[:2])[-1]\n if not quiet:\n got +=_exception_traceback(exception)\n \n \n \n if example.exc_msg is None :\n outcome=BOOM\n \n \n elif check(example.exc_msg,exc_msg,self.optionflags):\n outcome=SUCCESS\n \n \n elif self.optionflags&IGNORE_EXCEPTION_DETAIL:\n if check(_strip_exception_details(example.exc_msg),\n _strip_exception_details(exc_msg),\n self.optionflags):\n outcome=SUCCESS\n \n \n if outcome is SUCCESS:\n if not quiet:\n self.report_success(out,test,example,got)\n elif outcome is FAILURE:\n if not quiet:\n self.report_failure(out,test,example,got)\n failures +=1\n elif outcome is BOOM:\n if not quiet:\n self.report_unexpected_exception(out,test,example,\n exception)\n failures +=1\n else :\n assert False ,(\"unknown outcome\",outcome)\n \n if failures and self.optionflags&FAIL_FAST:\n break\n \n \n self.optionflags=original_optionflags\n \n \n self.__record_outcome(test,failures,tries)\n return TestResults(failures,tries)\n \n def __record_outcome(self,test,f,t):\n ''\n\n\n \n f2,t2=self._name2ft.get(test.name,(0,0))\n self._name2ft[test.name]=(f+f2,t+t2)\n self.failures +=f\n self.tries +=t\n \n __LINECACHE_FILENAME_RE=re.compile(r'.+)'\n r'\\[(?P\\d+)\\]>$')\n def __patched_linecache_getlines(self,filename,module_globals=None ):\n m=self.__LINECACHE_FILENAME_RE.match(filename)\n if m and m.group('name')==self.test.name:\n example=self.test.examples[int(m.group('examplenum'))]\n return example.source.splitlines(keepends=True )\n else :\n return self.save_linecache_getlines(filename,module_globals)\n \n def run(self,test,compileflags=None ,out=None ,clear_globs=True ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n self.test=test\n \n if compileflags is None :\n compileflags=_extract_future_flags(test.globs)\n \n save_stdout=sys.stdout\n if out is None :\n encoding=save_stdout.encoding\n if encoding is None or encoding.lower()=='utf-8':\n out=save_stdout.write\n else :\n \n def out(s):\n s=str(s.encode(encoding,'backslashreplace'),encoding)\n 
save_stdout.write(s)\n sys.stdout=self._fakeout\n \n \n \n \n \n \n save_trace=sys.gettrace()\n save_set_trace=pdb.set_trace\n self.debugger=_OutputRedirectingPdb(save_stdout)\n self.debugger.reset()\n pdb.set_trace=self.debugger.set_trace\n \n \n \n self.save_linecache_getlines=linecache.getlines\n linecache.getlines=self.__patched_linecache_getlines\n \n \n save_displayhook=sys.displayhook\n sys.displayhook=sys.__displayhook__\n \n try :\n return self.__run(test,compileflags,out)\n finally :\n sys.stdout=save_stdout\n pdb.set_trace=save_set_trace\n sys.settrace(save_trace)\n linecache.getlines=self.save_linecache_getlines\n sys.displayhook=save_displayhook\n if clear_globs:\n test.globs.clear()\n import builtins\n builtins._=None\n \n \n \n \n def summarize(self,verbose=None ):\n ''\n\n\n\n\n\n\n\n\n \n if verbose is None :\n verbose=self._verbose\n notests=[]\n passed=[]\n failed=[]\n totalt=totalf=0\n for x in self._name2ft.items():\n name,(f,t)=x\n assert f <=t\n totalt +=t\n totalf +=f\n if t ==0:\n notests.append(name)\n elif f ==0:\n passed.append((name,t))\n else :\n failed.append(x)\n if verbose:\n if notests:\n print(len(notests),\"items had no tests:\")\n notests.sort()\n for thing in notests:\n print(\" \",thing)\n if passed:\n print(len(passed),\"items passed all tests:\")\n passed.sort()\n for thing,count in passed:\n print(\" %3d tests in %s\"%(count,thing))\n if failed:\n print(self.DIVIDER)\n print(len(failed),\"items had failures:\")\n failed.sort()\n for thing,(f,t)in failed:\n print(\" %3d of %3d in %s\"%(f,t,thing))\n if verbose:\n print(totalt,\"tests in\",len(self._name2ft),\"items.\")\n print(totalt -totalf,\"passed and\",totalf,\"failed.\")\n if totalf:\n print(\"***Test Failed***\",totalf,\"failures.\")\n elif verbose:\n print(\"Test passed.\")\n return TestResults(totalf,totalt)\n \n \n \n \n def merge(self,other):\n d=self._name2ft\n for name,(f,t)in other._name2ft.items():\n if name in d:\n \n \n \n \n f2,t2=d[name]\n f=f+f2\n t=t+t2\n d[name]=f,t\n \nclass OutputChecker:\n ''\n\n\n\n\n\n \n def _toAscii(self,s):\n ''\n\n \n return str(s.encode('ASCII','backslashreplace'),\"ASCII\")\n \n def check_output(self,want,got,optionflags):\n ''\n\n\n\n\n\n\n\n \n \n \n \n \n \n \n got=self._toAscii(got)\n want=self._toAscii(want)\n \n \n \n if got ==want:\n return True\n \n \n \n if not (optionflags&DONT_ACCEPT_TRUE_FOR_1):\n if (got,want)==(\"True\\n\",\"1\\n\"):\n return True\n if (got,want)==(\"False\\n\",\"0\\n\"):\n return True\n \n \n \n if not (optionflags&DONT_ACCEPT_BLANKLINE):\n \n want=re.sub(r'(?m)^%s\\s*?$'%re.escape(BLANKLINE_MARKER),\n '',want)\n \n \n got=re.sub(r'(?m)^[^\\S\\n]+$','',got)\n if got ==want:\n return True\n \n \n \n \n if optionflags&NORMALIZE_WHITESPACE:\n got=' '.join(got.split())\n want=' '.join(want.split())\n if got ==want:\n return True\n \n \n \n if optionflags&ELLIPSIS:\n if _ellipsis_match(want,got):\n return True\n \n \n return False\n \n \n def _do_a_fancy_diff(self,want,got,optionflags):\n \n if not optionflags&(REPORT_UDIFF |\n REPORT_CDIFF |\n REPORT_NDIFF):\n return False\n \n \n \n \n \n \n \n \n \n \n \n if optionflags&REPORT_NDIFF:\n return True\n \n \n return want.count('\\n')>2 and got.count('\\n')>2\n \n def output_difference(self,example,got,optionflags):\n ''\n\n\n\n\n \n want=example.want\n \n \n if not (optionflags&DONT_ACCEPT_BLANKLINE):\n got=re.sub('(?m)^[ ]*(?=\\n)',BLANKLINE_MARKER,got)\n \n \n if self._do_a_fancy_diff(want,got,optionflags):\n \n want_lines=want.splitlines(keepends=True )\n 
got_lines=got.splitlines(keepends=True )\n \n if optionflags&REPORT_UDIFF:\n diff=difflib.unified_diff(want_lines,got_lines,n=2)\n diff=list(diff)[2:]\n kind='unified diff with -expected +actual'\n elif optionflags&REPORT_CDIFF:\n diff=difflib.context_diff(want_lines,got_lines,n=2)\n diff=list(diff)[2:]\n kind='context diff with expected followed by actual'\n elif optionflags&REPORT_NDIFF:\n engine=difflib.Differ(charjunk=difflib.IS_CHARACTER_JUNK)\n diff=list(engine.compare(want_lines,got_lines))\n kind='ndiff with -expected +actual'\n else :\n assert 0,'Bad diff option'\n \n diff=[line.rstrip()+'\\n'for line in diff]\n return 'Differences (%s):\\n'%kind+_indent(''.join(diff))\n \n \n \n if want and got:\n return 'Expected:\\n%sGot:\\n%s'%(_indent(want),_indent(got))\n elif want:\n return 'Expected:\\n%sGot nothing\\n'%_indent(want)\n elif got:\n return 'Expected nothing\\nGot:\\n%s'%_indent(got)\n else :\n return 'Expected nothing\\nGot nothing\\n'\n \nclass DocTestFailure(Exception):\n ''\n\n\n\n\n\n\n\n\n \n def __init__(self,test,example,got):\n self.test=test\n self.example=example\n self.got=got\n \n def __str__(self):\n return str(self.test)\n \nclass UnexpectedException(Exception):\n ''\n\n\n\n\n\n\n\n\n \n def __init__(self,test,example,exc_info):\n self.test=test\n self.example=example\n self.exc_info=exc_info\n \n def __str__(self):\n return str(self.test)\n \nclass DebugRunner(DocTestRunner):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def run(self,test,compileflags=None ,out=None ,clear_globs=True ):\n r=DocTestRunner.run(self,test,compileflags,out,False )\n if clear_globs:\n test.globs.clear()\n return r\n \n def report_unexpected_exception(self,out,test,example,exc_info):\n raise UnexpectedException(test,example,exc_info)\n \n def report_failure(self,out,test,example,got):\n raise DocTestFailure(test,example,got)\n \n \n \n \n \n \n \n \nmaster=None\n\ndef testmod(m=None ,name=None ,globs=None ,verbose=None ,\nreport=True ,optionflags=0,extraglobs=None ,\nraise_on_error=False ,exclude_empty=False ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n global master\n \n \n if m is None :\n \n \n \n m=sys.modules.get('__main__')\n \n \n if not inspect.ismodule(m):\n raise TypeError(\"testmod: module required; %r\"%(m,))\n \n \n if name is None :\n name=m.__name__\n \n \n finder=DocTestFinder(exclude_empty=exclude_empty)\n \n if raise_on_error:\n runner=DebugRunner(verbose=verbose,optionflags=optionflags)\n else :\n runner=DocTestRunner(verbose=verbose,optionflags=optionflags)\n \n for test in finder.find(m,name,globs=globs,extraglobs=extraglobs):\n runner.run(test)\n \n if report:\n runner.summarize()\n \n if master is None :\n master=runner\n else :\n master.merge(runner)\n \n return TestResults(runner.failures,runner.tries)\n \ndef testfile(filename,module_relative=True ,name=None ,package=None ,\nglobs=None ,verbose=None ,report=True ,optionflags=0,\nextraglobs=None ,raise_on_error=False ,parser=DocTestParser(),\nencoding=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n global master\n \n if package and not module_relative:\n raise ValueError(\"Package may only be specified for module-\"\n \"relative paths.\")\n \n \n 
text,filename=_load_testfile(filename,package,module_relative,\n encoding or \"utf-8\")\n \n \n if name is None :\n name=os.path.basename(filename)\n \n \n if globs is None :\n globs={}\n else :\n globs=globs.copy()\n if extraglobs is not None :\n globs.update(extraglobs)\n if '__name__'not in globs:\n globs['__name__']='__main__'\n \n if raise_on_error:\n runner=DebugRunner(verbose=verbose,optionflags=optionflags)\n else :\n runner=DocTestRunner(verbose=verbose,optionflags=optionflags)\n \n \n test=parser.get_doctest(text,globs,name,filename,0)\n runner.run(test)\n \n if report:\n runner.summarize()\n \n if master is None :\n master=runner\n else :\n master.merge(runner)\n \n return TestResults(runner.failures,runner.tries)\n \ndef run_docstring_examples(f,globs,verbose=False ,name=\"NoName\",\ncompileflags=None ,optionflags=0):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n finder=DocTestFinder(verbose=verbose,recurse=False )\n runner=DocTestRunner(verbose=verbose,optionflags=optionflags)\n for test in finder.find(f,name,globs=globs):\n runner.run(test,compileflags=compileflags)\n \n \n \n \n \n_unittest_reportflags=0\n\ndef set_unittest_reportflags(flags):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n global _unittest_reportflags\n \n if (flags&REPORTING_FLAGS)!=flags:\n raise ValueError(\"Only reporting flags allowed\",flags)\n old=_unittest_reportflags\n _unittest_reportflags=flags\n return old\n \n \nclass DocTestCase(unittest.TestCase):\n\n def __init__(self,test,optionflags=0,setUp=None ,tearDown=None ,\n checker=None ):\n \n unittest.TestCase.__init__(self)\n self._dt_optionflags=optionflags\n self._dt_checker=checker\n self._dt_test=test\n self._dt_setUp=setUp\n self._dt_tearDown=tearDown\n \n def setUp(self):\n test=self._dt_test\n \n if self._dt_setUp is not None :\n self._dt_setUp(test)\n \n def tearDown(self):\n test=self._dt_test\n \n if self._dt_tearDown is not None :\n self._dt_tearDown(test)\n \n test.globs.clear()\n \n def runTest(self):\n test=self._dt_test\n old=sys.stdout\n new=StringIO()\n optionflags=self._dt_optionflags\n \n if not (optionflags&REPORTING_FLAGS):\n \n \n optionflags |=_unittest_reportflags\n \n runner=DocTestRunner(optionflags=optionflags,\n checker=self._dt_checker,verbose=False )\n \n try :\n runner.DIVIDER=\"-\"*70\n failures,tries=runner.run(\n test,out=new.write,clear_globs=False )\n finally :\n sys.stdout=old\n \n if failures:\n raise self.failureException(self.format_failure(new.getvalue()))\n \n def format_failure(self,err):\n test=self._dt_test\n if test.lineno is None :\n lineno='unknown line number'\n else :\n lineno='%s'%test.lineno\n lname='.'.join(test.name.split('.')[-1:])\n return ('Failed doctest test for %s\\n'\n ' File \"%s\", line %s, in %s\\n\\n%s'\n %(test.name,test.filename,lineno,lname,err)\n )\n \n def debug(self):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n self.setUp()\n runner=DebugRunner(optionflags=self._dt_optionflags,\n checker=self._dt_checker,verbose=False )\n runner.run(self._dt_test,clear_globs=False )\n self.tearDown()\n \n def id(self):\n return self._dt_test.name\n \n def __eq__(self,other):\n if type(self)is not type(other):\n return NotImplemented\n \n return self._dt_test ==other._dt_test and\\\n self._dt_optionflags ==other._dt_optionflags and\\\n self._dt_setUp ==other._dt_setUp and\\\n self._dt_tearDown ==other._dt_tearDown and\\\n self._dt_checker ==other._dt_checker\n \n def __hash__(self):\n return 
hash((self._dt_optionflags,self._dt_setUp,self._dt_tearDown,\n self._dt_checker))\n \n def __repr__(self):\n name=self._dt_test.name.split('.')\n return \"%s (%s)\"%(name[-1],'.'.join(name[:-1]))\n \n __str__=__repr__\n \n def shortDescription(self):\n return \"Doctest: \"+self._dt_test.name\n \nclass SkipDocTestCase(DocTestCase):\n def __init__(self,module):\n self.module=module\n DocTestCase.__init__(self,None )\n \n def setUp(self):\n self.skipTest(\"DocTestSuite will not work with -O2 and above\")\n \n def test_skip(self):\n pass\n \n def shortDescription(self):\n return \"Skipping tests from %s\"%self.module.__name__\n \n __str__=shortDescription\n \n \nclass _DocTestSuite(unittest.TestSuite):\n\n def _removeTestAtIndex(self,index):\n pass\n \n \ndef DocTestSuite(module=None ,globs=None ,extraglobs=None ,test_finder=None ,\n**options):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n if test_finder is None :\n test_finder=DocTestFinder()\n \n module=_normalize_module(module)\n tests=test_finder.find(module,globs=globs,extraglobs=extraglobs)\n \n if not tests and sys.flags.optimize >=2:\n \n suite=_DocTestSuite()\n suite.addTest(SkipDocTestCase(module))\n return suite\n \n tests.sort()\n suite=_DocTestSuite()\n \n for test in tests:\n if len(test.examples)==0:\n continue\n if not test.filename:\n filename=module.__file__\n if filename[-4:]==\".pyc\":\n filename=filename[:-1]\n test.filename=filename\n suite.addTest(DocTestCase(test,**options))\n \n return suite\n \nclass DocFileCase(DocTestCase):\n\n def id(self):\n return '_'.join(self._dt_test.name.split('.'))\n \n def __repr__(self):\n return self._dt_test.filename\n __str__=__repr__\n \n def format_failure(self,err):\n return ('Failed doctest test for %s\\n File \"%s\", line 0\\n\\n%s'\n %(self._dt_test.name,self._dt_test.filename,err)\n )\n \ndef DocFileTest(path,module_relative=True ,package=None ,\nglobs=None ,parser=DocTestParser(),\nencoding=None ,**options):\n if globs is None :\n globs={}\n else :\n globs=globs.copy()\n \n if package and not module_relative:\n raise ValueError(\"Package may only be specified for module-\"\n \"relative paths.\")\n \n \n doc,path=_load_testfile(path,package,module_relative,\n encoding or \"utf-8\")\n \n if \"__file__\"not in globs:\n globs[\"__file__\"]=path\n \n \n name=os.path.basename(path)\n \n \n test=parser.get_doctest(doc,globs,name,path,0)\n return DocFileCase(test,**options)\n \ndef DocFileSuite(*paths,**kw):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n suite=_DocTestSuite()\n \n \n \n \n if kw.get('module_relative',True ):\n kw['package']=_normalize_module(kw.get('package'))\n \n for path in paths:\n suite.addTest(DocFileTest(path,**kw))\n \n return suite\n \n \n \n \n \ndef script_from_examples(s):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n output=[]\n for piece in DocTestParser().parse(s):\n if isinstance(piece,Example):\n \n output.append(piece.source[:-1])\n \n want=piece.want\n if want:\n output.append('# Expected:')\n output +=['## '+l for l in want.split('\\n')[:-1]]\n else :\n \n output +=[_comment_line(l)\n for l in piece.split('\\n')[:-1]]\n \n \n while output and output[-1]=='#':\n output.pop()\n while output and output[0]=='#':\n output.pop(0)\n \n \n return '\\n'.join(output)+'\\n'\n \ndef testsource(module,name):\n ''\n\n\n\n\n \n module=_normalize_module(module)\n 
tests=DocTestFinder().find(module)\n test=[t for t in tests if t.name ==name]\n if not test:\n raise ValueError(name,\"not found in tests\")\n test=test[0]\n testsrc=script_from_examples(test.docstring)\n return testsrc\n \ndef debug_src(src,pm=False ,globs=None ):\n ''\n testsrc=script_from_examples(src)\n debug_script(testsrc,pm,globs)\n \ndef debug_script(src,pm=False ,globs=None ):\n ''\n import pdb\n \n if globs:\n globs=globs.copy()\n else :\n globs={}\n \n if pm:\n try :\n exec(src,globs,globs)\n except :\n print(sys.exc_info()[1])\n p=pdb.Pdb(nosigint=True )\n p.reset()\n p.interaction(None ,sys.exc_info()[2])\n else :\n pdb.Pdb(nosigint=True ).run(\"exec(%r)\"%src,globs,globs)\n \ndef debug(module,name,pm=False ):\n ''\n\n\n\n\n \n module=_normalize_module(module)\n testsrc=testsource(module,name)\n debug_script(testsrc,pm,module.__dict__)\n \n \n \n \nclass _TestClass:\n ''\n\n\n\n\n\n\n\n\n\n\n \n \n def __init__(self,val):\n ''\n\n\n\n\n \n \n self.val=val\n \n def square(self):\n ''\n\n\n\n \n \n self.val=self.val **2\n return self\n \n def get(self):\n ''\n\n\n\n\n \n \n return self.val\n \n__test__={\"_TestClass\":_TestClass,\n\"string\":r\"\"\"\n Example of a string object, searched as-is.\n >>> x = 1; y = 2\n >>> x + y, x * y\n (3, 2)\n \"\"\",\n\n\"bool-int equivalence\":r\"\"\"\n In 2.2, boolean expressions displayed\n 0 or 1. By default, we still accept\n them. This can be disabled by passing\n DONT_ACCEPT_TRUE_FOR_1 to the new\n optionflags argument.\n >>> 4 == 4\n 1\n >>> 4 == 4\n True\n >>> 4 > 4\n 0\n >>> 4 > 4\n False\n \"\"\",\n\n\"blank lines\":r\"\"\"\n Blank lines can be marked with :\n >>> print('foo\\n\\nbar\\n')\n foo\n \n bar\n \n \"\"\",\n\n\"ellipsis\":r\"\"\"\n If the ellipsis flag is used, then '...' can be used to\n elide substrings in the desired output:\n >>> print(list(range(1000))) #doctest: +ELLIPSIS\n [0, 1, 2, ..., 999]\n \"\"\",\n\n\"whitespace normalization\":r\"\"\"\n If the whitespace normalization flag is used, then\n differences in whitespace are ignored.\n >>> print(list(range(30))) #doctest: +NORMALIZE_WHITESPACE\n [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14,\n 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26,\n 27, 28, 29]\n \"\"\",\n}\n\n\ndef _test():\n import argparse\n \n parser=argparse.ArgumentParser(description=\"doctest runner\")\n parser.add_argument('-v','--verbose',action='store_true',default=False ,\n help='print very verbose output for all tests')\n parser.add_argument('-o','--option',action='append',\n choices=OPTIONFLAGS_BY_NAME.keys(),default=[],\n help=('specify a doctest option flag to apply'\n ' to the test run; may be specified more'\n ' than once to apply multiple options'))\n parser.add_argument('-f','--fail-fast',action='store_true',\n help=('stop running tests after first failure (this'\n ' is a shorthand for -o FAIL_FAST, and is'\n ' in addition to any other -o options)'))\n parser.add_argument('file',nargs='+',\n help='file containing the tests to run')\n args=parser.parse_args()\n testfiles=args.file\n \n \n verbose=args.verbose\n options=0\n for option in args.option:\n options |=OPTIONFLAGS_BY_NAME[option]\n if args.fail_fast:\n options |=FAIL_FAST\n for filename in testfiles:\n if filename.endswith(\".py\"):\n \n \n \n dirname,filename=os.path.split(filename)\n sys.path.insert(0,dirname)\n m=__import__(filename[:-3])\n del sys.path[0]\n failures,_=testmod(m,verbose=verbose,optionflags=options)\n else :\n failures,_=testfile(filename,module_relative=False ,\n verbose=verbose,optionflags=options)\n if 
failures:\n return 1\n return 0\n \n \nif __name__ ==\"__main__\":\n sys.exit(_test())\n", ["__future__", "argparse", "builtins", "collections", "difflib", "inspect", "io", "linecache", "os", "pdb", "re", "sys", "traceback", "unittest"]], "enum": [".py", "import sys\nfrom types import MappingProxyType,DynamicClassAttribute\n\n\ntry :\n from _collections import OrderedDict\nexcept ImportError:\n from collections import OrderedDict\n \n \n__all__=[\n'EnumMeta',\n'Enum','IntEnum','Flag','IntFlag',\n'auto','unique',\n]\n\n\ndef _is_descriptor(obj):\n ''\n return (\n hasattr(obj,'__get__')or\n hasattr(obj,'__set__')or\n hasattr(obj,'__delete__'))\n \n \ndef _is_dunder(name):\n ''\n return (name[:2]==name[-2:]=='__'and\n name[2:3]!='_'and\n name[-3:-2]!='_'and\n len(name)>4)\n \n \ndef _is_sunder(name):\n ''\n return (name[0]==name[-1]=='_'and\n name[1:2]!='_'and\n name[-2:-1]!='_'and\n len(name)>2)\n \ndef _make_class_unpicklable(cls):\n ''\n def _break_on_call_reduce(self,proto):\n raise TypeError('%r cannot be pickled'%self)\n cls.__reduce_ex__=_break_on_call_reduce\n cls.__module__=''\n \n_auto_null=object()\nclass auto:\n ''\n\n \n value=_auto_null\n \n \nclass _EnumDict(dict):\n ''\n\n\n\n\n \n def __init__(self):\n super().__init__()\n self._member_names=[]\n self._last_values=[]\n self._ignore=[]\n \n def __setitem__(self,key,value):\n ''\n\n\n\n\n\n\n \n if _is_sunder(key):\n if key not in (\n '_order_','_create_pseudo_member_',\n '_generate_next_value_','_missing_','_ignore_',\n ):\n raise ValueError('_names_ are reserved for future Enum use')\n if key =='_generate_next_value_':\n setattr(self,'_generate_next_value',value)\n elif key =='_ignore_':\n if isinstance(value,str):\n value=value.replace(',',' ').split()\n else :\n value=list(value)\n self._ignore=value\n already=set(value)&set(self._member_names)\n if already:\n raise ValueError('_ignore_ cannot specify already set names: %r'%(already,))\n elif _is_dunder(key):\n if key =='__order__':\n key='_order_'\n elif key in self._member_names:\n \n raise TypeError('Attempted to reuse key: %r'%key)\n elif key in self._ignore:\n pass\n elif not _is_descriptor(value):\n if key in self:\n \n raise TypeError('%r already defined as: %r'%(key,self[key]))\n if isinstance(value,auto):\n if value.value ==_auto_null:\n value.value=self._generate_next_value(key,1,len(self._member_names),self._last_values[:])\n value=value.value\n self._member_names.append(key)\n self._last_values.append(value)\n super().__setitem__(key,value)\n \n \n \n \n \nEnum=None\n\n\nclass EnumMeta(type):\n ''\n @classmethod\n def __prepare__(metacls,cls,bases):\n \n enum_dict=_EnumDict()\n \n member_type,first_enum=metacls._get_mixins_(bases)\n if first_enum is not None :\n enum_dict['_generate_next_value_']=getattr(first_enum,'_generate_next_value_',None )\n return enum_dict\n \n def __new__(metacls,cls,bases,classdict):\n \n \n \n \n \n \n classdict.setdefault('_ignore_',[]).append('_ignore_')\n ignore=classdict['_ignore_']\n for key in ignore:\n classdict.pop(key,None )\n member_type,first_enum=metacls._get_mixins_(bases)\n __new__,save_new,use_args=metacls._find_new_(classdict,member_type,\n first_enum)\n \n \n \n enum_members={k:classdict[k]for k in classdict._member_names}\n for name in classdict._member_names:\n del classdict[name]\n \n \n _order_=classdict.pop('_order_',None )\n \n \n invalid_names=set(enum_members)&{'mro',}\n if invalid_names:\n raise ValueError('Invalid enum member name: {0}'.format(\n ','.join(invalid_names)))\n \n \n if '__doc__'not in 
classdict:\n classdict['__doc__']='An enumeration.'\n \n \n enum_class=super().__new__(metacls,cls,bases,classdict)\n enum_class._member_names_=[]\n enum_class._member_map_=OrderedDict()\n enum_class._member_type_=member_type\n \n \n \n base_attributes={a for b in enum_class.mro()for a in b.__dict__}\n \n \n enum_class._value2member_map_={}\n \n \n \n \n \n \n \n \n \n \n \n if '__reduce_ex__'not in classdict:\n if member_type is not object:\n methods=('__getnewargs_ex__','__getnewargs__',\n '__reduce_ex__','__reduce__')\n if not any(m in member_type.__dict__ for m in methods):\n _make_class_unpicklable(enum_class)\n \n \n \n \n \n for member_name in classdict._member_names:\n value=enum_members[member_name]\n if not isinstance(value,tuple):\n args=(value,)\n else :\n args=value\n if member_type is tuple:\n args=(args,)\n if not use_args:\n enum_member=__new__(enum_class)\n if not hasattr(enum_member,'_value_'):\n enum_member._value_=value\n else :\n enum_member=__new__(enum_class,*args)\n if not hasattr(enum_member,'_value_'):\n if member_type is object:\n enum_member._value_=value\n else :\n enum_member._value_=member_type(*args)\n value=enum_member._value_\n enum_member._name_=member_name\n enum_member.__objclass__=enum_class\n enum_member.__init__(*args)\n \n \n for name,canonical_member in enum_class._member_map_.items():\n if canonical_member._value_ ==enum_member._value_:\n enum_member=canonical_member\n break\n else :\n \n enum_class._member_names_.append(member_name)\n \n \n if member_name not in base_attributes:\n setattr(enum_class,member_name,enum_member)\n \n enum_class._member_map_[member_name]=enum_member\n try :\n \n \n \n enum_class._value2member_map_[value]=enum_member\n except TypeError:\n pass\n \n \n \n for name in ('__repr__','__str__','__format__','__reduce_ex__'):\n class_method=getattr(enum_class,name)\n obj_method=getattr(member_type,name,None )\n enum_method=getattr(first_enum,name,None )\n if obj_method is not None and obj_method is class_method:\n setattr(enum_class,name,enum_method)\n \n \n \n if Enum is not None :\n \n \n if save_new:\n enum_class.__new_member__=__new__\n enum_class.__new__=Enum.__new__\n \n \n if _order_ is not None :\n if isinstance(_order_,str):\n _order_=_order_.replace(',',' ').split()\n if _order_ !=enum_class._member_names_:\n raise TypeError('member order does not match _order_')\n \n return enum_class\n \n def __bool__(self):\n ''\n\n \n return True\n \n def __call__(cls,value,names=None ,*,module=None ,qualname=None ,type=None ,start=1):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if names is None :\n return cls.__new__(cls,value)\n \n return cls._create_(value,names,module=module,qualname=qualname,type=type,start=start)\n \n def __contains__(cls,member):\n if not isinstance(member,Enum):\n import warnings\n warnings.warn(\n \"using non-Enums in containment checks will raise \"\n \"TypeError in Python 3.8\",\n DeprecationWarning,2)\n return isinstance(member,cls)and member._name_ in cls._member_map_\n \n def __delattr__(cls,attr):\n \n \n if attr in cls._member_map_:\n raise AttributeError(\n \"%s: cannot delete Enum member.\"%cls.__name__)\n super().__delattr__(attr)\n \n def __dir__(self):\n return (['__class__','__doc__','__members__','__module__']+\n self._member_names_)\n \n def __getattr__(cls,name):\n ''\n\n\n\n\n\n\n \n if _is_dunder(name):\n raise AttributeError(name)\n try :\n return cls._member_map_[name]\n except KeyError:\n raise AttributeError(name)from None\n \n def __getitem__(cls,name):\n return 
cls._member_map_[name]\n \n def __iter__(cls):\n return (cls._member_map_[name]for name in cls._member_names_)\n \n def __len__(cls):\n return len(cls._member_names_)\n \n @property\n def __members__(cls):\n ''\n\n\n\n\n \n return MappingProxyType(cls._member_map_)\n \n def __repr__(cls):\n return \"\"%cls.__name__\n \n def __reversed__(cls):\n return (cls._member_map_[name]for name in reversed(cls._member_names_))\n \n def __setattr__(cls,name,value):\n ''\n\n\n\n\n\n \n member_map=cls.__dict__.get('_member_map_',{})\n if name in member_map:\n raise AttributeError('Cannot reassign members.')\n super().__setattr__(name,value)\n \n def _create_(cls,class_name,names,*,module=None ,qualname=None ,type=None ,start=1):\n ''\n\n\n\n\n\n\n\n\n\n \n metacls=cls.__class__\n bases=(cls,)if type is None else (type,cls)\n _,first_enum=cls._get_mixins_(bases)\n classdict=metacls.__prepare__(class_name,bases)\n \n \n if isinstance(names,str):\n names=names.replace(',',' ').split()\n if isinstance(names,(tuple,list))and names and isinstance(names[0],str):\n original_names,names=names,[]\n last_values=[]\n for count,name in enumerate(original_names):\n value=first_enum._generate_next_value_(name,start,count,last_values[:])\n last_values.append(value)\n names.append((name,value))\n \n \n for item in names:\n if isinstance(item,str):\n member_name,member_value=item,names[item]\n else :\n member_name,member_value=item\n classdict[member_name]=member_value\n enum_class=metacls.__new__(metacls,class_name,bases,classdict)\n \n \n \n if module is None :\n try :\n module=sys._getframe(2).f_globals['__name__']\n except (AttributeError,ValueError)as exc:\n pass\n if module is None :\n _make_class_unpicklable(enum_class)\n else :\n enum_class.__module__=module\n if qualname is not None :\n enum_class.__qualname__=qualname\n \n return enum_class\n \n @staticmethod\n def _get_mixins_(bases):\n ''\n\n\n\n\n \n if not bases:\n return object,Enum\n \n \n \n \n member_type=first_enum=None\n for base in bases:\n if (base is not Enum and\n issubclass(base,Enum)and\n base._member_names_):\n raise TypeError(\"Cannot extend enumerations\")\n \n if not issubclass(base,Enum):\n raise TypeError(\"new enumerations must be created as \"\n \"`ClassName([mixin_type,] enum_type)`\")\n \n \n \n if not issubclass(bases[0],Enum):\n member_type=bases[0]\n first_enum=bases[-1]\n else :\n for base in bases[0].__mro__:\n \n \n \n \n if issubclass(base,Enum):\n if first_enum is None :\n first_enum=base\n else :\n if member_type is None :\n member_type=base\n \n return member_type,first_enum\n \n @staticmethod\n def _find_new_(classdict,member_type,first_enum):\n ''\n\n\n\n\n\n \n \n \n \n __new__=classdict.get('__new__',None )\n \n \n save_new=__new__ is not None\n \n if __new__ is None :\n \n \n for method in ('__new_member__','__new__'):\n for possible in (member_type,first_enum):\n target=getattr(possible,method,None )\n if target not in {\n None ,\n None .__new__,\n object.__new__,\n Enum.__new__,\n }:\n __new__=target\n break\n if __new__ is not None :\n break\n else :\n __new__=object.__new__\n \n \n \n \n if __new__ is object.__new__:\n use_args=False\n else :\n use_args=True\n \n return __new__,save_new,use_args\n \n \nclass Enum(metaclass=EnumMeta):\n ''\n\n\n\n \n def __new__(cls,value):\n \n \n \n if type(value)is cls:\n \n return value\n \n \n try :\n if value in cls._value2member_map_:\n return cls._value2member_map_[value]\n except TypeError:\n \n for member in cls._member_map_.values():\n if member._value_ ==value:\n return 
member\n \n return cls._missing_(value)\n \n def _generate_next_value_(name,start,count,last_values):\n for last_value in reversed(last_values):\n try :\n return last_value+1\n except TypeError:\n pass\n else :\n return start\n \n @classmethod\n def _missing_(cls,value):\n raise ValueError(\"%r is not a valid %s\"%(value,cls.__name__))\n \n def __repr__(self):\n return \"<%s.%s: %r>\"%(\n self.__class__.__name__,self._name_,self._value_)\n \n def __str__(self):\n return \"%s.%s\"%(self.__class__.__name__,self._name_)\n \n def __dir__(self):\n added_behavior=[\n m\n for cls in self.__class__.mro()\n for m in cls.__dict__\n if m[0]!='_'and m not in self._member_map_\n ]\n return (['__class__','__doc__','__module__']+added_behavior)\n \n def __format__(self,format_spec):\n \n \n \n \n \n if self._member_type_ is object:\n cls=str\n val=str(self)\n \n else :\n cls=self._member_type_\n val=self._value_\n return cls.__format__(val,format_spec)\n \n def __hash__(self):\n return hash(self._name_)\n \n def __reduce_ex__(self,proto):\n return self.__class__,(self._value_,)\n \n \n \n \n \n \n \n \n @DynamicClassAttribute\n def name(self):\n ''\n return self._name_\n \n @DynamicClassAttribute\n def value(self):\n ''\n return self._value_\n \n @classmethod\n def _convert(cls,name,module,filter,source=None ):\n ''\n\n \n \n \n \n \n \n module_globals=vars(sys.modules[module])\n if source:\n source=vars(source)\n else :\n source=module_globals\n \n \n \n \n \n members=[\n (name,source[name])\n for name in source.keys()\n if filter(name)]\n try :\n \n members.sort(key=lambda t:(t[1],t[0]))\n except TypeError:\n \n members.sort(key=lambda t:t[0])\n cls=cls(name,members,module=module)\n cls.__reduce_ex__=_reduce_ex_by_name\n module_globals.update(cls.__members__)\n module_globals[name]=cls\n return cls\n \n \nclass IntEnum(int,Enum):\n ''\n \n \ndef _reduce_ex_by_name(self,proto):\n return self.name\n \nclass Flag(Enum):\n ''\n \n def _generate_next_value_(name,start,count,last_values):\n ''\n\n\n\n\n\n\n \n if not count:\n return start if start is not None else 1\n for last_value in reversed(last_values):\n try :\n high_bit=_high_bit(last_value)\n break\n except Exception:\n raise TypeError('Invalid Flag value: %r'%last_value)from None\n return 2 **(high_bit+1)\n \n @classmethod\n def _missing_(cls,value):\n original_value=value\n if value <0:\n value=~value\n possible_member=cls._create_pseudo_member_(value)\n if original_value <0:\n possible_member=~possible_member\n return possible_member\n \n @classmethod\n def _create_pseudo_member_(cls,value):\n ''\n\n \n pseudo_member=cls._value2member_map_.get(value,None )\n if pseudo_member is None :\n \n _,extra_flags=_decompose(cls,value)\n if extra_flags:\n raise ValueError(\"%r is not a valid %s\"%(value,cls.__name__))\n \n pseudo_member=object.__new__(cls)\n pseudo_member._name_=None\n pseudo_member._value_=value\n \n \n pseudo_member=cls._value2member_map_.setdefault(value,pseudo_member)\n return pseudo_member\n \n def __contains__(self,other):\n if not isinstance(other,self.__class__):\n import warnings\n warnings.warn(\n \"using non-Flags in containment checks will raise \"\n \"TypeError in Python 3.8\",\n DeprecationWarning,2)\n return False\n return other._value_&self._value_ ==other._value_\n \n def __repr__(self):\n cls=self.__class__\n if self._name_ is not None :\n return '<%s.%s: %r>'%(cls.__name__,self._name_,self._value_)\n members,uncovered=_decompose(cls,self._value_)\n return '<%s.%s: %r>'%(\n cls.__name__,\n '|'.join([str(m._name_ or 
m._value_)for m in members]),\n self._value_,\n )\n \n def __str__(self):\n cls=self.__class__\n if self._name_ is not None :\n return '%s.%s'%(cls.__name__,self._name_)\n members,uncovered=_decompose(cls,self._value_)\n if len(members)==1 and members[0]._name_ is None :\n return '%s.%r'%(cls.__name__,members[0]._value_)\n else :\n return '%s.%s'%(\n cls.__name__,\n '|'.join([str(m._name_ or m._value_)for m in members]),\n )\n \n def __bool__(self):\n return bool(self._value_)\n \n def __or__(self,other):\n if not isinstance(other,self.__class__):\n return NotImplemented\n return self.__class__(self._value_ |other._value_)\n \n def __and__(self,other):\n if not isinstance(other,self.__class__):\n return NotImplemented\n return self.__class__(self._value_&other._value_)\n \n def __xor__(self,other):\n if not isinstance(other,self.__class__):\n return NotImplemented\n return self.__class__(self._value_ ^other._value_)\n \n def __invert__(self):\n members,uncovered=_decompose(self.__class__,self._value_)\n inverted=self.__class__(0)\n for m in self.__class__:\n if m not in members and not (m._value_&self._value_):\n inverted=inverted |m\n return self.__class__(inverted)\n \n \nclass IntFlag(int,Flag):\n ''\n \n @classmethod\n def _missing_(cls,value):\n if not isinstance(value,int):\n raise ValueError(\"%r is not a valid %s\"%(value,cls.__name__))\n new_member=cls._create_pseudo_member_(value)\n return new_member\n \n @classmethod\n def _create_pseudo_member_(cls,value):\n pseudo_member=cls._value2member_map_.get(value,None )\n if pseudo_member is None :\n need_to_create=[value]\n \n _,extra_flags=_decompose(cls,value)\n \n while extra_flags:\n \n bit=_high_bit(extra_flags)\n flag_value=2 **bit\n if (flag_value not in cls._value2member_map_ and\n flag_value not in need_to_create\n ):\n need_to_create.append(flag_value)\n if extra_flags ==-flag_value:\n extra_flags=0\n else :\n extra_flags ^=flag_value\n for value in reversed(need_to_create):\n \n pseudo_member=int.__new__(cls,value)\n pseudo_member._name_=None\n pseudo_member._value_=value\n \n \n pseudo_member=cls._value2member_map_.setdefault(value,pseudo_member)\n return pseudo_member\n \n def __or__(self,other):\n if not isinstance(other,(self.__class__,int)):\n return NotImplemented\n result=self.__class__(self._value_ |self.__class__(other)._value_)\n return result\n \n def __and__(self,other):\n if not isinstance(other,(self.__class__,int)):\n return NotImplemented\n return self.__class__(self._value_&self.__class__(other)._value_)\n \n def __xor__(self,other):\n if not isinstance(other,(self.__class__,int)):\n return NotImplemented\n return self.__class__(self._value_ ^self.__class__(other)._value_)\n \n __ror__=__or__\n __rand__=__and__\n __rxor__=__xor__\n \n def __invert__(self):\n result=self.__class__(~self._value_)\n return result\n \n \ndef _high_bit(value):\n ''\n return value.bit_length()-1\n \ndef unique(enumeration):\n ''\n duplicates=[]\n for name,member in enumeration.__members__.items():\n if name !=member.name:\n duplicates.append((name,member.name))\n if duplicates:\n alias_details=', '.join(\n [\"%s -> %s\"%(alias,name)for (alias,name)in duplicates])\n raise ValueError('duplicate values found in %r: %s'%\n (enumeration,alias_details))\n return enumeration\n \ndef _decompose(flag,value):\n ''\n \n not_covered=value\n negative=value <0\n \n \n \n if negative:\n \n flags_to_check=[\n (m,v)\n for v,m in list(flag._value2member_map_.items())\n if m.name is not None\n ]\n else :\n \n flags_to_check=[\n (m,v)\n for v,m in 
list(flag._value2member_map_.items())\n if m.name is not None or _power_of_two(v)\n ]\n members=[]\n for member,member_value in flags_to_check:\n if member_value and member_value&value ==member_value:\n members.append(member)\n not_covered &=~member_value\n if not members and value in flag._value2member_map_:\n members.append(flag._value2member_map_[value])\n members.sort(key=lambda m:m._value_,reverse=True )\n if len(members)>1 and members[0].value ==value:\n \n members.pop(0)\n return members,not_covered\n \ndef _power_of_two(value):\n if value <1:\n return False\n return value ==2 **_high_bit(value)\n", ["_collections", "collections", "sys", "types", "warnings"]], "errno": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\nE2BIG=7\n\nEACCES=13\n\nEADDRINUSE=10048\n\nEADDRNOTAVAIL=10049\n\nEAFNOSUPPORT=10047\n\nEAGAIN=11\n\nEALREADY=10037\n\nEBADF=9\n\nEBADMSG=104\n\nEBUSY=16\n\nECANCELED=105\n\nECHILD=10\n\nECONNABORTED=10053\n\nECONNREFUSED=10061\n\nECONNRESET=10054\n\nEDEADLK=36\n\nEDEADLOCK=36\n\nEDESTADDRREQ=10039\n\nEDOM=33\n\nEDQUOT=10069\n\nEEXIST=17\n\nEFAULT=14\n\nEFBIG=27\n\nEHOSTDOWN=10064\n\nEHOSTUNREACH=10065\n\nEIDRM=111\n\nEILSEQ=42\n\nEINPROGRESS=10036\n\nEINTR=4\n\nEINVAL=22\n\nEIO=5\n\nEISCONN=10056\n\nEISDIR=21\n\nELOOP=10062\n\nEMFILE=24\n\nEMLINK=31\n\nEMSGSIZE=10040\n\nENAMETOOLONG=38\n\nENETDOWN=10050\n\nENETRESET=10052\n\nENETUNREACH=10051\n\nENFILE=23\n\nENOBUFS=10055\n\nENODATA=120\n\nENODEV=19\n\nENOENT=2\n\nENOEXEC=8\n\nENOLCK=39\n\nENOLINK=121\n\nENOMEM=12\n\nENOMSG=122\n\nENOPROTOOPT=10042\n\nENOSPC=28\n\nENOSR=124\n\nENOSTR=125\n\nENOSYS=40\n\nENOTCONN=10057\n\nENOTDIR=20\n\nENOTEMPTY=41\n\nENOTRECOVERABLE=127\n\nENOTSOCK=10038\n\nENOTSUP=129\n\nENOTTY=25\n\nENXIO=6\n\nEOPNOTSUPP=10045\n\nEOVERFLOW=132\n\nEOWNERDEAD=133\n\nEPERM=1\n\nEPFNOSUPPORT=10046\n\nEPIPE=32\n\nEPROTO=134\n\nEPROTONOSUPPORT=10043\n\nEPROTOTYPE=10041\n\nERANGE=34\n\nEREMOTE=10071\n\nEROFS=30\n\nESHUTDOWN=10058\n\nESOCKTNOSUPPORT=10044\n\nESPIPE=29\n\nESRCH=3\n\nESTALE=10070\n\nETIME=137\n\nETIMEDOUT=10060\n\nETOOMANYREFS=10059\n\nETXTBSY=139\n\nEUSERS=10068\n\nEWOULDBLOCK=10035\n\nEXDEV=18\n\nWSABASEERR=10000\n\nWSAEACCES=10013\n\nWSAEADDRINUSE=10048\n\nWSAEADDRNOTAVAIL=10049\n\nWSAEAFNOSUPPORT=10047\n\nWSAEALREADY=10037\n\nWSAEBADF=10009\n\nWSAECONNABORTED=10053\n\nWSAECONNREFUSED=10061\n\nWSAECONNRESET=10054\n\nWSAEDESTADDRREQ=10039\n\nWSAEDISCON=10101\n\nWSAEDQUOT=10069\n\nWSAEFAULT=10014\n\nWSAEHOSTDOWN=10064\n\nWSAEHOSTUNREACH=10065\n\nWSAEINPROGRESS=10036\n\nWSAEINTR=10004\n\nWSAEINVAL=10022\n\nWSAEISCONN=10056\n\nWSAELOOP=10062\n\nWSAEMFILE=10024\n\nWSAEMSGSIZE=10040\n\nWSAENAMETOOLONG=10063\n\nWSAENETDOWN=10050\n\nWSAENETRESET=10052\n\nWSAENETUNREACH=10051\n\nWSAENOBUFS=10055\n\nWSAENOPROTOOPT=10042\n\nWSAENOTCONN=10057\n\nWSAENOTEMPTY=10066\n\nWSAENOTSOCK=10038\n\nWSAEOPNOTSUPP=10045\n\nWSAEPFNOSUPPORT=10046\n\nWSAEPROCLIM=10067\n\nWSAEPROTONOSUPPORT=10043\n\nWSAEPROTOTYPE=10041\n\nWSAEREMOTE=10071\n\nWSAESHUTDOWN=10058\n\nWSAESOCKTNOSUPPORT=10044\n\nWSAESTALE=10070\n\nWSAETIMEDOUT=10060\n\nWSAETOOMANYREFS=10059\n\nWSAEUSERS=10068\n\nWSAEWOULDBLOCK=10035\n\nWSANOTINITIALISED=10093\n\nWSASYSNOTREADY=10091\n\nWSAVERNOTSUPPORTED=10092\n\nerrorcode={v:k for (k,v)in globals().items()if k ==k.upper()}\n", []], "external_import": [".py", "import os\nfrom browser import doc\nimport urllib.request\n\n\n\n\n\nclass ModuleFinder:\n def __init__(self,path_entry):\n print(\"external_import here..\")\n \n self._module=None\n if path_entry.startswith('http://'):\n self.path_entry=path_entry\n else :\n 
raise ImportError()\n \n def __str__(self):\n return '<%s for \"%s\">'%(self.__class__.__name__,self.path_entry)\n \n def find_module(self,fullname,path=None ):\n path=path or self.path_entry\n \n for _ext in ['js','pyj','py']:\n _fp,_url,_headers=urllib.request.urlopen(path+'/'+'%s.%s'%(fullname,_ext))\n self._module=_fp.read()\n _fp.close()\n if self._module is not None :\n print(\"module found at %s:%s\"%(path,fullname))\n return ModuleLoader(path,fullname,self._module)\n \n print('module %s not found'%fullname)\n raise ImportError()\n return None\n \nclass ModuleLoader:\n ''\n \n def __init__(self,filepath,name,module_source):\n self._filepath=filepath\n self._name=name\n self._module_source=module_source\n \n def get_source(self):\n return self._module_source\n \n def is_package(self):\n return '.'in self._name\n \n def load_module(self):\n if self._name in sys.modules:\n \n mod=sys.modules[self._name]\n return mod\n \n _src=self.get_source()\n if self._filepath.endswith('.js'):\n mod=JSObject(import_js_module(_src,self._filepath,self._name))\n elif self._filepath.endswith('.py'):\n mod=JSObject(import_py_module(_src,self._filepath,self._name))\n elif self._filepath.endswith('.pyj'):\n mod=JSObject(import_pyj_module(_src,self._filepath,self._name))\n else :\n raise ImportError('Invalid Module: %s'%self._filepath)\n \n \n mod.__file__=self._filepath\n mod.__name__=self._name\n mod.__path__=os.path.abspath(self._filepath)\n mod.__loader__=self\n mod.__package__='.'.join(self._name.split('.')[:-1])\n \n if self.is_package():\n print('adding path for package')\n \n \n mod.__path__=[self._filepath]\n else :\n print('imported as regular module')\n \n print('creating a new module object for \"%s\"'%self._name)\n sys.modules.setdefault(self._name,mod)\n JSObject(__BRYTHON__.imported)[self._name]=mod\n \n return mod\n", ["browser", "os", "urllib.request"]], "faulthandler": [".py", "''\n\n\n_EXCEPTION_ACCESS_VIOLATION=-1073741819\n\n_EXCEPTION_INT_DIVIDE_BY_ZERO=-1073741676\n\n_EXCEPTION_NONCONTINUABLE=1\n\n_EXCEPTION_NONCONTINUABLE_EXCEPTION=-1073741787\n\n_EXCEPTION_STACK_OVERFLOW=-1073741571\n\nclass __loader__(object):\n ''\n\n\n\n\n \n \n \n __delattr__=\"\"\n \n __dict__=\"{'__module__': '_frozen_importlib', '__doc__': 'Meta path import for built-in modules.\\n\\n All methods are either class or static methods to avoid the need to\\n instantiate the class.\\n\\n ', 'module_repr': , 'find_spec': , 'find_module': , 'create_module': , 'exec_module': , 'get_code': , 'get_source': , 'is_package': , 'load_module': , '__dict__': , '__weakref__': }\"\n \n __dir__=\"\"\n \n __eq__=\"\"\n \n __format__=\"\"\n \n __ge__=\"\"\n \n __getattribute__=\"\"\n \n __gt__=\"\"\n \n __hash__=\"\"\n \n __init__=\"\"\n \n def __init_subclass__(*args,**kw):\n ''\n\n\n \n pass\n \n __le__=\"\"\n \n __lt__=\"\"\n \n __module__=\"\"\"_frozen_importlib\"\"\"\n \n __ne__=\"\"\n \n def __new__(*args,**kw):\n ''\n pass\n \n __reduce__=\"\"\n \n __reduce_ex__=\"\"\n \n __repr__=\"\"\n \n __setattr__=\"\"\n \n __sizeof__=\"\"\n \n __str__=\"\"\n \n def __subclasshook__(*args,**kw):\n ''\n\n\n\n\n \n pass\n \n __weakref__=\"\"\n \n create_module=\">\"\n \n exec_module=\">\"\n \n find_module=\">\"\n \n find_spec=\">\"\n \n get_code=\">\"\n \n get_source=\">\"\n \n is_package=\">\"\n \n load_module=\">\"\n \n def module_repr(*args,**kw):\n ''\n\n\n \n pass\n__spec__=\"ModuleSpec(name='faulthandler', loader=, origin='built-in')\"\n\ndef _fatal_error(*args,**kw):\n ''\n pass\n \ndef _fatal_error_c_thread(*args,**kw):\n ''\n 
pass\n \ndef _raise_exception(*args,**kw):\n ''\n pass\n \ndef _read_null(*args,**kw):\n ''\n pass\n \ndef _sigabrt(*args,**kw):\n ''\n pass\n \ndef _sigfpe(*args,**kw):\n ''\n pass\n \ndef _sigsegv(*args,**kw):\n ''\n pass\n \ndef cancel_dump_traceback_later(*args,**kw):\n ''\n pass\n \ndef disable(*args,**kw):\n ''\n pass\n \ndef dump_traceback(*args,**kw):\n ''\n pass\n \ndef dump_traceback_later(*args,**kw):\n ''\n \n pass\n \ndef enable(*args,**kw):\n ''\n pass\n \ndef is_enabled(*args,**kw):\n ''\n pass\n", []], "fnmatch": [".py", "''\n\n\n\n\n\n\n\n\n\n\nimport os\nimport posixpath\nimport re\nimport functools\n\n__all__=[\"filter\",\"fnmatch\",\"fnmatchcase\",\"translate\"]\n\ndef fnmatch(name,pat):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n name=os.path.normcase(name)\n pat=os.path.normcase(pat)\n return fnmatchcase(name,pat)\n \n@functools.lru_cache(maxsize=256,typed=True )\ndef _compile_pattern(pat):\n if isinstance(pat,bytes):\n pat_str=str(pat,'ISO-8859-1')\n res_str=translate(pat_str)\n res=bytes(res_str,'ISO-8859-1')\n else :\n res=translate(pat)\n return re.compile(res).match\n \ndef filter(names,pat):\n ''\n result=[]\n pat=os.path.normcase(pat)\n match=_compile_pattern(pat)\n if os.path is posixpath:\n \n for name in names:\n if match(name):\n result.append(name)\n else :\n for name in names:\n if match(os.path.normcase(name)):\n result.append(name)\n return result\n \ndef fnmatchcase(name,pat):\n ''\n\n\n\n \n match=_compile_pattern(pat)\n return match(name)is not None\n \n \ndef translate(pat):\n ''\n\n\n \n \n i,n=0,len(pat)\n res=''\n while i =n:\n res=res+'\\\\['\n else :\n stuff=pat[i:j]\n if '--'not in stuff:\n stuff=stuff.replace('\\\\',r'\\\\')\n else :\n chunks=[]\n k=i+2 if pat[i]=='!'else i+1\n while True :\n k=pat.find('-',k,j)\n if k <0:\n break\n chunks.append(pat[i:k])\n i=k+1\n k=k+3\n chunks.append(pat[i:j])\n \n \n stuff='-'.join(s.replace('\\\\',r'\\\\').replace('-',r'\\-')\n for s in chunks)\n \n stuff=re.sub(r'([&~|])',r'\\\\\\1',stuff)\n i=j+1\n if stuff[0]=='!':\n stuff='^'+stuff[1:]\n elif stuff[0]in ('^','['):\n stuff='\\\\'+stuff\n res='%s[%s]'%(res,stuff)\n else :\n res=res+re.escape(c)\n return r'(?s:%s)\\Z'%res\n", ["functools", "os", "posixpath", "re"]], "formatter": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nimport sys\nimport warnings\nwarnings.warn('the formatter module is deprecated',DeprecationWarning,\nstacklevel=2)\n\n\nAS_IS=None\n\n\nclass NullFormatter:\n ''\n\n\n\n\n\n\n\n \n \n def __init__(self,writer=None ):\n if writer is None :\n writer=NullWriter()\n self.writer=writer\n def end_paragraph(self,blankline):pass\n def add_line_break(self):pass\n def add_hor_rule(self,*args,**kw):pass\n def add_label_data(self,format,counter,blankline=None ):pass\n def add_flowing_data(self,data):pass\n def add_literal_data(self,data):pass\n def flush_softspace(self):pass\n def push_alignment(self,align):pass\n def pop_alignment(self):pass\n def push_font(self,x):pass\n def pop_font(self):pass\n def push_margin(self,margin):pass\n def pop_margin(self):pass\n def set_spacing(self,spacing):pass\n def push_style(self,*styles):pass\n def pop_style(self,n=1):pass\n def assert_line_data(self,flag=1):pass\n \n \nclass AbstractFormatter:\n ''\n\n\n\n\n\n \n \n \n \n \n \n \n def __init__(self,writer):\n self.writer=writer\n self.align=None\n self.align_stack=[]\n self.font_stack=[]\n self.margin_stack=[]\n self.spacing=None\n self.style_stack=[]\n self.nospace=1\n self.softspace=0\n self.para_end=1\n self.parskip=0\n self.hard_break=1\n 
self.have_label=0\n \n def end_paragraph(self,blankline):\n if not self.hard_break:\n self.writer.send_line_break()\n self.have_label=0\n if self.parskip 0:\n label=label+self.format_letter(c,counter)\n elif c in 'iI':\n if counter >0:\n label=label+self.format_roman(c,counter)\n else :\n label=label+c\n return label\n \n def format_letter(self,case,counter):\n label=''\n while counter >0:\n counter,x=divmod(counter -1,26)\n \n \n \n s=chr(ord(case)+x)\n label=s+label\n return label\n \n def format_roman(self,case,counter):\n ones=['i','x','c','m']\n fives=['v','l','d']\n label,index='',0\n \n while counter >0:\n counter,x=divmod(counter,10)\n if x ==9:\n label=ones[index]+ones[index+1]+label\n elif x ==4:\n label=ones[index]+fives[index]+label\n else :\n if x >=5:\n s=fives[index]\n x=x -5\n else :\n s=''\n s=s+ones[index]*x\n label=s+label\n index=index+1\n if case =='I':\n return label.upper()\n return label\n \n def add_flowing_data(self,data):\n if not data:return\n prespace=data[:1].isspace()\n postspace=data[-1:].isspace()\n data=\" \".join(data.split())\n if self.nospace and not data:\n return\n elif prespace or self.softspace:\n if not data:\n if not self.nospace:\n self.softspace=1\n self.parskip=0\n return\n if not self.nospace:\n data=' '+data\n self.hard_break=self.nospace=self.para_end=\\\n self.parskip=self.have_label=0\n self.softspace=postspace\n self.writer.send_flowing_data(data)\n \n def add_literal_data(self,data):\n if not data:return\n if self.softspace:\n self.writer.send_flowing_data(\" \")\n self.hard_break=data[-1:]=='\\n'\n self.nospace=self.para_end=self.softspace=\\\n self.parskip=self.have_label=0\n self.writer.send_literal_data(data)\n \n def flush_softspace(self):\n if self.softspace:\n self.hard_break=self.para_end=self.parskip=\\\n self.have_label=self.softspace=0\n self.nospace=1\n self.writer.send_flowing_data(' ')\n \n def push_alignment(self,align):\n if align and align !=self.align:\n self.writer.new_alignment(align)\n self.align=align\n self.align_stack.append(align)\n else :\n self.align_stack.append(self.align)\n \n def pop_alignment(self):\n if self.align_stack:\n del self.align_stack[-1]\n if self.align_stack:\n self.align=align=self.align_stack[-1]\n self.writer.new_alignment(align)\n else :\n self.align=None\n self.writer.new_alignment(None )\n \n def push_font(self,font):\n size,i,b,tt=font\n if self.softspace:\n self.hard_break=self.para_end=self.softspace=0\n self.nospace=1\n self.writer.send_flowing_data(' ')\n if self.font_stack:\n csize,ci,cb,ctt=self.font_stack[-1]\n if size is AS_IS:size=csize\n if i is AS_IS:i=ci\n if b is AS_IS:b=cb\n if tt is AS_IS:tt=ctt\n font=(size,i,b,tt)\n self.font_stack.append(font)\n self.writer.new_font(font)\n \n def pop_font(self):\n if self.font_stack:\n del self.font_stack[-1]\n if self.font_stack:\n font=self.font_stack[-1]\n else :\n font=None\n self.writer.new_font(font)\n \n def push_margin(self,margin):\n self.margin_stack.append(margin)\n fstack=[m for m in self.margin_stack if m]\n if not margin and fstack:\n margin=fstack[-1]\n self.writer.new_margin(margin,len(fstack))\n \n def pop_margin(self):\n if self.margin_stack:\n del self.margin_stack[-1]\n fstack=[m for m in self.margin_stack if m]\n if fstack:\n margin=fstack[-1]\n else :\n margin=None\n self.writer.new_margin(margin,len(fstack))\n \n def set_spacing(self,spacing):\n self.spacing=spacing\n self.writer.new_spacing(spacing)\n \n def push_style(self,*styles):\n if self.softspace:\n self.hard_break=self.para_end=self.softspace=0\n 
self.nospace=1\n self.writer.send_flowing_data(' ')\n for style in styles:\n self.style_stack.append(style)\n self.writer.new_styles(tuple(self.style_stack))\n \n def pop_style(self,n=1):\n del self.style_stack[-n:]\n self.writer.new_styles(tuple(self.style_stack))\n \n def assert_line_data(self,flag=1):\n self.nospace=self.hard_break=not flag\n self.para_end=self.parskip=self.have_label=0\n \n \nclass NullWriter:\n ''\n\n\n\n\n\n \n def __init__(self):pass\n def flush(self):pass\n def new_alignment(self,align):pass\n def new_font(self,font):pass\n def new_margin(self,margin,level):pass\n def new_spacing(self,spacing):pass\n def new_styles(self,styles):pass\n def send_paragraph(self,blankline):pass\n def send_line_break(self):pass\n def send_hor_rule(self,*args,**kw):pass\n def send_label_data(self,data):pass\n def send_flowing_data(self,data):pass\n def send_literal_data(self,data):pass\n \n \nclass AbstractWriter(NullWriter):\n ''\n\n\n\n\n \n \n def new_alignment(self,align):\n print(\"new_alignment(%r)\"%(align,))\n \n def new_font(self,font):\n print(\"new_font(%r)\"%(font,))\n \n def new_margin(self,margin,level):\n print(\"new_margin(%r, %d)\"%(margin,level))\n \n def new_spacing(self,spacing):\n print(\"new_spacing(%r)\"%(spacing,))\n \n def new_styles(self,styles):\n print(\"new_styles(%r)\"%(styles,))\n \n def send_paragraph(self,blankline):\n print(\"send_paragraph(%r)\"%(blankline,))\n \n def send_line_break(self):\n print(\"send_line_break()\")\n \n def send_hor_rule(self,*args,**kw):\n print(\"send_hor_rule()\")\n \n def send_label_data(self,data):\n print(\"send_label_data(%r)\"%(data,))\n \n def send_flowing_data(self,data):\n print(\"send_flowing_data(%r)\"%(data,))\n \n def send_literal_data(self,data):\n print(\"send_literal_data(%r)\"%(data,))\n \n \nclass DumbWriter(NullWriter):\n ''\n\n\n\n\n\n \n \n def __init__(self,file=None ,maxcol=72):\n self.file=file or sys.stdout\n self.maxcol=maxcol\n NullWriter.__init__(self)\n self.reset()\n \n def reset(self):\n self.col=0\n self.atbreak=0\n \n def send_paragraph(self,blankline):\n self.file.write('\\n'*blankline)\n self.col=0\n self.atbreak=0\n \n def send_line_break(self):\n self.file.write('\\n')\n self.col=0\n self.atbreak=0\n \n def send_hor_rule(self,*args,**kw):\n self.file.write('\\n')\n self.file.write('-'*self.maxcol)\n self.file.write('\\n')\n self.col=0\n self.atbreak=0\n \n def send_literal_data(self,data):\n self.file.write(data)\n i=data.rfind('\\n')\n if i >=0:\n self.col=0\n data=data[i+1:]\n data=data.expandtabs()\n self.col=self.col+len(data)\n self.atbreak=0\n \n def send_flowing_data(self,data):\n if not data:return\n atbreak=self.atbreak or data[0].isspace()\n col=self.col\n maxcol=self.maxcol\n write=self.file.write\n for word in data.split():\n if atbreak:\n if col+len(word)>=maxcol:\n write('\\n')\n col=0\n else :\n write(' ')\n col=col+1\n write(word)\n col=col+len(word)\n atbreak=1\n self.col=col\n self.atbreak=data[-1].isspace()\n \n \ndef test(file=None ):\n w=DumbWriter()\n f=AbstractFormatter(w)\n if file is not None :\n fp=open(file)\n elif sys.argv[1:]:\n fp=open(sys.argv[1])\n else :\n fp=sys.stdin\n try :\n for line in fp:\n if line =='\\n':\n f.end_paragraph(1)\n else :\n f.add_flowing_data(line)\n finally :\n if fp is not sys.stdin:\n fp.close()\n f.end_paragraph(0)\n \n \nif __name__ =='__main__':\n test()\n", ["sys", "warnings"]], "fractions": [".py", "\n\n\n\"\"\"Fraction, infinite-precision, real numbers.\"\"\"\n\nfrom decimal import Decimal\nimport math\nimport numbers\nimport 
operator\nimport re\nimport sys\n\n__all__=['Fraction','gcd']\n\n\n\ndef gcd(a,b):\n ''\n\n\n\n \n import warnings\n warnings.warn('fractions.gcd() is deprecated. Use math.gcd() instead.',\n DeprecationWarning,2)\n if type(a)is int is type(b):\n if (b or a)<0:\n return -math.gcd(a,b)\n return math.gcd(a,b)\n return _gcd(a,b)\n \ndef _gcd(a,b):\n\n while b:\n a,b=b,a %b\n return a\n \n \n \n_PyHASH_MODULUS=sys.hash_info.modulus\n\n\n_PyHASH_INF=sys.hash_info.inf\n\n_RATIONAL_FORMAT=re.compile(r\"\"\"\n \\A\\s* # optional whitespace at the start, then\n (?P[-+]?) # an optional sign, then\n (?=\\d|\\.\\d) # lookahead for digit or .digit\n (?P\\d*) # numerator (possibly empty)\n (?: # followed by\n (?:/(?P\\d+))? # an optional denominator\n | # or\n (?:\\.(?P\\d*))? # an optional fractional part\n (?:E(?P[-+]?\\d+))? # and optional exponent\n )\n \\s*\\Z # and optional whitespace to finish\n\"\"\",re.VERBOSE |re.IGNORECASE)\n\n\nclass Fraction(numbers.Rational):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n __slots__=('_numerator','_denominator')\n \n \n def __new__(cls,numerator=0,denominator=None ,*,_normalize=True ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n self=super(Fraction,cls).__new__(cls)\n \n if denominator is None :\n if type(numerator)is int:\n self._numerator=numerator\n self._denominator=1\n return self\n \n elif isinstance(numerator,numbers.Rational):\n self._numerator=numerator.numerator\n self._denominator=numerator.denominator\n return self\n \n elif isinstance(numerator,(float,Decimal)):\n \n self._numerator,self._denominator=numerator.as_integer_ratio()\n return self\n \n elif isinstance(numerator,str):\n \n m=_RATIONAL_FORMAT.match(numerator)\n if m is None :\n raise ValueError('Invalid literal for Fraction: %r'%\n numerator)\n numerator=int(m.group('num')or '0')\n denom=m.group('denom')\n if denom:\n denominator=int(denom)\n else :\n denominator=1\n decimal=m.group('decimal')\n if decimal:\n scale=10 **len(decimal)\n numerator=numerator *scale+int(decimal)\n denominator *=scale\n exp=m.group('exp')\n if exp:\n exp=int(exp)\n if exp >=0:\n numerator *=10 **exp\n else :\n denominator *=10 **-exp\n if m.group('sign')=='-':\n numerator=-numerator\n \n else :\n raise TypeError(\"argument should be a string \"\n \"or a Rational instance\")\n \n elif type(numerator)is int is type(denominator):\n pass\n \n elif (isinstance(numerator,numbers.Rational)and\n isinstance(denominator,numbers.Rational)):\n numerator,denominator=(\n numerator.numerator *denominator.denominator,\n denominator.numerator *numerator.denominator\n )\n else :\n raise TypeError(\"both arguments should be \"\n \"Rational instances\")\n \n if denominator ==0:\n raise ZeroDivisionError('Fraction(%s, 0)'%numerator)\n if _normalize:\n if type(numerator)is int is type(denominator):\n \n g=math.gcd(numerator,denominator)\n if denominator <0:\n g=-g\n else :\n g=_gcd(numerator,denominator)\n numerator //=g\n denominator //=g\n self._numerator=numerator\n self._denominator=denominator\n return self\n \n @classmethod\n def from_float(cls,f):\n ''\n\n\n\n \n if isinstance(f,numbers.Integral):\n return cls(f)\n elif not isinstance(f,float):\n raise TypeError(\"%s.from_float() only takes floats, not %r (%s)\"%\n (cls.__name__,f,type(f).__name__))\n return cls(*f.as_integer_ratio())\n \n @classmethod\n def from_decimal(cls,dec):\n ''\n from decimal import Decimal\n if isinstance(dec,numbers.Integral):\n dec=Decimal(int(dec))\n elif not isinstance(dec,Decimal):\n raise TypeError(\n 
\"%s.from_decimal() only takes Decimals, not %r (%s)\"%\n (cls.__name__,dec,type(dec).__name__))\n return cls(*dec.as_integer_ratio())\n \n def limit_denominator(self,max_denominator=1000000):\n ''\n\n\n\n\n\n\n\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n if max_denominator <1:\n raise ValueError(\"max_denominator should be at least 1\")\n if self._denominator <=max_denominator:\n return Fraction(self)\n \n p0,q0,p1,q1=0,1,1,0\n n,d=self._numerator,self._denominator\n while True :\n a=n //d\n q2=q0+a *q1\n if q2 >max_denominator:\n break\n p0,q0,p1,q1=p1,q1,p0+a *p1,q2\n n,d=d,n -a *d\n \n k=(max_denominator -q0)//q1\n bound1=Fraction(p0+k *p1,q0+k *q1)\n bound2=Fraction(p1,q1)\n if abs(bound2 -self)<=abs(bound1 -self):\n return bound2\n else :\n return bound1\n \n @property\n def numerator(a):\n return a._numerator\n \n @property\n def denominator(a):\n return a._denominator\n \n def __repr__(self):\n ''\n return '%s(%s, %s)'%(self.__class__.__name__,\n self._numerator,self._denominator)\n \n def __str__(self):\n ''\n if self._denominator ==1:\n return str(self._numerator)\n else :\n return '%s/%s'%(self._numerator,self._denominator)\n \n def _operator_fallbacks(monomorphic_operator,fallback_operator):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n def forward(a,b):\n if isinstance(b,(int,Fraction)):\n return monomorphic_operator(a,b)\n elif isinstance(b,float):\n return fallback_operator(float(a),b)\n elif isinstance(b,complex):\n return fallback_operator(complex(a),b)\n else :\n return NotImplemented\n forward.__name__='__'+fallback_operator.__name__+'__'\n forward.__doc__=monomorphic_operator.__doc__\n \n def reverse(b,a):\n if isinstance(a,numbers.Rational):\n \n return monomorphic_operator(a,b)\n elif isinstance(a,numbers.Real):\n return fallback_operator(float(a),float(b))\n elif isinstance(a,numbers.Complex):\n return fallback_operator(complex(a),complex(b))\n else :\n return NotImplemented\n reverse.__name__='__r'+fallback_operator.__name__+'__'\n reverse.__doc__=monomorphic_operator.__doc__\n \n return forward,reverse\n \n def _add(a,b):\n ''\n da,db=a.denominator,b.denominator\n return Fraction(a.numerator *db+b.numerator *da,\n da *db)\n \n __add__,__radd__=_operator_fallbacks(_add,operator.add)\n \n def _sub(a,b):\n ''\n da,db=a.denominator,b.denominator\n return Fraction(a.numerator *db -b.numerator *da,\n da *db)\n \n __sub__,__rsub__=_operator_fallbacks(_sub,operator.sub)\n \n def _mul(a,b):\n ''\n return Fraction(a.numerator *b.numerator,a.denominator *b.denominator)\n \n __mul__,__rmul__=_operator_fallbacks(_mul,operator.mul)\n \n def _div(a,b):\n ''\n return Fraction(a.numerator *b.denominator,\n a.denominator *b.numerator)\n \n __truediv__,__rtruediv__=_operator_fallbacks(_div,operator.truediv)\n \n def __floordiv__(a,b):\n ''\n return math.floor(a /b)\n \n def __rfloordiv__(b,a):\n ''\n return math.floor(a /b)\n \n def __mod__(a,b):\n ''\n div=a //b\n return a -b *div\n \n def __rmod__(b,a):\n ''\n div=a //b\n return a -b *div\n \n def __pow__(a,b):\n ''\n\n\n\n\n\n \n if isinstance(b,numbers.Rational):\n if b.denominator ==1:\n power=b.numerator\n if power >=0:\n return Fraction(a._numerator **power,\n a._denominator **power,\n _normalize=False )\n elif a._numerator >=0:\n return Fraction(a._denominator **-power,\n a._numerator **-power,\n _normalize=False )\n else :\n return Fraction((-a._denominator)**-power,\n 
(-a._numerator)**-power,\n _normalize=False )\n else :\n \n \n return float(a)**float(b)\n else :\n return float(a)**b\n \n def __rpow__(b,a):\n ''\n if b._denominator ==1 and b._numerator >=0:\n \n return a **b._numerator\n \n if isinstance(a,numbers.Rational):\n return Fraction(a.numerator,a.denominator)**b\n \n if b._denominator ==1:\n return a **b._numerator\n \n return a **float(b)\n \n def __pos__(a):\n ''\n return Fraction(a._numerator,a._denominator,_normalize=False )\n \n def __neg__(a):\n ''\n return Fraction(-a._numerator,a._denominator,_normalize=False )\n \n def __abs__(a):\n ''\n return Fraction(abs(a._numerator),a._denominator,_normalize=False )\n \n def __trunc__(a):\n ''\n if a._numerator <0:\n return -(-a._numerator //a._denominator)\n else :\n return a._numerator //a._denominator\n \n def __floor__(a):\n ''\n return a.numerator //a.denominator\n \n def __ceil__(a):\n ''\n \n return -(-a.numerator //a.denominator)\n \n def __round__(self,ndigits=None ):\n ''\n\n\n \n if ndigits is None :\n floor,remainder=divmod(self.numerator,self.denominator)\n if remainder *2 self.denominator:\n return floor+1\n \n elif floor %2 ==0:\n return floor\n else :\n return floor+1\n shift=10 **abs(ndigits)\n \n \n \n if ndigits >0:\n return Fraction(round(self *shift),shift)\n else :\n return Fraction(round(self /shift)*shift)\n \n def __hash__(self):\n ''\n \n \n \n \n \n \n \n \n \n \n \n \n dinv=pow(self._denominator,_PyHASH_MODULUS -2,_PyHASH_MODULUS)\n if not dinv:\n hash_=_PyHASH_INF\n else :\n hash_=abs(self._numerator)*dinv %_PyHASH_MODULUS\n result=hash_ if self >=0 else -hash_\n return -2 if result ==-1 else result\n \n def __eq__(a,b):\n ''\n if type(b)is int:\n return a._numerator ==b and a._denominator ==1\n if isinstance(b,numbers.Rational):\n return (a._numerator ==b.numerator and\n a._denominator ==b.denominator)\n if isinstance(b,numbers.Complex)and b.imag ==0:\n b=b.real\n if isinstance(b,float):\n if math.isnan(b)or math.isinf(b):\n \n \n return 0.0 ==b\n else :\n return a ==a.from_float(b)\n else :\n \n \n return NotImplemented\n \n def _richcmp(self,other,op):\n ''\n\n\n\n\n\n\n\n \n \n if isinstance(other,numbers.Rational):\n return op(self._numerator *other.denominator,\n self._denominator *other.numerator)\n if isinstance(other,float):\n if math.isnan(other)or math.isinf(other):\n return op(0.0,other)\n else :\n return op(self,self.from_float(other))\n else :\n return NotImplemented\n \n def __lt__(a,b):\n ''\n return a._richcmp(b,operator.lt)\n \n def __gt__(a,b):\n ''\n return a._richcmp(b,operator.gt)\n \n def __le__(a,b):\n ''\n return a._richcmp(b,operator.le)\n \n def __ge__(a,b):\n ''\n return a._richcmp(b,operator.ge)\n \n def __bool__(a):\n ''\n return a._numerator !=0\n \n \n \n def __reduce__(self):\n return (self.__class__,(str(self),))\n \n def __copy__(self):\n if type(self)==Fraction:\n return self\n return self.__class__(self._numerator,self._denominator)\n \n def __deepcopy__(self,memo):\n if type(self)==Fraction:\n return self\n return self.__class__(self._numerator,self._denominator)\n", ["decimal", "math", "numbers", "operator", "re", "sys", "warnings"]], "functools": [".py", "''\n\n\n\n\n\n\n\n\n\n\n__all__=['update_wrapper','wraps','WRAPPER_ASSIGNMENTS','WRAPPER_UPDATES',\n'total_ordering','cmp_to_key','lru_cache','reduce','partial',\n'partialmethod','singledispatch']\n\ntry :\n from _functools import reduce\nexcept ImportError:\n pass\nfrom abc import get_cache_token\nfrom collections import namedtuple\n\nfrom reprlib import recursive_repr\nfrom 
_thread import RLock\n\n\n\n\n\n\n\n\n\nWRAPPER_ASSIGNMENTS=('__module__','__name__','__qualname__','__doc__',\n'__annotations__')\nWRAPPER_UPDATES=('__dict__',)\ndef update_wrapper(wrapper,\nwrapped,\nassigned=WRAPPER_ASSIGNMENTS,\nupdated=WRAPPER_UPDATES):\n ''\n\n\n\n\n\n\n\n\n\n \n for attr in assigned:\n try :\n value=getattr(wrapped,attr)\n except AttributeError:\n pass\n else :\n setattr(wrapper,attr,value)\n for attr in updated:\n getattr(wrapper,attr).update(getattr(wrapped,attr,{}))\n \n \n wrapper.__wrapped__=wrapped\n \n return wrapper\n \ndef wraps(wrapped,\nassigned=WRAPPER_ASSIGNMENTS,\nupdated=WRAPPER_UPDATES):\n ''\n\n\n\n\n\n\n \n return partial(update_wrapper,wrapped=wrapped,\n assigned=assigned,updated=updated)\n \n \n \n \n \n \n \n \n \n \n \ndef _gt_from_lt(self,other,NotImplemented=NotImplemented):\n ''\n op_result=self.__lt__(other)\n if op_result is NotImplemented:\n return op_result\n return not op_result and self !=other\n \ndef _le_from_lt(self,other,NotImplemented=NotImplemented):\n ''\n op_result=self.__lt__(other)\n return op_result or self ==other\n \ndef _ge_from_lt(self,other,NotImplemented=NotImplemented):\n ''\n op_result=self.__lt__(other)\n if op_result is NotImplemented:\n return op_result\n return not op_result\n \ndef _ge_from_le(self,other,NotImplemented=NotImplemented):\n ''\n op_result=self.__le__(other)\n if op_result is NotImplemented:\n return op_result\n return not op_result or self ==other\n \ndef _lt_from_le(self,other,NotImplemented=NotImplemented):\n ''\n op_result=self.__le__(other)\n if op_result is NotImplemented:\n return op_result\n return op_result and self !=other\n \ndef _gt_from_le(self,other,NotImplemented=NotImplemented):\n ''\n op_result=self.__le__(other)\n if op_result is NotImplemented:\n return op_result\n return not op_result\n \ndef _lt_from_gt(self,other,NotImplemented=NotImplemented):\n ''\n op_result=self.__gt__(other)\n if op_result is NotImplemented:\n return op_result\n return not op_result and self !=other\n \ndef _ge_from_gt(self,other,NotImplemented=NotImplemented):\n ''\n op_result=self.__gt__(other)\n return op_result or self ==other\n \ndef _le_from_gt(self,other,NotImplemented=NotImplemented):\n ''\n op_result=self.__gt__(other)\n if op_result is NotImplemented:\n return op_result\n return not op_result\n \ndef _le_from_ge(self,other,NotImplemented=NotImplemented):\n ''\n op_result=self.__ge__(other)\n if op_result is NotImplemented:\n return op_result\n return not op_result or self ==other\n \ndef _gt_from_ge(self,other,NotImplemented=NotImplemented):\n ''\n op_result=self.__ge__(other)\n if op_result is NotImplemented:\n return op_result\n return op_result and self !=other\n \ndef _lt_from_ge(self,other,NotImplemented=NotImplemented):\n ''\n op_result=self.__ge__(other)\n if op_result is NotImplemented:\n return op_result\n return not op_result\n \n_convert={\n'__lt__':[('__gt__',_gt_from_lt),\n('__le__',_le_from_lt),\n('__ge__',_ge_from_lt)],\n'__le__':[('__ge__',_ge_from_le),\n('__lt__',_lt_from_le),\n('__gt__',_gt_from_le)],\n'__gt__':[('__lt__',_lt_from_gt),\n('__ge__',_ge_from_gt),\n('__le__',_le_from_gt)],\n'__ge__':[('__le__',_le_from_ge),\n('__gt__',_gt_from_ge),\n('__lt__',_lt_from_ge)]\n}\n\ndef total_ordering(cls):\n ''\n \n roots={op for op in _convert if getattr(cls,op,None )is not getattr(object,op,None )}\n if not roots:\n raise ValueError('must define at least one ordering operation: < > <= >=')\n root=max(roots)\n for opname,opfunc in _convert[root]:\n if opname not in roots:\n 
opfunc.__name__=opname\n setattr(cls,opname,opfunc)\n return cls\n \n \n \n \n \n \ndef cmp_to_key(mycmp):\n ''\n class K(object):\n __slots__=['obj']\n def __init__(self,obj):\n self.obj=obj\n def __lt__(self,other):\n return mycmp(self.obj,other.obj)<0\n def __gt__(self,other):\n return mycmp(self.obj,other.obj)>0\n def __eq__(self,other):\n return mycmp(self.obj,other.obj)==0\n def __le__(self,other):\n return mycmp(self.obj,other.obj)<=0\n def __ge__(self,other):\n return mycmp(self.obj,other.obj)>=0\n __hash__=None\n return K\n \ntry :\n from _functools import cmp_to_key\nexcept ImportError:\n pass\n \n \n \n \n \n \n \nclass partial:\n ''\n\n \n \n __slots__=\"func\",\"args\",\"keywords\",\"__dict__\",\"__weakref__\"\n \n def __new__(*args,**keywords):\n if not args:\n raise TypeError(\"descriptor '__new__' of partial needs an argument\")\n if len(args)<2:\n raise TypeError(\"type 'partial' takes at least one argument\")\n cls,func,*args=args\n if not callable(func):\n raise TypeError(\"the first argument must be callable\")\n args=tuple(args)\n \n if hasattr(func,\"func\"):\n args=func.args+args\n tmpkw=func.keywords.copy()\n tmpkw.update(keywords)\n keywords=tmpkw\n del tmpkw\n func=func.func\n \n self=super(partial,cls).__new__(cls)\n \n self.func=func\n self.args=args\n self.keywords=keywords\n return self\n \n def __call__(*args,**keywords):\n if not args:\n raise TypeError(\"descriptor '__call__' of partial needs an argument\")\n self,*args=args\n newkeywords=self.keywords.copy()\n newkeywords.update(keywords)\n return self.func(*self.args,*args,**newkeywords)\n \n @recursive_repr()\n def __repr__(self):\n qualname=type(self).__qualname__\n args=[repr(self.func)]\n args.extend(repr(x)for x in self.args)\n args.extend(f\"{k}={v!r}\"for (k,v)in self.keywords.items())\n if type(self).__module__ ==\"functools\":\n return f\"functools.{qualname}({', '.join(args)})\"\n return f\"{qualname}({', '.join(args)})\"\n \n def __reduce__(self):\n return type(self),(self.func,),(self.func,self.args,\n self.keywords or None ,self.__dict__ or None )\n \n def __setstate__(self,state):\n if not isinstance(state,tuple):\n raise TypeError(\"argument to __setstate__ must be a tuple\")\n if len(state)!=4:\n raise TypeError(f\"expected 4 items in state, got {len(state)}\")\n func,args,kwds,namespace=state\n if (not callable(func)or not isinstance(args,tuple)or\n (kwds is not None and not isinstance(kwds,dict))or\n (namespace is not None and not isinstance(namespace,dict))):\n raise TypeError(\"invalid partial state\")\n \n args=tuple(args)\n if kwds is None :\n kwds={}\n elif type(kwds)is not dict:\n kwds=dict(kwds)\n if namespace is None :\n namespace={}\n \n self.__dict__=namespace\n self.func=func\n self.args=args\n self.keywords=kwds\n \ntry :\n from _functools import partial\nexcept ImportError:\n pass\n \n \nclass partialmethod(object):\n ''\n\n\n\n\n \n \n def __init__(self,func,*args,**keywords):\n if not callable(func)and not hasattr(func,\"__get__\"):\n raise TypeError(\"{!r} is not callable or a descriptor\"\n .format(func))\n \n \n \n if isinstance(func,partialmethod):\n \n \n \n self.func=func.func\n self.args=func.args+args\n self.keywords=func.keywords.copy()\n self.keywords.update(keywords)\n else :\n self.func=func\n self.args=args\n self.keywords=keywords\n \n def __repr__(self):\n args=\", \".join(map(repr,self.args))\n keywords=\", \".join(\"{}={!r}\".format(k,v)\n for k,v in self.keywords.items())\n format_string=\"{module}.{cls}({func}, {args}, {keywords})\"\n return 
format_string.format(module=self.__class__.__module__,\n cls=self.__class__.__qualname__,\n func=self.func,\n args=args,\n keywords=keywords)\n \n def _make_unbound_method(self):\n def _method(*args,**keywords):\n call_keywords=self.keywords.copy()\n call_keywords.update(keywords)\n cls_or_self,*rest=args\n call_args=(cls_or_self,)+self.args+tuple(rest)\n return self.func(*call_args,**call_keywords)\n _method.__isabstractmethod__=self.__isabstractmethod__\n _method._partialmethod=self\n return _method\n \n def __get__(self,obj,cls):\n get=getattr(self.func,\"__get__\",None )\n result=None\n if get is not None :\n new_func=get(obj,cls)\n if new_func is not self.func:\n \n \n result=partial(new_func,*self.args,**self.keywords)\n try :\n result.__self__=new_func.__self__\n except AttributeError:\n pass\n if result is None :\n \n \n result=self._make_unbound_method().__get__(obj,cls)\n return result\n \n @property\n def __isabstractmethod__(self):\n return getattr(self.func,\"__isabstractmethod__\",False )\n \n \n \n \n \n \n_CacheInfo=namedtuple(\"CacheInfo\",[\"hits\",\"misses\",\"maxsize\",\"currsize\"])\n\nclass _HashedSeq(list):\n ''\n\n\n\n \n \n __slots__='hashvalue'\n \n def __init__(self,tup,hash=hash):\n self[:]=tup\n self.hashvalue=hash(tup)\n \n def __hash__(self):\n return self.hashvalue\n \ndef _make_key(args,kwds,typed,\nkwd_mark=(object(),),\nfasttypes={int,str,frozenset,type(None )},\ntuple=tuple,type=type,len=len):\n ''\n\n\n\n\n\n\n\n\n \n \n \n \n \n key=args\n if kwds:\n key +=kwd_mark\n for item in kwds.items():\n key +=item\n if typed:\n key +=tuple(type(v)for v in args)\n if kwds:\n key +=tuple(type(v)for v in kwds.values())\n elif len(key)==1 and type(key[0])in fasttypes:\n return key[0]\n return _HashedSeq(key)\n \ndef lru_cache(maxsize=128,typed=False ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n \n \n \n \n \n \n \n \n if maxsize is not None and not isinstance(maxsize,int):\n raise TypeError('Expected maxsize to be an integer or None')\n \n def decorating_function(user_function):\n wrapper=_lru_cache_wrapper(user_function,maxsize,typed,_CacheInfo)\n return update_wrapper(wrapper,user_function)\n \n return decorating_function\n \ndef _lru_cache_wrapper(user_function,maxsize,typed,_CacheInfo):\n\n sentinel=object()\n make_key=_make_key\n PREV,NEXT,KEY,RESULT=0,1,2,3\n \n cache={}\n hits=misses=0\n full=False\n cache_get=cache.get\n cache_len=cache.__len__\n lock=RLock()\n root=[]\n root[:]=[root,root,None ,None ]\n \n if maxsize ==0:\n \n def wrapper(*args,**kwds):\n \n nonlocal misses\n result=user_function(*args,**kwds)\n misses +=1\n return result\n \n elif maxsize is None :\n \n def wrapper(*args,**kwds):\n \n nonlocal hits,misses\n key=make_key(args,kwds,typed)\n result=cache_get(key,sentinel)\n if result is not sentinel:\n hits +=1\n return result\n result=user_function(*args,**kwds)\n cache[key]=result\n misses +=1\n return result\n \n else :\n \n def wrapper(*args,**kwds):\n \n nonlocal root,hits,misses,full\n key=make_key(args,kwds,typed)\n with lock:\n link=cache_get(key)\n if link is not None :\n \n link_prev,link_next,_key,result=link\n link_prev[NEXT]=link_next\n link_next[PREV]=link_prev\n last=root[PREV]\n last[NEXT]=root[PREV]=link\n link[PREV]=last\n link[NEXT]=root\n hits +=1\n return result\n result=user_function(*args,**kwds)\n with lock:\n if key in cache:\n \n \n \n \n pass\n elif full:\n \n oldroot=root\n oldroot[KEY]=key\n oldroot[RESULT]=result\n \n \n \n \n \n \n root=oldroot[NEXT]\n oldkey=root[KEY]\n oldresult=root[RESULT]\n 
root[KEY]=root[RESULT]=None\n \n del cache[oldkey]\n \n \n \n cache[key]=oldroot\n else :\n \n last=root[PREV]\n link=[last,root,key,result]\n last[NEXT]=root[PREV]=cache[key]=link\n \n \n full=(cache_len()>=maxsize)\n misses +=1\n return result\n \n def cache_info():\n ''\n with lock:\n return _CacheInfo(hits,misses,maxsize,cache_len())\n \n def cache_clear():\n ''\n nonlocal hits,misses,full\n with lock:\n cache.clear()\n root[:]=[root,root,None ,None ]\n hits=misses=0\n full=False\n \n wrapper.cache_info=cache_info\n wrapper.cache_clear=cache_clear\n return wrapper\n \ntry :\n from _functools import _lru_cache_wrapper\nexcept ImportError:\n pass\n \n \n \n \n \n \ndef _c3_merge(sequences):\n ''\n\n\n\n \n result=[]\n while True :\n sequences=[s for s in sequences if s]\n if not sequences:\n return result\n for s1 in sequences:\n candidate=s1[0]\n for s2 in sequences:\n if candidate in s2[1:]:\n candidate=None\n break\n else :\n break\n if candidate is None :\n raise RuntimeError(\"Inconsistent hierarchy\")\n result.append(candidate)\n \n for seq in sequences:\n if seq[0]==candidate:\n del seq[0]\n \ndef _c3_mro(cls,abcs=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n for i,base in enumerate(reversed(cls.__bases__)):\n if hasattr(base,'__abstractmethods__'):\n boundary=len(cls.__bases__)-i\n break\n else :\n boundary=0\n abcs=list(abcs)if abcs else []\n explicit_bases=list(cls.__bases__[:boundary])\n abstract_bases=[]\n other_bases=list(cls.__bases__[boundary:])\n for base in abcs:\n if issubclass(cls,base)and not any(\n issubclass(b,base)for b in cls.__bases__\n ):\n \n \n abstract_bases.append(base)\n for base in abstract_bases:\n abcs.remove(base)\n explicit_c3_mros=[_c3_mro(base,abcs=abcs)for base in explicit_bases]\n abstract_c3_mros=[_c3_mro(base,abcs=abcs)for base in abstract_bases]\n other_c3_mros=[_c3_mro(base,abcs=abcs)for base in other_bases]\n return _c3_merge(\n [[cls]]+\n explicit_c3_mros+abstract_c3_mros+other_c3_mros+\n [explicit_bases]+[abstract_bases]+[other_bases]\n )\n \ndef _compose_mro(cls,types):\n ''\n\n\n\n\n \n bases=set(cls.__mro__)\n \n def is_related(typ):\n return (typ not in bases and hasattr(typ,'__mro__')\n and issubclass(cls,typ))\n types=[n for n in types if is_related(n)]\n \n \n def is_strict_base(typ):\n for other in types:\n if typ !=other and typ in other.__mro__:\n return True\n return False\n types=[n for n in types if not is_strict_base(n)]\n \n \n type_set=set(types)\n mro=[]\n for typ in types:\n found=[]\n for sub in typ.__subclasses__():\n if sub not in bases and issubclass(cls,sub):\n found.append([s for s in sub.__mro__ if s in type_set])\n if not found:\n mro.append(typ)\n continue\n \n found.sort(key=len,reverse=True )\n for sub in found:\n for subcls in sub:\n if subcls not in mro:\n mro.append(subcls)\n return _c3_mro(cls,abcs=mro)\n \ndef _find_impl(cls,registry):\n ''\n\n\n\n\n\n\n\n \n mro=_compose_mro(cls,registry.keys())\n match=None\n for t in mro:\n if match is not None :\n \n \n if (t in registry and t not in cls.__mro__\n and match not in cls.__mro__\n and not issubclass(match,t)):\n raise RuntimeError(\"Ambiguous dispatch: {} or {}\".format(\n match,t))\n break\n if t in registry:\n match=t\n return registry.get(match)\n \ndef singledispatch(func):\n ''\n\n\n\n\n\n\n \n \n \n \n import types,weakref\n \n registry={}\n dispatch_cache=weakref.WeakKeyDictionary()\n cache_token=None\n \n def dispatch(cls):\n ''\n\n\n\n\n \n nonlocal cache_token\n if cache_token is not None :\n current_token=get_cache_token()\n if cache_token 
!=current_token:\n dispatch_cache.clear()\n cache_token=current_token\n try :\n impl=dispatch_cache[cls]\n except KeyError:\n try :\n impl=registry[cls]\n except KeyError:\n impl=_find_impl(cls,registry)\n dispatch_cache[cls]=impl\n return impl\n \n def register(cls,func=None ):\n ''\n\n\n\n \n nonlocal cache_token\n if func is None :\n if isinstance(cls,type):\n return lambda f:register(cls,f)\n ann=getattr(cls,'__annotations__',{})\n if not ann:\n raise TypeError(\n f\"Invalid first argument to `register()`: {cls!r}. \"\n f\"Use either `@register(some_class)` or plain `@register` \"\n f\"on an annotated function.\"\n )\n func=cls\n \n \n from typing import get_type_hints\n argname,cls=next(iter(get_type_hints(func).items()))\n assert isinstance(cls,type),(\n f\"Invalid annotation for {argname!r}. {cls!r} is not a class.\"\n )\n registry[cls]=func\n if cache_token is None and hasattr(cls,'__abstractmethods__'):\n cache_token=get_cache_token()\n dispatch_cache.clear()\n return func\n \n def wrapper(*args,**kw):\n return dispatch(args[0].__class__)(*args,**kw)\n \n registry[object]=func\n wrapper.register=register\n wrapper.dispatch=dispatch\n wrapper.registry=types.MappingProxyType(registry)\n wrapper._clear_cache=dispatch_cache.clear\n update_wrapper(wrapper,func)\n return wrapper\n", ["_functools", "_thread", "abc", "collections", "reprlib", "types", "typing", "weakref"]], "gc": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nDEBUG_COLLECTABLE=2\n\nDEBUG_LEAK=38\n\nDEBUG_SAVEALL=32\n\nDEBUG_STATS=1\n\nDEBUG_UNCOLLECTABLE=4\n\nclass __loader__:\n pass\n \ncallbacks=[]\n\ndef collect(*args,**kw):\n ''\n\n\n\n\n\n \n pass\n \ndef disable(*args,**kw):\n ''\n\n \n pass\n \ndef enable(*args,**kw):\n ''\n\n \n pass\n \ngarbage=[]\n\ndef get_count(*args,**kw):\n ''\n\n \n pass\n \ndef get_debug(*args,**kw):\n ''\n\n \n pass\n \ndef get_objects(*args,**kw):\n ''\n\n\n \n pass\n \ndef get_referents(*args,**kw):\n ''\n pass\n \ndef get_referrers(*args,**kw):\n ''\n pass\n \ndef get_threshold(*args,**kw):\n ''\n\n \n pass\n \ndef is_tracked(*args,**kw):\n ''\n\n\n \n pass\n \ndef isenabled(*args,**kw):\n ''\n\n \n pass\n \ndef set_debug(*args,**kw):\n ''\n\n\n\n\n\n\n\n\n\n\n \n pass\n \ndef set_threshold(*args,**kw):\n ''\n\n\n \n pass\n", []], "genericpath": [".py", "''\n\n\n\n\nimport os\nimport stat\n\n__all__=['commonprefix','exists','getatime','getctime','getmtime',\n'getsize','isdir','isfile','samefile','sameopenfile',\n'samestat']\n\n\n\n\ndef exists(path):\n ''\n try :\n os.stat(path)\n except OSError:\n return False\n return True\n \n \n \n \ndef isfile(path):\n ''\n try :\n st=os.stat(path)\n except OSError:\n return False\n return stat.S_ISREG(st.st_mode)\n \n \n \n \n \ndef isdir(s):\n ''\n try :\n st=os.stat(s)\n except OSError:\n return False\n return stat.S_ISDIR(st.st_mode)\n \n \ndef getsize(filename):\n ''\n return os.stat(filename).st_size\n \n \ndef getmtime(filename):\n ''\n return os.stat(filename).st_mtime\n \n \ndef getatime(filename):\n ''\n return os.stat(filename).st_atime\n \n \ndef getctime(filename):\n ''\n return os.stat(filename).st_ctime\n \n \n \ndef commonprefix(m):\n ''\n if not m:return ''\n \n \n \n \n if not isinstance(m[0],(list,tuple)):\n m=tuple(map(os.fspath,m))\n s1=min(m)\n s2=max(m)\n for i,c in enumerate(s1):\n if c !=s2[i]:\n return s1[:i]\n return s1\n \n \n \ndef samestat(s1,s2):\n ''\n return (s1.st_ino ==s2.st_ino and\n s1.st_dev ==s2.st_dev)\n \n \n \ndef samefile(f1,f2):\n ''\n s1=os.stat(f1)\n s2=os.stat(f2)\n return samestat(s1,s2)\n \n \n \n 
\ndef sameopenfile(fp1,fp2):\n ''\n s1=os.fstat(fp1)\n s2=os.fstat(fp2)\n return samestat(s1,s2)\n \n \n \n \n \n \n \n \n \ndef _splitext(p,sep,altsep,extsep):\n ''\n\n\n \n \n \n sepIndex=p.rfind(sep)\n if altsep:\n altsepIndex=p.rfind(altsep)\n sepIndex=max(sepIndex,altsepIndex)\n \n dotIndex=p.rfind(extsep)\n if dotIndex >sepIndex:\n \n filenameIndex=sepIndex+1\n while filenameIndex [ \\t]+) | # spaces and horizontal tabs\n (?P[0-9]+\\b) | # decimal integer\n (?Pn\\b) | # only n is allowed\n (?P[()]) |\n (?P[-*/%+?:]|[>,\n # <=, >=, ==, !=, &&, ||,\n # ? :\n # unary and bitwise ops\n # not allowed\n (?P\\w+|.) # invalid token\n \"\"\",re.VERBOSE |re.DOTALL)\n\ndef _tokenize(plural):\n for mo in re.finditer(_token_pattern,plural):\n kind=mo.lastgroup\n if kind =='WHITESPACES':\n continue\n value=mo.group(kind)\n if kind =='INVALID':\n raise ValueError('invalid token in plural form: %s'%value)\n yield value\n yield ''\n \ndef _error(value):\n if value:\n return ValueError('unexpected token in plural form: %s'%value)\n else :\n return ValueError('unexpected end of plural form')\n \n_binary_ops=(\n('||',),\n('&&',),\n('==','!='),\n('<','>','<=','>='),\n('+','-'),\n('*','/','%'),\n)\n_binary_ops={op:i for i,ops in enumerate(_binary_ops,1)for op in ops}\n_c2py_ops={'||':'or','&&':'and','/':'//'}\n\ndef _parse(tokens,priority=-1):\n result=''\n nexttok=next(tokens)\n while nexttok =='!':\n result +='not '\n nexttok=next(tokens)\n \n if nexttok =='(':\n sub,nexttok=_parse(tokens)\n result='%s(%s)'%(result,sub)\n if nexttok !=')':\n raise ValueError('unbalanced parenthesis in plural form')\n elif nexttok =='n':\n result='%s%s'%(result,nexttok)\n else :\n try :\n value=int(nexttok,10)\n except ValueError:\n raise _error(nexttok)from None\n result='%s%d'%(result,value)\n nexttok=next(tokens)\n \n j=100\n while nexttok in _binary_ops:\n i=_binary_ops[nexttok]\n if i 1000:\n raise ValueError('plural form expression is too long')\n try :\n result,nexttok=_parse(_tokenize(plural))\n if nexttok:\n raise _error(nexttok)\n \n depth=0\n for c in result:\n if c =='(':\n depth +=1\n if depth >20:\n \n \n raise ValueError('plural form expression is too complex')\n elif c ==')':\n depth -=1\n \n ns={'_as_int':_as_int}\n exec('''if True:\n def func(n):\n if not isinstance(n, int):\n n = _as_int(n)\n return int(%s)\n '''%result,ns)\n return ns['func']\n except RecursionError:\n \n raise ValueError('plural form expression is too complex')\n \n \ndef _expand_lang(loc):\n loc=locale.normalize(loc)\n COMPONENT_CODESET=1 <<0\n COMPONENT_TERRITORY=1 <<1\n COMPONENT_MODIFIER=1 <<2\n \n mask=0\n pos=loc.find('@')\n if pos >=0:\n modifier=loc[pos:]\n loc=loc[:pos]\n mask |=COMPONENT_MODIFIER\n else :\n modifier=''\n pos=loc.find('.')\n if pos >=0:\n codeset=loc[pos:]\n loc=loc[:pos]\n mask |=COMPONENT_CODESET\n else :\n codeset=''\n pos=loc.find('_')\n if pos >=0:\n territory=loc[pos:]\n loc=loc[:pos]\n mask |=COMPONENT_TERRITORY\n else :\n territory=''\n language=loc\n ret=[]\n for i in range(mask+1):\n if not (i&~mask):\n val=language\n if i&COMPONENT_TERRITORY:val +=territory\n if i&COMPONENT_CODESET:val +=codeset\n if i&COMPONENT_MODIFIER:val +=modifier\n ret.append(val)\n ret.reverse()\n return ret\n \n \n \nclass NullTranslations:\n def __init__(self,fp=None ):\n self._info={}\n self._charset=None\n self._output_charset=None\n self._fallback=None\n if fp is not None :\n self._parse(fp)\n \n def _parse(self,fp):\n pass\n \n def add_fallback(self,fallback):\n if self._fallback:\n 
self._fallback.add_fallback(fallback)\n else :\n self._fallback=fallback\n \n def gettext(self,message):\n if self._fallback:\n return self._fallback.gettext(message)\n return message\n \n def lgettext(self,message):\n if self._fallback:\n return self._fallback.lgettext(message)\n if self._output_charset:\n return message.encode(self._output_charset)\n return message.encode(locale.getpreferredencoding())\n \n def ngettext(self,msgid1,msgid2,n):\n if self._fallback:\n return self._fallback.ngettext(msgid1,msgid2,n)\n if n ==1:\n return msgid1\n else :\n return msgid2\n \n def lngettext(self,msgid1,msgid2,n):\n if self._fallback:\n return self._fallback.lngettext(msgid1,msgid2,n)\n if n ==1:\n tmsg=msgid1\n else :\n tmsg=msgid2\n if self._output_charset:\n return tmsg.encode(self._output_charset)\n return tmsg.encode(locale.getpreferredencoding())\n \n def info(self):\n return self._info\n \n def charset(self):\n return self._charset\n \n def output_charset(self):\n return self._output_charset\n \n def set_output_charset(self,charset):\n self._output_charset=charset\n \n def install(self,names=None ):\n import builtins\n builtins.__dict__['_']=self.gettext\n if hasattr(names,\"__contains__\"):\n if \"gettext\"in names:\n builtins.__dict__['gettext']=builtins.__dict__['_']\n if \"ngettext\"in names:\n builtins.__dict__['ngettext']=self.ngettext\n if \"lgettext\"in names:\n builtins.__dict__['lgettext']=self.lgettext\n if \"lngettext\"in names:\n builtins.__dict__['lngettext']=self.lngettext\n \n \nclass GNUTranslations(NullTranslations):\n\n LE_MAGIC=0x950412de\n BE_MAGIC=0xde120495\n \n \n VERSIONS=(0,1)\n \n def _get_versions(self,version):\n ''\n return (version >>16,version&0xffff)\n \n def _parse(self,fp):\n ''\n \n \n from struct import unpack\n filename=getattr(fp,'name','')\n \n \n self._catalog=catalog={}\n self.plural=lambda n:int(n !=1)\n buf=fp.read()\n buflen=len(buf)\n \n magic=unpack('4I',buf[4:20])\n ii='>II'\n else :\n raise OSError(0,'Bad magic number',filename)\n \n major_version,minor_version=self._get_versions(version)\n \n if major_version not in self.VERSIONS:\n raise OSError(0,'Bad version number '+str(major_version),filename)\n \n \n \n for i in range(0,msgcount):\n mlen,moff=unpack(ii,buf[masteridx:masteridx+8])\n mend=moff+mlen\n tlen,toff=unpack(ii,buf[transidx:transidx+8])\n tend=toff+tlen\n if mend startpos:\n parentpos=(pos -1)>>1\n parent=heap[parentpos]\n if newitem startpos:\n parentpos=(pos -1)>>1\n parent=heap[parentpos]\n if parent 1:\n try :\n while True :\n value,order,next=s=h[0]\n yield value\n s[0]=next()\n _heapreplace(h,s)\n except StopIteration:\n _heappop(h)\n if h:\n \n value,order,next=h[0]\n yield value\n yield from next.__self__\n return\n \n for order,it in enumerate(map(iter,iterables)):\n try :\n next=it.__next__\n value=next()\n h_append([key(value),order *direction,value,next])\n except StopIteration:\n pass\n _heapify(h)\n while len(h)>1:\n try :\n while True :\n key_value,order,value,next=s=h[0]\n yield value\n value=next()\n s[0]=key(value)\n s[2]=value\n _heapreplace(h,s)\n except StopIteration:\n _heappop(h)\n if h:\n key_value,order,value,next=h[0]\n yield value\n yield from next.__self__\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \ndef nsmallest(n,iterable,key=None ):\n ''\n\n\n \n \n \n if n ==1:\n it=iter(iterable)\n sentinel=object()\n if key is None :\n 
result=min(it,default=sentinel)\n else :\n result=min(it,default=sentinel,key=key)\n return []if result is sentinel else [result]\n \n \n try :\n size=len(iterable)\n except (TypeError,AttributeError):\n pass\n else :\n if n >=size:\n return sorted(iterable,key=key)[:n]\n \n \n if key is None :\n it=iter(iterable)\n \n \n result=[(elem,i)for i,elem in zip(range(n),it)]\n if not result:\n return result\n _heapify_max(result)\n top=result[0][0]\n order=n\n _heapreplace=_heapreplace_max\n for elem in it:\n if elem =size:\n return sorted(iterable,key=key,reverse=True )[:n]\n \n \n if key is None :\n it=iter(iterable)\n result=[(elem,i)for i,elem in zip(range(0,-n,-1),it)]\n if not result:\n return result\n heapify(result)\n top=result[0][0]\n order=-n\n _heapreplace=heapreplace\n for elem in it:\n if top ',\n'Yury Selivanov ')\n\nimport abc\nimport dis\nimport collections.abc\nimport enum\nimport importlib.machinery\nimport itertools\nimport linecache\nimport os\nimport re\nimport sys\nimport tokenize\nimport token\nimport types\nimport warnings\nimport functools\nimport builtins\nfrom operator import attrgetter\nfrom collections import namedtuple,OrderedDict\n\n\n\nmod_dict=globals()\nfor k,v in dis.COMPILER_FLAG_NAMES.items():\n mod_dict[\"CO_\"+v]=k\n \n \nTPFLAGS_IS_ABSTRACT=1 <<20\n\n\ndef ismodule(object):\n ''\n\n\n\n\n \n return isinstance(object,types.ModuleType)\n \ndef isclass(object):\n ''\n\n\n\n \n return isinstance(object,type)\n \ndef ismethod(object):\n ''\n\n\n\n\n\n \n return isinstance(object,types.MethodType)\n \ndef ismethoddescriptor(object):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n if isclass(object)or ismethod(object)or isfunction(object):\n \n return False\n tp=type(object)\n return hasattr(tp,\"__get__\")and not hasattr(tp,\"__set__\")\n \ndef isdatadescriptor(object):\n ''\n\n\n\n\n\n \n if isclass(object)or ismethod(object)or isfunction(object):\n \n return False\n tp=type(object)\n return hasattr(tp,\"__set__\")and hasattr(tp,\"__get__\")\n \nif hasattr(types,'MemberDescriptorType'):\n\n def ismemberdescriptor(object):\n ''\n\n\n \n return isinstance(object,types.MemberDescriptorType)\nelse :\n\n def ismemberdescriptor(object):\n ''\n\n\n \n return False\n \nif hasattr(types,'GetSetDescriptorType'):\n\n def isgetsetdescriptor(object):\n ''\n\n\n \n return isinstance(object,types.GetSetDescriptorType)\nelse :\n\n def isgetsetdescriptor(object):\n ''\n\n\n \n return False\n \ndef isfunction(object):\n ''\n\n\n\n\n\n\n\n\n \n return isinstance(object,types.FunctionType)\n \ndef isgeneratorfunction(object):\n ''\n\n\n \n return bool((isfunction(object)or ismethod(object))and\n object.__code__.co_flags&CO_GENERATOR)\n \ndef iscoroutinefunction(object):\n ''\n\n\n \n return bool((isfunction(object)or ismethod(object))and\n object.__code__.co_flags&CO_COROUTINE)\n \ndef isasyncgenfunction(object):\n ''\n\n\n\n \n return bool((isfunction(object)or ismethod(object))and\n object.__code__.co_flags&CO_ASYNC_GENERATOR)\n \ndef isasyncgen(object):\n ''\n return isinstance(object,types.AsyncGeneratorType)\n \ndef isgenerator(object):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n return isinstance(object,types.GeneratorType)\n \ndef iscoroutine(object):\n ''\n return isinstance(object,types.CoroutineType)\n \ndef isawaitable(object):\n ''\n return (isinstance(object,types.CoroutineType)or\n isinstance(object,types.GeneratorType)and\n bool(object.gi_code.co_flags&CO_ITERABLE_COROUTINE)or\n isinstance(object,collections.abc.Awaitable))\n \ndef istraceback(object):\n ''\n\n\n\n\n\n \n return 
isinstance(object,types.TracebackType)\n \ndef isframe(object):\n ''\n\n\n\n\n\n\n\n\n\n \n return isinstance(object,types.FrameType)\n \ndef iscode(object):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n return isinstance(object,types.CodeType)\n \ndef isbuiltin(object):\n ''\n\n\n\n\n \n return isinstance(object,types.BuiltinFunctionType)\n \ndef isroutine(object):\n ''\n return (isbuiltin(object)\n or isfunction(object)\n or ismethod(object)\n or ismethoddescriptor(object))\n \ndef isabstract(object):\n ''\n if not isinstance(object,type):\n return False\n if object.__flags__&TPFLAGS_IS_ABSTRACT:\n return True\n if not issubclass(type(object),abc.ABCMeta):\n return False\n if hasattr(object,'__abstractmethods__'):\n \n \n return False\n \n \n for name,value in object.__dict__.items():\n if getattr(value,\"__isabstractmethod__\",False ):\n return True\n for base in object.__bases__:\n for name in getattr(base,\"__abstractmethods__\",()):\n value=getattr(object,name,None )\n if getattr(value,\"__isabstractmethod__\",False ):\n return True\n return False\n \ndef getmembers(object,predicate=None ):\n ''\n \n if isclass(object):\n mro=(object,)+getmro(object)\n else :\n mro=()\n results=[]\n processed=set()\n names=dir(object)\n \n \n \n try :\n for base in object.__bases__:\n for k,v in base.__dict__.items():\n if isinstance(v,types.DynamicClassAttribute):\n names.append(k)\n except AttributeError:\n pass\n for key in names:\n \n \n \n try :\n value=getattr(object,key)\n \n if key in processed:\n raise AttributeError\n except AttributeError:\n for base in mro:\n if key in base.__dict__:\n value=base.__dict__[key]\n break\n else :\n \n \n continue\n if not predicate or predicate(value):\n results.append((key,value))\n processed.add(key)\n results.sort(key=lambda pair:pair[0])\n return results\n \nAttribute=namedtuple('Attribute','name kind defining_class object')\n\ndef classify_class_attrs(cls):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n mro=getmro(cls)\n metamro=getmro(type(cls))\n metamro=tuple(cls for cls in metamro if cls not in (type,object))\n class_bases=(cls,)+mro\n all_bases=class_bases+metamro\n names=dir(cls)\n \n \n \n for base in mro:\n for k,v in base.__dict__.items():\n if isinstance(v,types.DynamicClassAttribute):\n names.append(k)\n result=[]\n processed=set()\n \n for name in names:\n \n \n \n \n \n \n \n \n \n homecls=None\n get_obj=None\n dict_obj=None\n if name not in processed:\n try :\n if name =='__dict__':\n raise Exception(\"__dict__ is special, don't want the proxy\")\n get_obj=getattr(cls,name)\n except Exception as exc:\n pass\n else :\n homecls=getattr(get_obj,\"__objclass__\",homecls)\n if homecls not in class_bases:\n \n \n homecls=None\n last_cls=None\n \n for srch_cls in class_bases:\n srch_obj=getattr(srch_cls,name,None )\n if srch_obj is get_obj:\n last_cls=srch_cls\n \n for srch_cls in metamro:\n try :\n srch_obj=srch_cls.__getattr__(cls,name)\n except AttributeError:\n continue\n if srch_obj is get_obj:\n last_cls=srch_cls\n if last_cls is not None :\n homecls=last_cls\n for base in all_bases:\n if name in base.__dict__:\n dict_obj=base.__dict__[name]\n if homecls not in metamro:\n homecls=base\n break\n if homecls is None :\n \n \n continue\n obj=get_obj if get_obj is not None else dict_obj\n \n if isinstance(dict_obj,(staticmethod,types.BuiltinMethodType)):\n kind=\"static method\"\n obj=dict_obj\n elif isinstance(dict_obj,(classmethod,types.ClassMethodDescriptorType)):\n kind=\"class method\"\n obj=dict_obj\n elif 
isinstance(dict_obj,property):\n kind=\"property\"\n obj=dict_obj\n elif isroutine(obj):\n kind=\"method\"\n else :\n kind=\"data\"\n result.append(Attribute(name,kind,homecls,obj))\n processed.add(name)\n return result\n \n \n \ndef getmro(cls):\n ''\n return cls.__mro__\n \n \n \ndef unwrap(func,*,stop=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if stop is None :\n def _is_wrapper(f):\n return hasattr(f,'__wrapped__')\n else :\n def _is_wrapper(f):\n return hasattr(f,'__wrapped__')and not stop(f)\n f=func\n \n \n memo={id(f):f}\n recursion_limit=sys.getrecursionlimit()\n while _is_wrapper(func):\n func=func.__wrapped__\n id_func=id(func)\n if (id_func in memo)or (len(memo)>=recursion_limit):\n raise ValueError('wrapper loop when unwrapping {!r}'.format(f))\n memo[id_func]=func\n return func\n \n \ndef indentsize(line):\n ''\n expline=line.expandtabs()\n return len(expline)-len(expline.lstrip())\n \ndef _findclass(func):\n cls=sys.modules.get(func.__module__)\n if cls is None :\n return None\n for name in func.__qualname__.split('.')[:-1]:\n cls=getattr(cls,name)\n if not isclass(cls):\n return None\n return cls\n \ndef _finddoc(obj):\n if isclass(obj):\n for base in obj.__mro__:\n if base is not object:\n try :\n doc=base.__doc__\n except AttributeError:\n continue\n if doc is not None :\n return doc\n return None\n \n if ismethod(obj):\n name=obj.__func__.__name__\n self=obj.__self__\n if (isclass(self)and\n getattr(getattr(self,name,None ),'__func__')is obj.__func__):\n \n cls=self\n else :\n cls=self.__class__\n elif isfunction(obj):\n name=obj.__name__\n cls=_findclass(obj)\n if cls is None or getattr(cls,name)is not obj:\n return None\n elif isbuiltin(obj):\n name=obj.__name__\n self=obj.__self__\n if (isclass(self)and\n self.__qualname__+'.'+name ==obj.__qualname__):\n \n cls=self\n else :\n cls=self.__class__\n \n elif isinstance(obj,property):\n func=obj.fget\n name=func.__name__\n cls=_findclass(func)\n if cls is None or getattr(cls,name)is not obj:\n return None\n elif ismethoddescriptor(obj)or isdatadescriptor(obj):\n name=obj.__name__\n cls=obj.__objclass__\n if getattr(cls,name)is not obj:\n return None\n else :\n return None\n \n for base in cls.__mro__:\n try :\n doc=getattr(base,name).__doc__\n except AttributeError:\n continue\n if doc is not None :\n return doc\n return None\n \ndef getdoc(object):\n ''\n\n\n\n \n try :\n doc=object.__doc__\n except AttributeError:\n return None\n if doc is None :\n try :\n doc=_finddoc(object)\n except (AttributeError,TypeError):\n return None\n if not isinstance(doc,str):\n return None\n return cleandoc(doc)\n \ndef cleandoc(doc):\n ''\n\n\n \n try :\n lines=doc.expandtabs().split('\\n')\n except UnicodeError:\n return None\n else :\n \n margin=sys.maxsize\n for line in lines[1:]:\n content=len(line.lstrip())\n if content:\n indent=len(line)-content\n margin=min(margin,indent)\n \n if lines:\n lines[0]=lines[0].lstrip()\n if margin ')):\n raise OSError('source code not available')\n \n module=getmodule(object,file)\n if module:\n lines=linecache.getlines(file,module.__dict__)\n else :\n lines=linecache.getlines(file)\n if not lines:\n raise OSError('could not get source code')\n \n if ismodule(object):\n return lines,0\n \n if isclass(object):\n name=object.__name__\n pat=re.compile(r'^(\\s*)class\\s*'+name+r'\\b')\n \n \n \n candidates=[]\n for i in range(len(lines)):\n match=pat.match(lines[i])\n if match:\n \n if lines[i][0]=='c':\n return lines,i\n \n candidates.append((match.group(1),i))\n if candidates:\n \n \n 
candidates.sort()\n return lines,candidates[0][1]\n else :\n raise OSError('could not find class definition')\n \n if ismethod(object):\n object=object.__func__\n if isfunction(object):\n object=object.__code__\n if istraceback(object):\n object=object.tb_frame\n if isframe(object):\n object=object.f_code\n if iscode(object):\n if not hasattr(object,'co_firstlineno'):\n raise OSError('could not find function definition')\n lnum=object.co_firstlineno -1\n pat=re.compile(r'^(\\s*def\\s)|(\\s*async\\s+def\\s)|(.*(?0:\n if pat.match(lines[lnum]):break\n lnum=lnum -1\n return lines,lnum\n raise OSError('could not find code object')\n \ndef getcomments(object):\n ''\n\n\n \n try :\n lines,lnum=findsource(object)\n except (OSError,TypeError):\n return None\n \n if ismodule(object):\n \n start=0\n if lines and lines[0][:2]=='#!':start=1\n while start 0:\n indent=indentsize(lines[lnum])\n end=lnum -1\n if end >=0 and lines[end].lstrip()[:1]=='#'and\\\n indentsize(lines[end])==indent:\n comments=[lines[end].expandtabs().lstrip()]\n if end >0:\n end=end -1\n comment=lines[end].expandtabs().lstrip()\n while comment[:1]=='#'and indentsize(lines[end])==indent:\n comments[:0]=[comment]\n end=end -1\n if end <0:break\n comment=lines[end].expandtabs().lstrip()\n while comments and comments[0].strip()=='#':\n comments[:1]=[]\n while comments and comments[-1].strip()=='#':\n comments[-1:]=[]\n return ''.join(comments)\n \nclass EndOfBlock(Exception):pass\n\nclass BlockFinder:\n ''\n def __init__(self):\n self.indent=0\n self.islambda=False\n self.started=False\n self.passline=False\n self.indecorator=False\n self.decoratorhasargs=False\n self.last=1\n \n def tokeneater(self,type,token,srowcol,erowcol,line):\n if not self.started and not self.indecorator:\n \n if token ==\"@\":\n self.indecorator=True\n \n elif token in (\"def\",\"class\",\"lambda\"):\n if token ==\"lambda\":\n self.islambda=True\n self.started=True\n self.passline=True\n elif token ==\"(\":\n if self.indecorator:\n self.decoratorhasargs=True\n elif token ==\")\":\n if self.indecorator:\n self.indecorator=False\n self.decoratorhasargs=False\n elif type ==tokenize.NEWLINE:\n self.passline=False\n self.last=srowcol[0]\n if self.islambda:\n raise EndOfBlock\n \n \n if self.indecorator and not self.decoratorhasargs:\n self.indecorator=False\n elif self.passline:\n pass\n elif type ==tokenize.INDENT:\n self.indent=self.indent+1\n self.passline=True\n elif type ==tokenize.DEDENT:\n self.indent=self.indent -1\n \n \n \n if self.indent <=0:\n raise EndOfBlock\n elif self.indent ==0 and type not in (tokenize.COMMENT,tokenize.NL):\n \n \n raise EndOfBlock\n \ndef getblock(lines):\n ''\n blockfinder=BlockFinder()\n try :\n tokens=tokenize.generate_tokens(iter(lines).__next__)\n for _token in tokens:\n blockfinder.tokeneater(*_token)\n except (EndOfBlock,IndentationError):\n pass\n return lines[:blockfinder.last]\n \ndef getsourcelines(object):\n ''\n\n\n\n\n\n \n object=unwrap(object)\n lines,lnum=findsource(object)\n \n if ismodule(object):\n return lines,0\n else :\n return getblock(lines[lnum:]),lnum+1\n \ndef getsource(object):\n ''\n\n\n\n \n lines,lnum=getsourcelines(object)\n return ''.join(lines)\n \n \ndef walktree(classes,children,parent):\n ''\n results=[]\n classes.sort(key=attrgetter('__module__','__name__'))\n for c in classes:\n results.append((c,c.__bases__))\n if c in children:\n results.append(walktree(children[c],children,c))\n return results\n \ndef getclasstree(classes,unique=False ):\n ''\n\n\n\n\n\n\n \n children={}\n roots=[]\n 
for c in classes:\n if c.__bases__:\n for parent in c.__bases__:\n if not parent in children:\n children[parent]=[]\n if c not in children[parent]:\n children[parent].append(c)\n if unique and parent in classes:break\n elif c not in roots:\n roots.append(c)\n for parent in children:\n if parent not in classes:\n roots.append(parent)\n return walktree(roots,children,None )\n \n \nArguments=namedtuple('Arguments','args, varargs, varkw')\n\ndef getargs(co):\n ''\n\n\n\n\n \n args,varargs,kwonlyargs,varkw=_getfullargs(co)\n return Arguments(args+kwonlyargs,varargs,varkw)\n \ndef _getfullargs(co):\n ''\n\n\n\n \n \n if not iscode(co):\n raise TypeError('{!r} is not a code object'.format(co))\n \n nargs=co.co_argcount\n names=co.co_varnames\n nkwargs=co.co_kwonlyargcount\n args=list(names[:nargs])\n kwonlyargs=list(names[nargs:nargs+nkwargs])\n step=0\n \n nargs +=nkwargs\n varargs=None\n if co.co_flags&CO_VARARGS:\n varargs=co.co_varnames[nargs]\n nargs=nargs+1\n varkw=None\n if co.co_flags&CO_VARKEYWORDS:\n varkw=co.co_varnames[nargs]\n return args,varargs,kwonlyargs,varkw\n \n \nArgSpec=namedtuple('ArgSpec','args varargs keywords defaults')\n\ndef getargspec(func):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n warnings.warn(\"inspect.getargspec() is deprecated, \"\n \"use inspect.signature() or inspect.getfullargspec()\",\n DeprecationWarning,stacklevel=2)\n args,varargs,varkw,defaults,kwonlyargs,kwonlydefaults,ann=\\\n getfullargspec(func)\n if kwonlyargs or ann:\n raise ValueError(\"Function has keyword-only parameters or annotations\"\n \", use getfullargspec() API which can support them\")\n return ArgSpec(args,varargs,varkw,defaults)\n \nFullArgSpec=namedtuple('FullArgSpec',\n'args, varargs, varkw, defaults, kwonlyargs, kwonlydefaults, annotations')\n\ndef getfullargspec(func):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n try :\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n sig=_signature_from_callable(func,\n follow_wrapper_chains=False ,\n skip_bound_arg=False ,\n sigcls=Signature)\n except Exception as ex:\n \n \n \n \n raise TypeError('unsupported callable')from ex\n \n args=[]\n varargs=None\n varkw=None\n kwonlyargs=[]\n defaults=()\n annotations={}\n defaults=()\n kwdefaults={}\n \n if sig.return_annotation is not sig.empty:\n annotations['return']=sig.return_annotation\n \n for param in sig.parameters.values():\n kind=param.kind\n name=param.name\n \n if kind is _POSITIONAL_ONLY:\n args.append(name)\n elif kind is _POSITIONAL_OR_KEYWORD:\n args.append(name)\n if param.default is not param.empty:\n defaults +=(param.default,)\n elif kind is _VAR_POSITIONAL:\n varargs=name\n elif kind is _KEYWORD_ONLY:\n kwonlyargs.append(name)\n if param.default is not param.empty:\n kwdefaults[name]=param.default\n elif kind is _VAR_KEYWORD:\n varkw=name\n \n if param.annotation is not param.empty:\n annotations[name]=param.annotation\n \n if not kwdefaults:\n \n kwdefaults=None\n \n if not defaults:\n \n defaults=None\n \n return FullArgSpec(args,varargs,varkw,defaults,\n kwonlyargs,kwdefaults,annotations)\n \n \nArgInfo=namedtuple('ArgInfo','args varargs keywords locals')\n\ndef getargvalues(frame):\n ''\n\n\n\n\n \n args,varargs,varkw=getargs(frame.f_code)\n return ArgInfo(args,varargs,varkw,frame.f_locals)\n \ndef formatannotation(annotation,base_module=None ):\n if getattr(annotation,'__module__',None )=='typing':\n return repr(annotation).replace('typing.','')\n if isinstance(annotation,type):\n if annotation.__module__ in ('builtins',base_module):\n return annotation.__qualname__\n return 
annotation.__module__+'.'+annotation.__qualname__\n return repr(annotation)\n \ndef formatannotationrelativeto(object):\n module=getattr(object,'__module__',None )\n def _formatannotation(annotation):\n return formatannotation(annotation,module)\n return _formatannotation\n \ndef formatargspec(args,varargs=None ,varkw=None ,defaults=None ,\nkwonlyargs=(),kwonlydefaults={},annotations={},\nformatarg=str,\nformatvarargs=lambda name:'*'+name,\nformatvarkw=lambda name:'**'+name,\nformatvalue=lambda value:'='+repr(value),\nformatreturns=lambda text:' -> '+text,\nformatannotation=formatannotation):\n ''\n\n\n\n\n\n\n\n\n\n \n \n from warnings import warn\n \n warn(\"`formatargspec` is deprecated since Python 3.5. Use `signature` and \"\n \"the `Signature` object directly\",\n DeprecationWarning,\n stacklevel=2)\n \n def formatargandannotation(arg):\n result=formatarg(arg)\n if arg in annotations:\n result +=': '+formatannotation(annotations[arg])\n return result\n specs=[]\n if defaults:\n firstdefault=len(args)-len(defaults)\n for i,arg in enumerate(args):\n spec=formatargandannotation(arg)\n if defaults and i >=firstdefault:\n spec=spec+formatvalue(defaults[i -firstdefault])\n specs.append(spec)\n if varargs is not None :\n specs.append(formatvarargs(formatargandannotation(varargs)))\n else :\n if kwonlyargs:\n specs.append('*')\n if kwonlyargs:\n for kwonlyarg in kwonlyargs:\n spec=formatargandannotation(kwonlyarg)\n if kwonlydefaults and kwonlyarg in kwonlydefaults:\n spec +=formatvalue(kwonlydefaults[kwonlyarg])\n specs.append(spec)\n if varkw is not None :\n specs.append(formatvarkw(formatargandannotation(varkw)))\n result='('+', '.join(specs)+')'\n if 'return'in annotations:\n result +=formatreturns(formatannotation(annotations['return']))\n return result\n \ndef formatargvalues(args,varargs,varkw,locals,\nformatarg=str,\nformatvarargs=lambda name:'*'+name,\nformatvarkw=lambda name:'**'+name,\nformatvalue=lambda value:'='+repr(value)):\n ''\n\n\n\n\n \n def convert(name,locals=locals,\n formatarg=formatarg,formatvalue=formatvalue):\n return formatarg(name)+formatvalue(locals[name])\n specs=[]\n for i in range(len(args)):\n specs.append(convert(args[i]))\n if varargs:\n specs.append(formatvarargs(varargs)+formatvalue(locals[varargs]))\n if varkw:\n specs.append(formatvarkw(varkw)+formatvalue(locals[varkw]))\n return '('+', '.join(specs)+')'\n \ndef _missing_arguments(f_name,argnames,pos,values):\n names=[repr(name)for name in argnames if name not in values]\n missing=len(names)\n if missing ==1:\n s=names[0]\n elif missing ==2:\n s=\"{} and {}\".format(*names)\n else :\n tail=\", {} and {}\".format(*names[-2:])\n del names[-2:]\n s=\", \".join(names)+tail\n raise TypeError(\"%s() missing %i required %s argument%s: %s\"%\n (f_name,missing,\n \"positional\"if pos else \"keyword-only\",\n \"\"if missing ==1 else \"s\",s))\n \ndef _too_many(f_name,args,kwonly,varargs,defcount,given,values):\n atleast=len(args)-defcount\n kwonly_given=len([arg for arg in kwonly if arg in values])\n if varargs:\n plural=atleast !=1\n sig=\"at least %d\"%(atleast,)\n elif defcount:\n plural=True\n sig=\"from %d to %d\"%(atleast,len(args))\n else :\n plural=len(args)!=1\n sig=str(len(args))\n kwonly_sig=\"\"\n if kwonly_given:\n msg=\" positional argument%s (and %d keyword-only argument%s)\"\n kwonly_sig=(msg %(\"s\"if given !=1 else \"\",kwonly_given,\n \"s\"if kwonly_given !=1 else \"\"))\n raise TypeError(\"%s() takes %s positional argument%s but %d%s %s given\"%\n (f_name,sig,\"s\"if plural else 
\"\",given,kwonly_sig,\n \"was\"if given ==1 and not kwonly_given else \"were\"))\n \ndef getcallargs(*func_and_positional,**named):\n ''\n\n\n\n \n func=func_and_positional[0]\n positional=func_and_positional[1:]\n spec=getfullargspec(func)\n args,varargs,varkw,defaults,kwonlyargs,kwonlydefaults,ann=spec\n f_name=func.__name__\n arg2value={}\n \n \n if ismethod(func)and func.__self__ is not None :\n \n positional=(func.__self__,)+positional\n num_pos=len(positional)\n num_args=len(args)\n num_defaults=len(defaults)if defaults else 0\n \n n=min(num_pos,num_args)\n for i in range(n):\n arg2value[args[i]]=positional[i]\n if varargs:\n arg2value[varargs]=tuple(positional[n:])\n possible_kwargs=set(args+kwonlyargs)\n if varkw:\n arg2value[varkw]={}\n for kw,value in named.items():\n if kw not in possible_kwargs:\n if not varkw:\n raise TypeError(\"%s() got an unexpected keyword argument %r\"%\n (f_name,kw))\n arg2value[varkw][kw]=value\n continue\n if kw in arg2value:\n raise TypeError(\"%s() got multiple values for argument %r\"%\n (f_name,kw))\n arg2value[kw]=value\n if num_pos >num_args and not varargs:\n _too_many(f_name,args,kwonlyargs,varargs,num_defaults,\n num_pos,arg2value)\n if num_pos 0:\n start=lineno -1 -context //2\n try :\n lines,lnum=findsource(frame)\n except OSError:\n lines=index=None\n else :\n start=max(0,min(start,len(lines)-context))\n lines=lines[start:start+context]\n index=lineno -1 -start\n else :\n lines=index=None\n \n return Traceback(filename,lineno,frame.f_code.co_name,lines,index)\n \ndef getlineno(frame):\n ''\n \n return frame.f_lineno\n \nFrameInfo=namedtuple('FrameInfo',('frame',)+Traceback._fields)\n\ndef getouterframes(frame,context=1):\n ''\n\n\n \n framelist=[]\n while frame:\n frameinfo=(frame,)+getframeinfo(frame,context)\n framelist.append(FrameInfo(*frameinfo))\n frame=frame.f_back\n return framelist\n \ndef getinnerframes(tb,context=1):\n ''\n\n\n \n framelist=[]\n while tb:\n frameinfo=(tb.tb_frame,)+getframeinfo(tb,context)\n framelist.append(FrameInfo(*frameinfo))\n tb=tb.tb_next\n return framelist\n \ndef currentframe():\n ''\n return sys._getframe(1)if hasattr(sys,\"_getframe\")else None\n \ndef stack(context=1):\n ''\n return getouterframes(sys._getframe(1),context)\n \ndef trace(context=1):\n ''\n return getinnerframes(sys.exc_info()[2],context)\n \n \n \n \n_sentinel=object()\n\ndef _static_getmro(klass):\n return type.__dict__['__mro__'].__get__(klass)\n \ndef _check_instance(obj,attr):\n instance_dict={}\n try :\n instance_dict=object.__getattribute__(obj,\"__dict__\")\n except AttributeError:\n pass\n return dict.get(instance_dict,attr,_sentinel)\n \n \ndef _check_class(klass,attr):\n for entry in _static_getmro(klass):\n if _shadowed_dict(type(entry))is _sentinel:\n try :\n return entry.__dict__[attr]\n except KeyError:\n pass\n return _sentinel\n \ndef _is_type(obj):\n try :\n _static_getmro(obj)\n except TypeError:\n return False\n return True\n \ndef _shadowed_dict(klass):\n dict_attr=type.__dict__[\"__dict__\"]\n for entry in _static_getmro(klass):\n try :\n class_dict=dict_attr.__get__(entry)[\"__dict__\"]\n except KeyError:\n pass\n else :\n if not (type(class_dict)is types.GetSetDescriptorType and\n class_dict.__name__ ==\"__dict__\"and\n class_dict.__objclass__ is entry):\n return class_dict\n return _sentinel\n \ndef getattr_static(obj,attr,default=_sentinel):\n ''\n\n\n\n\n\n\n\n\n \n instance_result=_sentinel\n if not _is_type(obj):\n klass=type(obj)\n dict_attr=_shadowed_dict(klass)\n if (dict_attr is _sentinel or\n 
type(dict_attr)is types.MemberDescriptorType):\n instance_result=_check_instance(obj,attr)\n else :\n klass=obj\n \n klass_result=_check_class(klass,attr)\n \n if instance_result is not _sentinel and klass_result is not _sentinel:\n if (_check_class(type(klass_result),'__get__')is not _sentinel and\n _check_class(type(klass_result),'__set__')is not _sentinel):\n return klass_result\n \n if instance_result is not _sentinel:\n return instance_result\n if klass_result is not _sentinel:\n return klass_result\n \n if obj is klass:\n \n for entry in _static_getmro(type(klass)):\n if _shadowed_dict(type(entry))is _sentinel:\n try :\n return entry.__dict__[attr]\n except KeyError:\n pass\n if default is not _sentinel:\n return default\n raise AttributeError(attr)\n \n \n \n \nGEN_CREATED='GEN_CREATED'\nGEN_RUNNING='GEN_RUNNING'\nGEN_SUSPENDED='GEN_SUSPENDED'\nGEN_CLOSED='GEN_CLOSED'\n\ndef getgeneratorstate(generator):\n ''\n\n\n\n\n\n\n \n if generator.gi_running:\n return GEN_RUNNING\n if generator.gi_frame is None :\n return GEN_CLOSED\n if generator.gi_frame.f_lasti ==-1:\n return GEN_CREATED\n return GEN_SUSPENDED\n \n \ndef getgeneratorlocals(generator):\n ''\n\n\n\n \n \n if not isgenerator(generator):\n raise TypeError(\"{!r} is not a Python generator\".format(generator))\n \n frame=getattr(generator,\"gi_frame\",None )\n if frame is not None :\n return generator.gi_frame.f_locals\n else :\n return {}\n \n \n \n \nCORO_CREATED='CORO_CREATED'\nCORO_RUNNING='CORO_RUNNING'\nCORO_SUSPENDED='CORO_SUSPENDED'\nCORO_CLOSED='CORO_CLOSED'\n\ndef getcoroutinestate(coroutine):\n ''\n\n\n\n\n\n\n \n if coroutine.cr_running:\n return CORO_RUNNING\n if coroutine.cr_frame is None :\n return CORO_CLOSED\n if coroutine.cr_frame.f_lasti ==-1:\n return CORO_CREATED\n return CORO_SUSPENDED\n \n \ndef getcoroutinelocals(coroutine):\n ''\n\n\n\n \n frame=getattr(coroutine,\"cr_frame\",None )\n if frame is not None :\n return frame.f_locals\n else :\n return {}\n \n \n \n \n \n \n \n_WrapperDescriptor=type(type.__call__)\n_MethodWrapper=type(all.__call__)\n_ClassMethodWrapper=type(int.__dict__['from_bytes'])\n\n_NonUserDefinedCallables=(_WrapperDescriptor,\n_MethodWrapper,\n_ClassMethodWrapper,\ntypes.BuiltinFunctionType)\n\n\ndef _signature_get_user_defined_method(cls,method_name):\n ''\n\n\n \n try :\n meth=getattr(cls,method_name)\n except AttributeError:\n return\n else :\n if not isinstance(meth,_NonUserDefinedCallables):\n \n \n return meth\n \n \ndef _signature_get_partial(wrapped_sig,partial,extra_args=()):\n ''\n\n\n \n \n old_params=wrapped_sig.parameters\n new_params=OrderedDict(old_params.items())\n \n partial_args=partial.args or ()\n partial_keywords=partial.keywords or {}\n \n if extra_args:\n partial_args=extra_args+partial_args\n \n try :\n ba=wrapped_sig.bind_partial(*partial_args,**partial_keywords)\n except TypeError as ex:\n msg='partial object {!r} has incorrect arguments'.format(partial)\n raise ValueError(msg)from ex\n \n \n transform_to_kwonly=False\n for param_name,param in old_params.items():\n try :\n arg_value=ba.arguments[param_name]\n except KeyError:\n pass\n else :\n if param.kind is _POSITIONAL_ONLY:\n \n \n new_params.pop(param_name)\n continue\n \n if param.kind is _POSITIONAL_OR_KEYWORD:\n if param_name in partial_keywords:\n \n \n \n \n \n \n \n \n \n \n \n \n transform_to_kwonly=True\n \n new_params[param_name]=param.replace(default=arg_value)\n else :\n \n new_params.pop(param.name)\n continue\n \n if param.kind is _KEYWORD_ONLY:\n \n 
new_params[param_name]=param.replace(default=arg_value)\n \n if transform_to_kwonly:\n assert param.kind is not _POSITIONAL_ONLY\n \n if param.kind is _POSITIONAL_OR_KEYWORD:\n new_param=new_params[param_name].replace(kind=_KEYWORD_ONLY)\n new_params[param_name]=new_param\n new_params.move_to_end(param_name)\n elif param.kind in (_KEYWORD_ONLY,_VAR_KEYWORD):\n new_params.move_to_end(param_name)\n elif param.kind is _VAR_POSITIONAL:\n new_params.pop(param.name)\n \n return wrapped_sig.replace(parameters=new_params.values())\n \n \ndef _signature_bound_method(sig):\n ''\n\n \n \n params=tuple(sig.parameters.values())\n \n if not params or params[0].kind in (_VAR_KEYWORD,_KEYWORD_ONLY):\n raise ValueError('invalid method signature')\n \n kind=params[0].kind\n if kind in (_POSITIONAL_OR_KEYWORD,_POSITIONAL_ONLY):\n \n \n params=params[1:]\n else :\n if kind is not _VAR_POSITIONAL:\n \n \n raise ValueError('invalid argument type')\n \n \n \n return sig.replace(parameters=params)\n \n \ndef _signature_is_builtin(obj):\n ''\n\n \n return (isbuiltin(obj)or\n ismethoddescriptor(obj)or\n isinstance(obj,_NonUserDefinedCallables)or\n \n \n obj in (type,object))\n \n \ndef _signature_is_functionlike(obj):\n ''\n\n\n\n \n \n if not callable(obj)or isclass(obj):\n \n \n return False\n \n name=getattr(obj,'__name__',None )\n code=getattr(obj,'__code__',None )\n defaults=getattr(obj,'__defaults__',_void)\n kwdefaults=getattr(obj,'__kwdefaults__',_void)\n annotations=getattr(obj,'__annotations__',None )\n \n return (isinstance(code,types.CodeType)and\n isinstance(name,str)and\n (defaults is None or isinstance(defaults,tuple))and\n (kwdefaults is None or isinstance(kwdefaults,dict))and\n isinstance(annotations,dict))\n \n \ndef _signature_get_bound_param(spec):\n ''\n\n\n\n\n \n \n assert spec.startswith('($')\n \n pos=spec.find(',')\n if pos ==-1:\n pos=spec.find(')')\n \n cpos=spec.find(':')\n assert cpos ==-1 or cpos >pos\n \n cpos=spec.find('=')\n assert cpos ==-1 or cpos >pos\n \n return spec[2:pos]\n \n \ndef _signature_strip_non_python_syntax(signature):\n ''\n\n\n\n\n\n\n\n\n\n \n \n if not signature:\n return signature,None ,None\n \n self_parameter=None\n last_positional_only=None\n \n lines=[l.encode('ascii')for l in signature.split('\\n')]\n generator=iter(lines).__next__\n token_stream=tokenize.tokenize(generator)\n \n delayed_comma=False\n skip_next_comma=False\n text=[]\n add=text.append\n \n current_parameter=0\n OP=token.OP\n ERRORTOKEN=token.ERRORTOKEN\n \n \n t=next(token_stream)\n assert t.type ==tokenize.ENCODING\n \n for t in token_stream:\n type,string=t.type,t.string\n \n if type ==OP:\n if string ==',':\n if skip_next_comma:\n skip_next_comma=False\n else :\n assert not delayed_comma\n delayed_comma=True\n current_parameter +=1\n continue\n \n if string =='/':\n assert not skip_next_comma\n assert last_positional_only is None\n skip_next_comma=True\n last_positional_only=current_parameter -1\n continue\n \n if (type ==ERRORTOKEN)and (string =='$'):\n assert self_parameter is None\n self_parameter=current_parameter\n continue\n \n if delayed_comma:\n delayed_comma=False\n if not ((type ==OP)and (string ==')')):\n add(', ')\n add(string)\n if (string ==','):\n add(' ')\n clean_signature=''.join(text)\n return clean_signature,self_parameter,last_positional_only\n \n \ndef _signature_fromstr(cls,obj,s,skip_bound_arg=True ):\n ''\n\n \n \n \n import ast\n \n Parameter=cls._parameter_cls\n \n clean_signature,self_parameter,last_positional_only=\\\n _signature_strip_non_python_syntax(s)\n 
\n program=\"def foo\"+clean_signature+\": pass\"\n \n try :\n module=ast.parse(program)\n except SyntaxError:\n module=None\n \n if not isinstance(module,ast.Module):\n raise ValueError(\"{!r} builtin has invalid signature\".format(obj))\n \n f=module.body[0]\n \n parameters=[]\n empty=Parameter.empty\n invalid=object()\n \n module=None\n module_dict={}\n module_name=getattr(obj,'__module__',None )\n if module_name:\n module=sys.modules.get(module_name,None )\n if module:\n module_dict=module.__dict__\n sys_module_dict=sys.modules\n \n def parse_name(node):\n assert isinstance(node,ast.arg)\n if node.annotation !=None :\n raise ValueError(\"Annotations are not currently supported\")\n return node.arg\n \n def wrap_value(s):\n try :\n value=eval(s,module_dict)\n except NameError:\n try :\n value=eval(s,sys_module_dict)\n except NameError:\n raise RuntimeError()\n \n if isinstance(value,str):\n return ast.Str(value)\n if isinstance(value,(int,float)):\n return ast.Num(value)\n if isinstance(value,bytes):\n return ast.Bytes(value)\n if value in (True ,False ,None ):\n return ast.NameConstant(value)\n raise RuntimeError()\n \n class RewriteSymbolics(ast.NodeTransformer):\n def visit_Attribute(self,node):\n a=[]\n n=node\n while isinstance(n,ast.Attribute):\n a.append(n.attr)\n n=n.value\n if not isinstance(n,ast.Name):\n raise RuntimeError()\n a.append(n.id)\n value=\".\".join(reversed(a))\n return wrap_value(value)\n \n def visit_Name(self,node):\n if not isinstance(node.ctx,ast.Load):\n raise ValueError()\n return wrap_value(node.id)\n \n def p(name_node,default_node,default=empty):\n name=parse_name(name_node)\n if name is invalid:\n return None\n if default_node and default_node is not _empty:\n try :\n default_node=RewriteSymbolics().visit(default_node)\n o=ast.literal_eval(default_node)\n except ValueError:\n o=invalid\n if o is invalid:\n return None\n default=o if o is not invalid else default\n parameters.append(Parameter(name,kind,default=default,annotation=empty))\n \n \n args=reversed(f.args.args)\n defaults=reversed(f.args.defaults)\n iter=itertools.zip_longest(args,defaults,fillvalue=None )\n if last_positional_only is not None :\n kind=Parameter.POSITIONAL_ONLY\n else :\n kind=Parameter.POSITIONAL_OR_KEYWORD\n for i,(name,default)in enumerate(reversed(list(iter))):\n p(name,default)\n if i ==last_positional_only:\n kind=Parameter.POSITIONAL_OR_KEYWORD\n \n \n if f.args.vararg:\n kind=Parameter.VAR_POSITIONAL\n p(f.args.vararg,empty)\n \n \n kind=Parameter.KEYWORD_ONLY\n for name,default in zip(f.args.kwonlyargs,f.args.kw_defaults):\n p(name,default)\n \n \n if f.args.kwarg:\n kind=Parameter.VAR_KEYWORD\n p(f.args.kwarg,empty)\n \n if self_parameter is not None :\n \n \n \n \n \n assert parameters\n _self=getattr(obj,'__self__',None )\n self_isbound=_self is not None\n self_ismodule=ismodule(_self)\n if self_isbound and (self_ismodule or skip_bound_arg):\n parameters.pop(0)\n else :\n \n p=parameters[0].replace(kind=Parameter.POSITIONAL_ONLY)\n parameters[0]=p\n \n return cls(parameters,return_annotation=cls.empty)\n \n \ndef _signature_from_builtin(cls,func,skip_bound_arg=True ):\n ''\n\n \n \n if not _signature_is_builtin(func):\n raise TypeError(\"{!r} is not a Python builtin \"\n \"function\".format(func))\n \n s=getattr(func,\"__text_signature__\",None )\n if not s:\n raise ValueError(\"no signature found for builtin {!r}\".format(func))\n \n return _signature_fromstr(cls,func,s,skip_bound_arg)\n \n \ndef _signature_from_function(cls,func):\n ''\n \n 
is_duck_function=False\n if not isfunction(func):\n if _signature_is_functionlike(func):\n is_duck_function=True\n else :\n \n \n raise TypeError('{!r} is not a Python function'.format(func))\n \n Parameter=cls._parameter_cls\n \n \n func_code=func.__code__\n pos_count=func_code.co_argcount\n arg_names=func_code.co_varnames\n positional=tuple(arg_names[:pos_count])\n keyword_only_count=func_code.co_kwonlyargcount\n keyword_only=arg_names[pos_count:(pos_count+keyword_only_count)]\n annotations=func.__annotations__\n defaults=func.__defaults__\n kwdefaults=func.__kwdefaults__\n \n if defaults:\n pos_default_count=len(defaults)\n else :\n pos_default_count=0\n \n parameters=[]\n \n \n non_default_count=pos_count -pos_default_count\n for name in positional[:non_default_count]:\n annotation=annotations.get(name,_empty)\n parameters.append(Parameter(name,annotation=annotation,\n kind=_POSITIONAL_OR_KEYWORD))\n \n \n for offset,name in enumerate(positional[non_default_count:]):\n annotation=annotations.get(name,_empty)\n parameters.append(Parameter(name,annotation=annotation,\n kind=_POSITIONAL_OR_KEYWORD,\n default=defaults[offset]))\n \n \n if func_code.co_flags&CO_VARARGS:\n name=arg_names[pos_count+keyword_only_count]\n annotation=annotations.get(name,_empty)\n parameters.append(Parameter(name,annotation=annotation,\n kind=_VAR_POSITIONAL))\n \n \n for name in keyword_only:\n default=_empty\n if kwdefaults is not None :\n default=kwdefaults.get(name,_empty)\n \n annotation=annotations.get(name,_empty)\n parameters.append(Parameter(name,annotation=annotation,\n kind=_KEYWORD_ONLY,\n default=default))\n \n if func_code.co_flags&CO_VARKEYWORDS:\n index=pos_count+keyword_only_count\n if func_code.co_flags&CO_VARARGS:\n index +=1\n \n name=arg_names[index]\n annotation=annotations.get(name,_empty)\n parameters.append(Parameter(name,annotation=annotation,\n kind=_VAR_KEYWORD))\n \n \n \n return cls(parameters,\n return_annotation=annotations.get('return',_empty),\n __validate_parameters__=is_duck_function)\n \n \ndef _signature_from_callable(obj,*,\nfollow_wrapper_chains=True ,\nskip_bound_arg=True ,\nsigcls):\n\n ''\n\n \n \n if not callable(obj):\n raise TypeError('{!r} is not a callable object'.format(obj))\n \n if isinstance(obj,types.MethodType):\n \n \n sig=_signature_from_callable(\n obj.__func__,\n follow_wrapper_chains=follow_wrapper_chains,\n skip_bound_arg=skip_bound_arg,\n sigcls=sigcls)\n \n if skip_bound_arg:\n return _signature_bound_method(sig)\n else :\n return sig\n \n \n if follow_wrapper_chains:\n obj=unwrap(obj,stop=(lambda f:hasattr(f,\"__signature__\")))\n if isinstance(obj,types.MethodType):\n \n \n \n return _signature_from_callable(\n obj,\n follow_wrapper_chains=follow_wrapper_chains,\n skip_bound_arg=skip_bound_arg,\n sigcls=sigcls)\n \n try :\n sig=obj.__signature__\n except AttributeError:\n pass\n else :\n if sig is not None :\n if not isinstance(sig,Signature):\n raise TypeError(\n 'unexpected object {!r} in __signature__ '\n 'attribute'.format(sig))\n return sig\n \n try :\n partialmethod=obj._partialmethod\n except AttributeError:\n pass\n else :\n if isinstance(partialmethod,functools.partialmethod):\n \n \n \n \n \n \n \n wrapped_sig=_signature_from_callable(\n partialmethod.func,\n follow_wrapper_chains=follow_wrapper_chains,\n skip_bound_arg=skip_bound_arg,\n sigcls=sigcls)\n \n sig=_signature_get_partial(wrapped_sig,partialmethod,(None ,))\n first_wrapped_param=tuple(wrapped_sig.parameters.values())[0]\n if first_wrapped_param.kind is 
Parameter.VAR_POSITIONAL:\n \n \n return sig\n else :\n sig_params=tuple(sig.parameters.values())\n assert (not sig_params or\n first_wrapped_param is not sig_params[0])\n new_params=(first_wrapped_param,)+sig_params\n return sig.replace(parameters=new_params)\n \n if isfunction(obj)or _signature_is_functionlike(obj):\n \n \n return _signature_from_function(sigcls,obj)\n \n if _signature_is_builtin(obj):\n return _signature_from_builtin(sigcls,obj,\n skip_bound_arg=skip_bound_arg)\n \n if isinstance(obj,functools.partial):\n wrapped_sig=_signature_from_callable(\n obj.func,\n follow_wrapper_chains=follow_wrapper_chains,\n skip_bound_arg=skip_bound_arg,\n sigcls=sigcls)\n return _signature_get_partial(wrapped_sig,obj)\n \n sig=None\n if isinstance(obj,type):\n \n \n \n \n call=_signature_get_user_defined_method(type(obj),'__call__')\n if call is not None :\n sig=_signature_from_callable(\n call,\n follow_wrapper_chains=follow_wrapper_chains,\n skip_bound_arg=skip_bound_arg,\n sigcls=sigcls)\n else :\n \n new=_signature_get_user_defined_method(obj,'__new__')\n if new is not None :\n sig=_signature_from_callable(\n new,\n follow_wrapper_chains=follow_wrapper_chains,\n skip_bound_arg=skip_bound_arg,\n sigcls=sigcls)\n else :\n \n init=_signature_get_user_defined_method(obj,'__init__')\n if init is not None :\n sig=_signature_from_callable(\n init,\n follow_wrapper_chains=follow_wrapper_chains,\n skip_bound_arg=skip_bound_arg,\n sigcls=sigcls)\n \n if sig is None :\n \n \n \n for base in obj.__mro__[:-1]:\n \n \n \n \n \n \n \n try :\n text_sig=base.__text_signature__\n except AttributeError:\n pass\n else :\n if text_sig:\n \n \n return _signature_fromstr(sigcls,obj,text_sig)\n \n \n \n \n if type not in obj.__mro__:\n \n \n if (obj.__init__ is object.__init__ and\n obj.__new__ is object.__new__):\n \n return signature(object)\n else :\n raise ValueError(\n 'no signature found for builtin type {!r}'.format(obj))\n \n elif not isinstance(obj,_NonUserDefinedCallables):\n \n \n \n \n call=_signature_get_user_defined_method(type(obj),'__call__')\n if call is not None :\n try :\n sig=_signature_from_callable(\n call,\n follow_wrapper_chains=follow_wrapper_chains,\n skip_bound_arg=skip_bound_arg,\n sigcls=sigcls)\n except ValueError as ex:\n msg='no signature found for {!r}'.format(obj)\n raise ValueError(msg)from ex\n \n if sig is not None :\n \n \n if skip_bound_arg:\n return _signature_bound_method(sig)\n else :\n return sig\n \n if isinstance(obj,types.BuiltinFunctionType):\n \n msg='no signature found for builtin function {!r}'.format(obj)\n raise ValueError(msg)\n \n raise ValueError('callable {!r} is not supported by signature'.format(obj))\n \n \nclass _void:\n ''\n \n \nclass _empty:\n ''\n \n \nclass _ParameterKind(enum.IntEnum):\n POSITIONAL_ONLY=0\n POSITIONAL_OR_KEYWORD=1\n VAR_POSITIONAL=2\n KEYWORD_ONLY=3\n VAR_KEYWORD=4\n \n def __str__(self):\n return self._name_\n \n \n_POSITIONAL_ONLY=_ParameterKind.POSITIONAL_ONLY\n_POSITIONAL_OR_KEYWORD=_ParameterKind.POSITIONAL_OR_KEYWORD\n_VAR_POSITIONAL=_ParameterKind.VAR_POSITIONAL\n_KEYWORD_ONLY=_ParameterKind.KEYWORD_ONLY\n_VAR_KEYWORD=_ParameterKind.VAR_KEYWORD\n\n_PARAM_NAME_MAPPING={\n_POSITIONAL_ONLY:'positional-only',\n_POSITIONAL_OR_KEYWORD:'positional or keyword',\n_VAR_POSITIONAL:'variadic positional',\n_KEYWORD_ONLY:'keyword-only',\n_VAR_KEYWORD:'variadic keyword'\n}\n\n_get_paramkind_descr=_PARAM_NAME_MAPPING.__getitem__\n\n\nclass Parameter:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n 
__slots__=('_name','_kind','_default','_annotation')\n \n POSITIONAL_ONLY=_POSITIONAL_ONLY\n POSITIONAL_OR_KEYWORD=_POSITIONAL_OR_KEYWORD\n VAR_POSITIONAL=_VAR_POSITIONAL\n KEYWORD_ONLY=_KEYWORD_ONLY\n VAR_KEYWORD=_VAR_KEYWORD\n \n empty=_empty\n \n def __init__(self,name,kind,*,default=_empty,annotation=_empty):\n try :\n self._kind=_ParameterKind(kind)\n except ValueError:\n raise ValueError(f'value {kind!r} is not a valid Parameter.kind')\n if default is not _empty:\n if self._kind in (_VAR_POSITIONAL,_VAR_KEYWORD):\n msg='{} parameters cannot have default values'\n msg=msg.format(_get_paramkind_descr(self._kind))\n raise ValueError(msg)\n self._default=default\n self._annotation=annotation\n \n if name is _empty:\n raise ValueError('name is a required attribute for Parameter')\n \n if not isinstance(name,str):\n msg='name must be a str, not a {}'.format(type(name).__name__)\n raise TypeError(msg)\n \n if name[0]=='.'and name[1:].isdigit():\n \n \n \n \n if self._kind !=_POSITIONAL_OR_KEYWORD:\n msg=(\n 'implicit arguments must be passed as '\n 'positional or keyword arguments, not {}'\n )\n msg=msg.format(_get_paramkind_descr(self._kind))\n raise ValueError(msg)\n self._kind=_POSITIONAL_ONLY\n name='implicit{}'.format(name[1:])\n \n if not name.isidentifier():\n raise ValueError('{!r} is not a valid parameter name'.format(name))\n \n self._name=name\n \n def __reduce__(self):\n return (type(self),\n (self._name,self._kind),\n {'_default':self._default,\n '_annotation':self._annotation})\n \n def __setstate__(self,state):\n self._default=state['_default']\n self._annotation=state['_annotation']\n \n @property\n def name(self):\n return self._name\n \n @property\n def default(self):\n return self._default\n \n @property\n def annotation(self):\n return self._annotation\n \n @property\n def kind(self):\n return self._kind\n \n def replace(self,*,name=_void,kind=_void,\n annotation=_void,default=_void):\n ''\n \n if name is _void:\n name=self._name\n \n if kind is _void:\n kind=self._kind\n \n if annotation is _void:\n annotation=self._annotation\n \n if default is _void:\n default=self._default\n \n return type(self)(name,kind,default=default,annotation=annotation)\n \n def __str__(self):\n kind=self.kind\n formatted=self._name\n \n \n if self._annotation is not _empty:\n formatted='{}: {}'.format(formatted,\n formatannotation(self._annotation))\n \n if self._default is not _empty:\n if self._annotation is not _empty:\n formatted='{} = {}'.format(formatted,repr(self._default))\n else :\n formatted='{}={}'.format(formatted,repr(self._default))\n \n if kind ==_VAR_POSITIONAL:\n formatted='*'+formatted\n elif kind ==_VAR_KEYWORD:\n formatted='**'+formatted\n \n return formatted\n \n def __repr__(self):\n return '<{} \"{}\">'.format(self.__class__.__name__,self)\n \n def __hash__(self):\n return hash((self.name,self.kind,self.annotation,self.default))\n \n def __eq__(self,other):\n if self is other:\n return True\n if not isinstance(other,Parameter):\n return NotImplemented\n return (self._name ==other._name and\n self._kind ==other._kind and\n self._default ==other._default and\n self._annotation ==other._annotation)\n \n \nclass BoundArguments:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n __slots__=('arguments','_signature','__weakref__')\n \n def __init__(self,signature,arguments):\n self.arguments=arguments\n self._signature=signature\n \n @property\n def signature(self):\n return self._signature\n \n @property\n def args(self):\n args=[]\n for param_name,param in 
self._signature.parameters.items():\n if param.kind in (_VAR_KEYWORD,_KEYWORD_ONLY):\n break\n \n try :\n arg=self.arguments[param_name]\n except KeyError:\n \n \n break\n else :\n if param.kind ==_VAR_POSITIONAL:\n \n args.extend(arg)\n else :\n \n args.append(arg)\n \n return tuple(args)\n \n @property\n def kwargs(self):\n kwargs={}\n kwargs_started=False\n for param_name,param in self._signature.parameters.items():\n if not kwargs_started:\n if param.kind in (_VAR_KEYWORD,_KEYWORD_ONLY):\n kwargs_started=True\n else :\n if param_name not in self.arguments:\n kwargs_started=True\n continue\n \n if not kwargs_started:\n continue\n \n try :\n arg=self.arguments[param_name]\n except KeyError:\n pass\n else :\n if param.kind ==_VAR_KEYWORD:\n \n kwargs.update(arg)\n else :\n \n kwargs[param_name]=arg\n \n return kwargs\n \n def apply_defaults(self):\n ''\n\n\n\n\n\n\n \n arguments=self.arguments\n new_arguments=[]\n for name,param in self._signature.parameters.items():\n try :\n new_arguments.append((name,arguments[name]))\n except KeyError:\n if param.default is not _empty:\n val=param.default\n elif param.kind is _VAR_POSITIONAL:\n val=()\n elif param.kind is _VAR_KEYWORD:\n val={}\n else :\n \n \n continue\n new_arguments.append((name,val))\n self.arguments=OrderedDict(new_arguments)\n \n def __eq__(self,other):\n if self is other:\n return True\n if not isinstance(other,BoundArguments):\n return NotImplemented\n return (self.signature ==other.signature and\n self.arguments ==other.arguments)\n \n def __setstate__(self,state):\n self._signature=state['_signature']\n self.arguments=state['arguments']\n \n def __getstate__(self):\n return {'_signature':self._signature,'arguments':self.arguments}\n \n def __repr__(self):\n args=[]\n for arg,value in self.arguments.items():\n args.append('{}={!r}'.format(arg,value))\n return '<{} ({})>'.format(self.__class__.__name__,', '.join(args))\n \n \nclass Signature:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n __slots__=('_return_annotation','_parameters')\n \n _parameter_cls=Parameter\n _bound_arguments_cls=BoundArguments\n \n empty=_empty\n \n def __init__(self,parameters=None ,*,return_annotation=_empty,\n __validate_parameters__=True ):\n ''\n\n \n \n if parameters is None :\n params=OrderedDict()\n else :\n if __validate_parameters__:\n params=OrderedDict()\n top_kind=_POSITIONAL_ONLY\n kind_defaults=False\n \n for idx,param in enumerate(parameters):\n kind=param.kind\n name=param.name\n \n if kind top_kind:\n kind_defaults=False\n top_kind=kind\n \n if kind in (_POSITIONAL_ONLY,_POSITIONAL_OR_KEYWORD):\n if param.default is _empty:\n if kind_defaults:\n \n \n \n msg='non-default argument follows default '\\\n 'argument'\n raise ValueError(msg)\n else :\n \n kind_defaults=True\n \n if name in params:\n msg='duplicate parameter name: {!r}'.format(name)\n raise ValueError(msg)\n \n params[name]=param\n else :\n params=OrderedDict(((param.name,param)\n for param in parameters))\n \n self._parameters=types.MappingProxyType(params)\n self._return_annotation=return_annotation\n \n @classmethod\n def from_function(cls,func):\n ''\n \n warnings.warn(\"inspect.Signature.from_function() is deprecated, \"\n \"use Signature.from_callable()\",\n DeprecationWarning,stacklevel=2)\n return _signature_from_function(cls,func)\n \n @classmethod\n def from_builtin(cls,func):\n ''\n \n warnings.warn(\"inspect.Signature.from_builtin() is deprecated, \"\n \"use Signature.from_callable()\",\n DeprecationWarning,stacklevel=2)\n return 
_signature_from_builtin(cls,func)\n \n @classmethod\n def from_callable(cls,obj,*,follow_wrapped=True ):\n ''\n return _signature_from_callable(obj,sigcls=cls,\n follow_wrapper_chains=follow_wrapped)\n \n @property\n def parameters(self):\n return self._parameters\n \n @property\n def return_annotation(self):\n return self._return_annotation\n \n def replace(self,*,parameters=_void,return_annotation=_void):\n ''\n\n\n \n \n if parameters is _void:\n parameters=self.parameters.values()\n \n if return_annotation is _void:\n return_annotation=self._return_annotation\n \n return type(self)(parameters,\n return_annotation=return_annotation)\n \n def _hash_basis(self):\n params=tuple(param for param in self.parameters.values()\n if param.kind !=_KEYWORD_ONLY)\n \n kwo_params={param.name:param for param in self.parameters.values()\n if param.kind ==_KEYWORD_ONLY}\n \n return params,kwo_params,self.return_annotation\n \n def __hash__(self):\n params,kwo_params,return_annotation=self._hash_basis()\n kwo_params=frozenset(kwo_params.values())\n return hash((params,kwo_params,return_annotation))\n \n def __eq__(self,other):\n if self is other:\n return True\n if not isinstance(other,Signature):\n return NotImplemented\n return self._hash_basis()==other._hash_basis()\n \n def _bind(self,args,kwargs,*,partial=False ):\n ''\n \n arguments=OrderedDict()\n \n parameters=iter(self.parameters.values())\n parameters_ex=()\n arg_vals=iter(args)\n \n while True :\n \n \n try :\n arg_val=next(arg_vals)\n except StopIteration:\n \n try :\n param=next(parameters)\n except StopIteration:\n \n \n break\n else :\n if param.kind ==_VAR_POSITIONAL:\n \n \n break\n elif param.name in kwargs:\n if param.kind ==_POSITIONAL_ONLY:\n msg='{arg!r} parameter is positional only, '\\\n 'but was passed as a keyword'\n msg=msg.format(arg=param.name)\n raise TypeError(msg)from None\n parameters_ex=(param,)\n break\n elif (param.kind ==_VAR_KEYWORD or\n param.default is not _empty):\n \n \n \n parameters_ex=(param,)\n break\n else :\n \n \n if partial:\n parameters_ex=(param,)\n break\n else :\n msg='missing a required argument: {arg!r}'\n msg=msg.format(arg=param.name)\n raise TypeError(msg)from None\n else :\n \n try :\n param=next(parameters)\n except StopIteration:\n raise TypeError('too many positional arguments')from None\n else :\n if param.kind in (_VAR_KEYWORD,_KEYWORD_ONLY):\n \n \n raise TypeError(\n 'too many positional arguments')from None\n \n if param.kind ==_VAR_POSITIONAL:\n \n \n \n values=[arg_val]\n values.extend(arg_vals)\n arguments[param.name]=tuple(values)\n break\n \n if param.name in kwargs:\n raise TypeError(\n 'multiple values for argument {arg!r}'.format(\n arg=param.name))from None\n \n arguments[param.name]=arg_val\n \n \n \n kwargs_param=None\n for param in itertools.chain(parameters_ex,parameters):\n if param.kind ==_VAR_KEYWORD:\n \n kwargs_param=param\n continue\n \n if param.kind ==_VAR_POSITIONAL:\n \n \n \n continue\n \n param_name=param.name\n try :\n arg_val=kwargs.pop(param_name)\n except KeyError:\n \n \n \n \n if (not partial and param.kind !=_VAR_POSITIONAL and\n param.default is _empty):\n raise TypeError('missing a required argument: {arg!r}'.\\\n format(arg=param_name))from None\n \n else :\n if param.kind ==_POSITIONAL_ONLY:\n \n \n \n raise TypeError('{arg!r} parameter is positional only, '\n 'but was passed as a keyword'.\\\n format(arg=param.name))\n \n arguments[param_name]=arg_val\n \n if kwargs:\n if kwargs_param is not None :\n \n arguments[kwargs_param.name]=kwargs\n else :\n 
raise TypeError(\n 'got an unexpected keyword argument {arg!r}'.format(\n arg=next(iter(kwargs))))\n \n return self._bound_arguments_cls(self,arguments)\n \n def bind(*args,**kwargs):\n ''\n\n\n \n return args[0]._bind(args[1:],kwargs)\n \n def bind_partial(*args,**kwargs):\n ''\n\n\n \n return args[0]._bind(args[1:],kwargs,partial=True )\n \n def __reduce__(self):\n return (type(self),\n (tuple(self._parameters.values()),),\n {'_return_annotation':self._return_annotation})\n \n def __setstate__(self,state):\n self._return_annotation=state['_return_annotation']\n \n def __repr__(self):\n return '<{} {}>'.format(self.__class__.__name__,self)\n \n def __str__(self):\n result=[]\n render_pos_only_separator=False\n render_kw_only_separator=True\n for param in self.parameters.values():\n formatted=str(param)\n \n kind=param.kind\n \n if kind ==_POSITIONAL_ONLY:\n render_pos_only_separator=True\n elif render_pos_only_separator:\n \n \n result.append('/')\n render_pos_only_separator=False\n \n if kind ==_VAR_POSITIONAL:\n \n \n render_kw_only_separator=False\n elif kind ==_KEYWORD_ONLY and render_kw_only_separator:\n \n \n \n result.append('*')\n \n \n render_kw_only_separator=False\n \n result.append(formatted)\n \n if render_pos_only_separator:\n \n \n result.append('/')\n \n rendered='({})'.format(', '.join(result))\n \n if self.return_annotation is not _empty:\n anno=formatannotation(self.return_annotation)\n rendered +=' -> {}'.format(anno)\n \n return rendered\n \n \ndef signature(obj,*,follow_wrapped=True ):\n ''\n return Signature.from_callable(obj,follow_wrapped=follow_wrapped)\n \n \ndef _main():\n ''\n import argparse\n import importlib\n \n parser=argparse.ArgumentParser()\n parser.add_argument(\n 'object',\n help=\"The object to be analysed. \"\n \"It supports the 'module:qualname' syntax\")\n parser.add_argument(\n '-d','--details',action='store_true',\n help='Display info about the module rather than its source code')\n \n args=parser.parse_args()\n \n target=args.object\n mod_name,has_attrs,attrs=target.partition(\":\")\n try :\n obj=module=importlib.import_module(mod_name)\n except Exception as exc:\n msg=\"Failed to import {} ({}: {})\".format(mod_name,\n type(exc).__name__,\n exc)\n print(msg,file=sys.stderr)\n exit(2)\n \n if has_attrs:\n parts=attrs.split(\".\")\n obj=module\n for part in parts:\n obj=getattr(obj,part)\n \n if module.__name__ in sys.builtin_module_names:\n print(\"Can't get info for builtin modules.\",file=sys.stderr)\n exit(1)\n \n if args.details:\n print('Target: {}'.format(target))\n print('Origin: {}'.format(getsourcefile(module)))\n print('Cached: {}'.format(module.__cached__))\n if obj is module:\n print('Loader: {}'.format(repr(module.__loader__)))\n if hasattr(module,'__path__'):\n print('Submodule search path: {}'.format(module.__path__))\n else :\n try :\n __,lineno=findsource(obj)\n except Exception:\n pass\n else :\n print('Line: {}'.format(lineno))\n \n print('\\n')\n else :\n print(getsource(obj))\n \n \nif __name__ ==\"__main__\":\n _main()\n", ["abc", "argparse", "ast", "builtins", "collections", "collections.abc", "dis", "enum", "functools", "importlib", "importlib.machinery", "itertools", "linecache", "operator", "os", "re", "sys", "token", "tokenize", "types", "warnings"]], "io": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n__author__=(\"Guido van Rossum , \"\n\"Mike Verdone , \"\n\"Mark Russell , \"\n\"Antoine Pitrou , \"\n\"Amaury Forgeot d'Arc , \"\n\"Benjamin Peterson 
\")\n\n__all__=[\"BlockingIOError\",\"open\",\"IOBase\",\"RawIOBase\",\"FileIO\",\n\"BytesIO\",\"StringIO\",\"BufferedIOBase\",\n\"BufferedReader\",\"BufferedWriter\",\"BufferedRWPair\",\n\"BufferedRandom\",\"TextIOBase\",\"TextIOWrapper\",\n\"UnsupportedOperation\",\"SEEK_SET\",\"SEEK_CUR\",\"SEEK_END\"]\n\n\nimport _io\nimport abc\n\nfrom _io import (DEFAULT_BUFFER_SIZE,BlockingIOError,UnsupportedOperation,\nopen,FileIO,BytesIO,StringIO,BufferedReader,\nBufferedWriter,BufferedRWPair,BufferedRandom,\nIncrementalNewlineDecoder,TextIOWrapper)\n\nOpenWrapper=_io.open\n\n\nUnsupportedOperation.__module__=\"io\"\n\n\nSEEK_SET=0\nSEEK_CUR=1\nSEEK_END=2\n\n\n\n\nclass IOBase(_io._IOBase,metaclass=abc.ABCMeta):\n __doc__=_io._IOBase.__doc__\n \nclass RawIOBase(_io._RawIOBase,IOBase):\n __doc__=_io._RawIOBase.__doc__\n \nclass BufferedIOBase(_io._BufferedIOBase,IOBase):\n __doc__=_io._BufferedIOBase.__doc__\n \nclass TextIOBase(_io._TextIOBase,IOBase):\n __doc__=_io._TextIOBase.__doc__\n \nRawIOBase.register(FileIO)\n\nfor klass in (BytesIO,BufferedReader,BufferedWriter,BufferedRandom,\nBufferedRWPair):\n BufferedIOBase.register(klass)\n \nfor klass in (StringIO,TextIOWrapper):\n TextIOBase.register(klass)\ndel klass\n\ntry :\n from _io import _WindowsConsoleIO\nexcept ImportError:\n pass\nelse :\n RawIOBase.register(_WindowsConsoleIO)\n", ["_io", "abc"]], "ipaddress": [".py", "\n\n\n\"\"\"A fast, lightweight IPv4/IPv6 manipulation library in Python.\n\nThis library is used to create/poke/manipulate IPv4 and IPv6 addresses\nand networks.\n\n\"\"\"\n\n__version__='1.0'\n\n\nimport functools\n\nIPV4LENGTH=32\nIPV6LENGTH=128\n\nclass AddressValueError(ValueError):\n ''\n \n \nclass NetmaskValueError(ValueError):\n ''\n \n \ndef ip_address(address):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n try :\n return IPv4Address(address)\n except (AddressValueError,NetmaskValueError):\n pass\n \n try :\n return IPv6Address(address)\n except (AddressValueError,NetmaskValueError):\n pass\n \n raise ValueError('%r does not appear to be an IPv4 or IPv6 address'%\n address)\n \n \ndef ip_network(address,strict=True ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n try :\n return IPv4Network(address,strict)\n except (AddressValueError,NetmaskValueError):\n pass\n \n try :\n return IPv6Network(address,strict)\n except (AddressValueError,NetmaskValueError):\n pass\n \n raise ValueError('%r does not appear to be an IPv4 or IPv6 network'%\n address)\n \n \ndef ip_interface(address):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n try :\n return IPv4Interface(address)\n except (AddressValueError,NetmaskValueError):\n pass\n \n try :\n return IPv6Interface(address)\n except (AddressValueError,NetmaskValueError):\n pass\n \n raise ValueError('%r does not appear to be an IPv4 or IPv6 interface'%\n address)\n \n \ndef v4_int_to_packed(address):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n try :\n return address.to_bytes(4,'big')\n except OverflowError:\n raise ValueError(\"Address negative or too large for IPv4\")\n \n \ndef v6_int_to_packed(address):\n ''\n\n\n\n\n\n\n\n \n try :\n return address.to_bytes(16,'big')\n except OverflowError:\n raise ValueError(\"Address negative or too large for IPv6\")\n \n \ndef _split_optional_netmask(address):\n ''\n addr=str(address).split('/')\n if len(addr)>2:\n raise AddressValueError(\"Only one '/' permitted in %r\"%address)\n return addr\n \n \ndef _find_address_range(addresses):\n ''\n\n\n\n\n\n\n\n \n it=iter(addresses)\n first=last=next(it)\n for ip in it:\n if ip._ip !=last._ip+1:\n yield first,last\n first=ip\n 
last=ip\n yield first,last\n \n \ndef _count_righthand_zero_bits(number,bits):\n ''\n\n\n\n\n\n\n\n\n \n if number ==0:\n return bits\n return min(bits,(~number&(number -1)).bit_length())\n \n \ndef summarize_address_range(first,last):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if (not (isinstance(first,_BaseAddress)and\n isinstance(last,_BaseAddress))):\n raise TypeError('first and last must be IP addresses, not networks')\n if first.version !=last.version:\n raise TypeError(\"%s and %s are not of the same version\"%(\n first,last))\n if first >last:\n raise ValueError('last IP address must be greater than first')\n \n if first.version ==4:\n ip=IPv4Network\n elif first.version ==6:\n ip=IPv6Network\n else :\n raise ValueError('unknown IP version')\n \n ip_bits=first._max_prefixlen\n first_int=first._ip\n last_int=last._ip\n while first_int <=last_int:\n nbits=min(_count_righthand_zero_bits(first_int,ip_bits),\n (last_int -first_int+1).bit_length()-1)\n net=ip((first_int,ip_bits -nbits))\n yield net\n first_int +=1 <=net.broadcast_address:\n continue\n yield net\n last=net\n \n \ndef collapse_addresses(addresses):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n addrs=[]\n ips=[]\n nets=[]\n \n \n for ip in addresses:\n if isinstance(ip,_BaseAddress):\n if ips and ips[-1]._version !=ip._version:\n raise TypeError(\"%s and %s are not of the same version\"%(\n ip,ips[-1]))\n ips.append(ip)\n elif ip._prefixlen ==ip._max_prefixlen:\n if ips and ips[-1]._version !=ip._version:\n raise TypeError(\"%s and %s are not of the same version\"%(\n ip,ips[-1]))\n try :\n ips.append(ip.ip)\n except AttributeError:\n ips.append(ip.network_address)\n else :\n if nets and nets[-1]._version !=ip._version:\n raise TypeError(\"%s and %s are not of the same version\"%(\n ip,nets[-1]))\n nets.append(ip)\n \n \n ips=sorted(set(ips))\n \n \n if ips:\n for first,last in _find_address_range(ips):\n addrs.extend(summarize_address_range(first,last))\n \n return _collapse_addresses_internal(addrs+nets)\n \n \ndef get_mixed_type_key(obj):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if isinstance(obj,_BaseNetwork):\n return obj._get_networks_key()\n elif isinstance(obj,_BaseAddress):\n return obj._get_address_key()\n return NotImplemented\n \n \nclass _IPAddressBase:\n\n ''\n \n __slots__=()\n \n @property\n def exploded(self):\n ''\n return self._explode_shorthand_ip_string()\n \n @property\n def compressed(self):\n ''\n return str(self)\n \n @property\n def reverse_pointer(self):\n ''\n\n\n\n\n\n \n return self._reverse_pointer()\n \n @property\n def version(self):\n msg='%200s has no version specified'%(type(self),)\n raise NotImplementedError(msg)\n \n def _check_int_address(self,address):\n if address <0:\n msg=\"%d (< 0) is not permitted as an IPv%d address\"\n raise AddressValueError(msg %(address,self._version))\n if address >self._ALL_ONES:\n msg=\"%d (>= 2**%d) is not permitted as an IPv%d address\"\n raise AddressValueError(msg %(address,self._max_prefixlen,\n self._version))\n \n def _check_packed_address(self,address,expected_len):\n address_len=len(address)\n if address_len !=expected_len:\n msg=\"%r (len %d != %d) is not permitted as an IPv%d address\"\n raise AddressValueError(msg %(address,address_len,\n expected_len,self._version))\n \n @classmethod\n def _ip_int_from_prefix(cls,prefixlen):\n ''\n\n\n\n\n\n\n\n \n return cls._ALL_ONES ^(cls._ALL_ONES >>prefixlen)\n \n @classmethod\n def _prefix_from_ip_int(cls,ip_int):\n ''\n\n\n\n\n\n\n\n\n\n \n trailing_zeroes=_count_righthand_zero_bits(ip_int,\n 
cls._max_prefixlen)\n prefixlen=cls._max_prefixlen -trailing_zeroes\n leading_ones=ip_int >>trailing_zeroes\n all_ones=(1 <=0:\n if network+n >broadcast:\n raise IndexError('address out of range')\n return self._address_class(network+n)\n else :\n n +=1\n if broadcast+n other.network_address:\n return 1\n \n if self.netmask other.netmask:\n return 1\n return 0\n \n def _get_networks_key(self):\n ''\n\n\n\n\n\n \n return (self._version,self.network_address,self.netmask)\n \n def subnets(self,prefixlen_diff=1,new_prefix=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if self._prefixlen ==self._max_prefixlen:\n yield self\n return\n \n if new_prefix is not None :\n if new_prefix 0')\n new_prefixlen=self._prefixlen+prefixlen_diff\n \n if new_prefixlen >self._max_prefixlen:\n raise ValueError(\n 'prefix length diff %d is invalid for netblock %s'%(\n new_prefixlen,self))\n \n start=int(self.network_address)\n end=int(self.broadcast_address)+1\n step=(int(self.hostmask)+1)>>prefixlen_diff\n for new_addr in range(start,end,step):\n current=self.__class__((new_addr,new_prefixlen))\n yield current\n \n def supernet(self,prefixlen_diff=1,new_prefix=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if self._prefixlen ==0:\n return self\n \n if new_prefix is not None :\n if new_prefix >self._prefixlen:\n raise ValueError('new prefix must be shorter')\n if prefixlen_diff !=1:\n raise ValueError('cannot set prefixlen_diff and new_prefix')\n prefixlen_diff=self._prefixlen -new_prefix\n \n new_prefixlen=self.prefixlen -prefixlen_diff\n if new_prefixlen <0:\n raise ValueError(\n 'current prefixlen is %d, cannot have a prefixlen_diff of %d'%\n (self.prefixlen,prefixlen_diff))\n return self.__class__((\n int(self.network_address)&(int(self.netmask)<=a.broadcast_address)\n except AttributeError:\n raise TypeError(f\"Unable to test subnet containment \"\n f\"between {a} and {b}\")\n \n def subnet_of(self,other):\n ''\n return self._is_subnet_of(self,other)\n \n def supernet_of(self,other):\n ''\n return self._is_subnet_of(other,self)\n \n @property\n def is_reserved(self):\n ''\n\n\n\n\n\n \n return (self.network_address.is_reserved and\n self.broadcast_address.is_reserved)\n \n @property\n def is_link_local(self):\n ''\n\n\n\n\n \n return (self.network_address.is_link_local and\n self.broadcast_address.is_link_local)\n \n @property\n def is_private(self):\n ''\n\n\n\n\n\n \n return (self.network_address.is_private and\n self.broadcast_address.is_private)\n \n @property\n def is_global(self):\n ''\n\n\n\n\n\n \n return not self.is_private\n \n @property\n def is_unspecified(self):\n ''\n\n\n\n\n\n \n return (self.network_address.is_unspecified and\n self.broadcast_address.is_unspecified)\n \n @property\n def is_loopback(self):\n ''\n\n\n\n\n\n \n return (self.network_address.is_loopback and\n self.broadcast_address.is_loopback)\n \n \nclass _BaseV4:\n\n ''\n\n\n\n\n \n \n __slots__=()\n _version=4\n \n _ALL_ONES=(2 **IPV4LENGTH)-1\n _DECIMAL_DIGITS=frozenset('0123456789')\n \n \n _valid_mask_octets=frozenset({255,254,252,248,240,224,192,128,0})\n \n _max_prefixlen=IPV4LENGTH\n \n \n _netmask_cache={}\n \n def _explode_shorthand_ip_string(self):\n return str(self)\n \n @classmethod\n def _make_netmask(cls,arg):\n ''\n\n\n\n\n\n \n if arg not in cls._netmask_cache:\n if isinstance(arg,int):\n prefixlen=arg\n else :\n try :\n \n prefixlen=cls._prefix_from_prefix_string(arg)\n except NetmaskValueError:\n \n \n prefixlen=cls._prefix_from_ip_string(arg)\n 
netmask=IPv4Address(cls._ip_int_from_prefix(prefixlen))\n cls._netmask_cache[arg]=netmask,prefixlen\n return cls._netmask_cache[arg]\n \n @classmethod\n def _ip_int_from_string(cls,ip_str):\n ''\n\n\n\n\n\n\n\n\n\n\n \n if not ip_str:\n raise AddressValueError('Address cannot be empty')\n \n octets=ip_str.split('.')\n if len(octets)!=4:\n raise AddressValueError(\"Expected 4 octets in %r\"%ip_str)\n \n try :\n return int.from_bytes(map(cls._parse_octet,octets),'big')\n except ValueError as exc:\n raise AddressValueError(\"%s in %r\"%(exc,ip_str))from None\n \n @classmethod\n def _parse_octet(cls,octet_str):\n ''\n\n\n\n\n\n\n\n\n\n\n \n if not octet_str:\n raise ValueError(\"Empty octet not permitted\")\n \n if not cls._DECIMAL_DIGITS.issuperset(octet_str):\n msg=\"Only decimal digits permitted in %r\"\n raise ValueError(msg %octet_str)\n \n \n if len(octet_str)>3:\n msg=\"At most 3 characters permitted in %r\"\n raise ValueError(msg %octet_str)\n \n octet_int=int(octet_str,10)\n \n \n \n if octet_int >7 and octet_str[0]=='0':\n msg=\"Ambiguous (octal/decimal) value in %r not permitted\"\n raise ValueError(msg %octet_str)\n if octet_int >255:\n raise ValueError(\"Octet %d (> 255) not permitted\"%octet_int)\n return octet_int\n \n @classmethod\n def _string_from_ip_int(cls,ip_int):\n ''\n\n\n\n\n\n\n\n \n return '.'.join(map(str,ip_int.to_bytes(4,'big')))\n \n def _is_valid_netmask(self,netmask):\n ''\n\n\n\n\n\n\n\n\n\n \n mask=netmask.split('.')\n if len(mask)==4:\n try :\n for x in mask:\n if int(x)not in self._valid_mask_octets:\n return False\n except ValueError:\n \n return False\n for idx,y in enumerate(mask):\n if idx >0 and y >mask[idx -1]:\n return False\n return True\n try :\n netmask=int(netmask)\n except ValueError:\n return False\n return 0 <=netmask <=self._max_prefixlen\n \n def _is_hostmask(self,ip_str):\n ''\n\n\n\n\n\n\n\n \n bits=ip_str.split('.')\n try :\n parts=[x for x in map(int,bits)if x in self._valid_mask_octets]\n except ValueError:\n return False\n if len(parts)!=len(bits):\n return False\n if parts[0]1:\n self._prefixlen=int(address[1])\n else :\n self._prefixlen=self._max_prefixlen\n \n self.network=IPv4Network(address,strict=False )\n self.netmask=self.network.netmask\n self.hostmask=self.network.hostmask\n return\n \n addr=_split_optional_netmask(address)\n IPv4Address.__init__(self,addr[0])\n \n self.network=IPv4Network(address,strict=False )\n self._prefixlen=self.network._prefixlen\n \n self.netmask=self.network.netmask\n self.hostmask=self.network.hostmask\n \n def __str__(self):\n return '%s/%d'%(self._string_from_ip_int(self._ip),\n self.network.prefixlen)\n \n def __eq__(self,other):\n address_equal=IPv4Address.__eq__(self,other)\n if not address_equal or address_equal is NotImplemented:\n return address_equal\n try :\n return self.network ==other.network\n except AttributeError:\n \n \n \n return False\n \n def __lt__(self,other):\n address_less=IPv4Address.__lt__(self,other)\n if address_less is NotImplemented:\n return NotImplemented\n try :\n return (self.network 1 else self._max_prefixlen\n \n \n else :\n args=_split_optional_netmask(address)\n addr=self._ip_int_from_string(args[0])\n mask=args[1]if len(args)==2 else self._max_prefixlen\n \n self.network_address=IPv4Address(addr)\n self.netmask,self._prefixlen=self._make_netmask(mask)\n packed=int(self.network_address)\n if packed&int(self.netmask)!=packed:\n if strict:\n raise ValueError('%s has host bits set'%self)\n else :\n self.network_address=IPv4Address(packed&\n int(self.netmask))\n \n if 
self._prefixlen ==(self._max_prefixlen -1):\n self.hosts=self.__iter__\n \n @property\n @functools.lru_cache()\n def is_global(self):\n ''\n\n\n\n\n\n \n return (not (self.network_address in IPv4Network('100.64.0.0/10')and\n self.broadcast_address in IPv4Network('100.64.0.0/10'))and\n not self.is_private)\n \n \nclass _IPv4Constants:\n _linklocal_network=IPv4Network('169.254.0.0/16')\n \n _loopback_network=IPv4Network('127.0.0.0/8')\n \n _multicast_network=IPv4Network('224.0.0.0/4')\n \n _public_network=IPv4Network('100.64.0.0/10')\n \n _private_networks=[\n IPv4Network('0.0.0.0/8'),\n IPv4Network('10.0.0.0/8'),\n IPv4Network('127.0.0.0/8'),\n IPv4Network('169.254.0.0/16'),\n IPv4Network('172.16.0.0/12'),\n IPv4Network('192.0.0.0/29'),\n IPv4Network('192.0.0.170/31'),\n IPv4Network('192.0.2.0/24'),\n IPv4Network('192.168.0.0/16'),\n IPv4Network('198.18.0.0/15'),\n IPv4Network('198.51.100.0/24'),\n IPv4Network('203.0.113.0/24'),\n IPv4Network('240.0.0.0/4'),\n IPv4Network('255.255.255.255/32'),\n ]\n \n _reserved_network=IPv4Network('240.0.0.0/4')\n \n _unspecified_address=IPv4Address('0.0.0.0')\n \n \nIPv4Address._constants=_IPv4Constants\n\n\nclass _BaseV6:\n\n ''\n\n\n\n\n \n \n __slots__=()\n _version=6\n _ALL_ONES=(2 **IPV6LENGTH)-1\n _HEXTET_COUNT=8\n _HEX_DIGITS=frozenset('0123456789ABCDEFabcdef')\n _max_prefixlen=IPV6LENGTH\n \n \n \n _netmask_cache={}\n \n @classmethod\n def _make_netmask(cls,arg):\n ''\n\n\n\n\n\n \n if arg not in cls._netmask_cache:\n if isinstance(arg,int):\n prefixlen=arg\n else :\n prefixlen=cls._prefix_from_prefix_string(arg)\n netmask=IPv6Address(cls._ip_int_from_prefix(prefixlen))\n cls._netmask_cache[arg]=netmask,prefixlen\n return cls._netmask_cache[arg]\n \n @classmethod\n def _ip_int_from_string(cls,ip_str):\n ''\n\n\n\n\n\n\n\n\n\n\n \n if not ip_str:\n raise AddressValueError('Address cannot be empty')\n \n parts=ip_str.split(':')\n \n \n _min_parts=3\n if len(parts)<_min_parts:\n msg=\"At least %d parts expected in %r\"%(_min_parts,ip_str)\n raise AddressValueError(msg)\n \n \n if '.'in parts[-1]:\n try :\n ipv4_int=IPv4Address(parts.pop())._ip\n except AddressValueError as exc:\n raise AddressValueError(\"%s in %r\"%(exc,ip_str))from None\n parts.append('%x'%((ipv4_int >>16)&0xFFFF))\n parts.append('%x'%(ipv4_int&0xFFFF))\n \n \n \n \n _max_parts=cls._HEXTET_COUNT+1\n if len(parts)>_max_parts:\n msg=\"At most %d colons permitted in %r\"%(_max_parts -1,ip_str)\n raise AddressValueError(msg)\n \n \n \n skip_index=None\n for i in range(1,len(parts)-1):\n if not parts[i]:\n if skip_index is not None :\n \n msg=\"At most one '::' permitted in %r\"%ip_str\n raise AddressValueError(msg)\n skip_index=i\n \n \n \n if skip_index is not None :\n \n parts_hi=skip_index\n parts_lo=len(parts)-skip_index -1\n if not parts[0]:\n parts_hi -=1\n if parts_hi:\n msg=\"Leading ':' only permitted as part of '::' in %r\"\n raise AddressValueError(msg %ip_str)\n if not parts[-1]:\n parts_lo -=1\n if parts_lo:\n msg=\"Trailing ':' only permitted as part of '::' in %r\"\n raise AddressValueError(msg %ip_str)\n parts_skipped=cls._HEXTET_COUNT -(parts_hi+parts_lo)\n if parts_skipped <1:\n msg=\"Expected at most %d other parts with '::' in %r\"\n raise AddressValueError(msg %(cls._HEXTET_COUNT -1,ip_str))\n else :\n \n \n \n if len(parts)!=cls._HEXTET_COUNT:\n msg=\"Exactly %d parts expected without '::' in %r\"\n raise AddressValueError(msg %(cls._HEXTET_COUNT,ip_str))\n if not parts[0]:\n msg=\"Leading ':' only permitted as part of '::' in %r\"\n raise AddressValueError(msg 
%ip_str)\n if not parts[-1]:\n msg=\"Trailing ':' only permitted as part of '::' in %r\"\n raise AddressValueError(msg %ip_str)\n parts_hi=len(parts)\n parts_lo=0\n parts_skipped=0\n \n try :\n \n ip_int=0\n for i in range(parts_hi):\n ip_int <<=16\n ip_int |=cls._parse_hextet(parts[i])\n ip_int <<=16 *parts_skipped\n for i in range(-parts_lo,0):\n ip_int <<=16\n ip_int |=cls._parse_hextet(parts[i])\n return ip_int\n except ValueError as exc:\n raise AddressValueError(\"%s in %r\"%(exc,ip_str))from None\n \n @classmethod\n def _parse_hextet(cls,hextet_str):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n \n if not cls._HEX_DIGITS.issuperset(hextet_str):\n raise ValueError(\"Only hex digits permitted in %r\"%hextet_str)\n \n \n if len(hextet_str)>4:\n msg=\"At most 4 characters permitted in %r\"\n raise ValueError(msg %hextet_str)\n \n return int(hextet_str,16)\n \n @classmethod\n def _compress_hextets(cls,hextets):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n best_doublecolon_start=-1\n best_doublecolon_len=0\n doublecolon_start=-1\n doublecolon_len=0\n for index,hextet in enumerate(hextets):\n if hextet =='0':\n doublecolon_len +=1\n if doublecolon_start ==-1:\n \n doublecolon_start=index\n if doublecolon_len >best_doublecolon_len:\n \n best_doublecolon_len=doublecolon_len\n best_doublecolon_start=doublecolon_start\n else :\n doublecolon_len=0\n doublecolon_start=-1\n \n if best_doublecolon_len >1:\n best_doublecolon_end=(best_doublecolon_start+\n best_doublecolon_len)\n \n if best_doublecolon_end ==len(hextets):\n hextets +=['']\n hextets[best_doublecolon_start:best_doublecolon_end]=['']\n \n if best_doublecolon_start ==0:\n hextets=['']+hextets\n \n return hextets\n \n @classmethod\n def _string_from_ip_int(cls,ip_int=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n \n if ip_int is None :\n ip_int=int(cls._ip)\n \n if ip_int >cls._ALL_ONES:\n raise ValueError('IPv6 address is too large')\n \n hex_str='%032x'%ip_int\n hextets=['%x'%int(hex_str[x:x+4],16)for x in range(0,32,4)]\n \n hextets=cls._compress_hextets(hextets)\n return ':'.join(hextets)\n \n def _explode_shorthand_ip_string(self):\n ''\n\n\n\n\n\n\n\n \n if isinstance(self,IPv6Network):\n ip_str=str(self.network_address)\n elif isinstance(self,IPv6Interface):\n ip_str=str(self.ip)\n else :\n ip_str=str(self)\n \n ip_int=self._ip_int_from_string(ip_str)\n hex_str='%032x'%ip_int\n parts=[hex_str[x:x+4]for x in range(0,32,4)]\n if isinstance(self,(_BaseNetwork,IPv6Interface)):\n return '%s/%d'%(':'.join(parts),self._prefixlen)\n return ':'.join(parts)\n \n def _reverse_pointer(self):\n ''\n\n\n\n \n reverse_chars=self.exploded[::-1].replace(':','')\n return '.'.join(reverse_chars)+'.ip6.arpa'\n \n @property\n def max_prefixlen(self):\n return self._max_prefixlen\n \n @property\n def version(self):\n return self._version\n \n \nclass IPv6Address(_BaseV6,_BaseAddress):\n\n ''\n \n __slots__=('_ip','__weakref__')\n \n def __init__(self,address):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n if isinstance(address,int):\n self._check_int_address(address)\n self._ip=address\n return\n \n \n if isinstance(address,bytes):\n self._check_packed_address(address,16)\n self._ip=int.from_bytes(address,'big')\n return\n \n \n \n addr_str=str(address)\n if '/'in addr_str:\n raise AddressValueError(\"Unexpected '/' in %r\"%address)\n self._ip=self._ip_int_from_string(addr_str)\n \n @property\n def packed(self):\n ''\n return v6_int_to_packed(self._ip)\n \n @property\n def is_multicast(self):\n ''\n\n\n\n\n\n \n return self in self._constants._multicast_network\n \n @property\n def 
is_reserved(self):\n ''\n\n\n\n\n\n \n return any(self in x for x in self._constants._reserved_networks)\n \n @property\n def is_link_local(self):\n ''\n\n\n\n\n \n return self in self._constants._linklocal_network\n \n @property\n def is_site_local(self):\n ''\n\n\n\n\n\n\n\n\n \n return self in self._constants._sitelocal_network\n \n @property\n @functools.lru_cache()\n def is_private(self):\n ''\n\n\n\n\n\n \n return any(self in net for net in self._constants._private_networks)\n \n @property\n def is_global(self):\n ''\n\n\n\n\n\n \n return not self.is_private\n \n @property\n def is_unspecified(self):\n ''\n\n\n\n\n\n \n return self._ip ==0\n \n @property\n def is_loopback(self):\n ''\n\n\n\n\n\n \n return self._ip ==1\n \n @property\n def ipv4_mapped(self):\n ''\n\n\n\n\n\n \n if (self._ip >>32)!=0xFFFF:\n return None\n return IPv4Address(self._ip&0xFFFFFFFF)\n \n @property\n def teredo(self):\n ''\n\n\n\n\n\n\n \n if (self._ip >>96)!=0x20010000:\n return None\n return (IPv4Address((self._ip >>64)&0xFFFFFFFF),\n IPv4Address(~self._ip&0xFFFFFFFF))\n \n @property\n def sixtofour(self):\n ''\n\n\n\n\n\n \n if (self._ip >>112)!=0x2002:\n return None\n return IPv4Address((self._ip >>80)&0xFFFFFFFF)\n \n \nclass IPv6Interface(IPv6Address):\n\n def __init__(self,address):\n if isinstance(address,(bytes,int)):\n IPv6Address.__init__(self,address)\n self.network=IPv6Network(self._ip)\n self._prefixlen=self._max_prefixlen\n return\n if isinstance(address,tuple):\n IPv6Address.__init__(self,address[0])\n if len(address)>1:\n self._prefixlen=int(address[1])\n else :\n self._prefixlen=self._max_prefixlen\n self.network=IPv6Network(address,strict=False )\n self.netmask=self.network.netmask\n self.hostmask=self.network.hostmask\n return\n \n addr=_split_optional_netmask(address)\n IPv6Address.__init__(self,addr[0])\n self.network=IPv6Network(address,strict=False )\n self.netmask=self.network.netmask\n self._prefixlen=self.network._prefixlen\n self.hostmask=self.network.hostmask\n \n def __str__(self):\n return '%s/%d'%(self._string_from_ip_int(self._ip),\n self.network.prefixlen)\n \n def __eq__(self,other):\n address_equal=IPv6Address.__eq__(self,other)\n if not address_equal or address_equal is NotImplemented:\n return address_equal\n try :\n return self.network ==other.network\n except AttributeError:\n \n \n \n return False\n \n def __lt__(self,other):\n address_less=IPv6Address.__lt__(self,other)\n if address_less is NotImplemented:\n return NotImplemented\n try :\n return (self.network 1 else self._max_prefixlen\n \n \n else :\n args=_split_optional_netmask(address)\n addr=self._ip_int_from_string(args[0])\n mask=args[1]if len(args)==2 else self._max_prefixlen\n \n self.network_address=IPv6Address(addr)\n self.netmask,self._prefixlen=self._make_netmask(mask)\n packed=int(self.network_address)\n if packed&int(self.netmask)!=packed:\n if strict:\n raise ValueError('%s has host bits set'%self)\n else :\n self.network_address=IPv6Address(packed&\n int(self.netmask))\n \n if self._prefixlen ==(self._max_prefixlen -1):\n self.hosts=self.__iter__\n \n def hosts(self):\n ''\n\n\n\n\n \n network=int(self.network_address)\n broadcast=int(self.broadcast_address)\n for x in range(network+1,broadcast+1):\n yield self._address_class(x)\n \n @property\n def is_site_local(self):\n ''\n\n\n\n\n\n\n\n\n \n return (self.network_address.is_site_local and\n self.broadcast_address.is_site_local)\n \n \nclass _IPv6Constants:\n\n _linklocal_network=IPv6Network('fe80::/10')\n \n 
_multicast_network=IPv6Network('ff00::/8')\n \n _private_networks=[\n IPv6Network('::1/128'),\n IPv6Network('::/128'),\n IPv6Network('::ffff:0:0/96'),\n IPv6Network('100::/64'),\n IPv6Network('2001::/23'),\n IPv6Network('2001:2::/48'),\n IPv6Network('2001:db8::/32'),\n IPv6Network('2001:10::/28'),\n IPv6Network('fc00::/7'),\n IPv6Network('fe80::/10'),\n ]\n \n _reserved_networks=[\n IPv6Network('::/8'),IPv6Network('100::/8'),\n IPv6Network('200::/7'),IPv6Network('400::/6'),\n IPv6Network('800::/5'),IPv6Network('1000::/4'),\n IPv6Network('4000::/3'),IPv6Network('6000::/3'),\n IPv6Network('8000::/3'),IPv6Network('A000::/3'),\n IPv6Network('C000::/3'),IPv6Network('E000::/4'),\n IPv6Network('F000::/5'),IPv6Network('F800::/6'),\n IPv6Network('FE00::/9'),\n ]\n \n _sitelocal_network=IPv6Network('fec0::/10')\n \n \nIPv6Address._constants=_IPv6Constants\n", ["functools"]], "itertools": [".py", "import operator\n\nclass accumulate:\n def __init__(self,iterable,func=operator.add):\n self.it=iter(iterable)\n self._total=None\n self.func=func\n \n def __iter__(self):\n return self\n \n def __next__(self):\n if not self._total:\n self._total=next(self.it)\n return self._total\n else :\n element=next(self.it)\n try :\n self._total=self.func(self._total,element)\n except :\n raise TypeError(\"unsupported operand type\")\n return self._total\n \n \n \nclass chain:\n def __init__(self,*iterables):\n self._iterables_iter=iter(map(iter,iterables))\n \n self._cur_iterable_iter=iter([])\n \n def __iter__(self):\n return self\n \n def __next__(self):\n while True :\n try :\n return next(self._cur_iterable_iter)\n except StopIteration:\n self._cur_iterable_iter=next(self._iterables_iter)\n \n @classmethod\n def from_iterable(cls,iterable):\n for it in iterable:\n for element in it:\n yield element\n \nclass combinations:\n def __init__(self,iterable,r):\n self.pool=tuple(iterable)\n self.n=len(self.pool)\n self.r=r\n self.indices=list(range(self.r))\n self.zero=False\n \n def __iter__(self):\n return self\n \n def __next__(self):\n if self.r >self.n:\n raise StopIteration\n if not self.zero:\n self.zero=True\n return tuple(self.pool[i]for i in self.indices)\n else :\n try :\n for i in reversed(range(self.r)):\n if self.indices[i]!=i+self.n -self.r:\n break\n self.indices[i]+=1\n for j in range(i+1,self.r):\n self.indices[j]=self.indices[j -1]+1\n return tuple(self.pool[i]for i in self.indices)\n except :\n raise StopIteration\n \nclass combinations_with_replacement:\n def __init__(self,iterable,r):\n self.pool=tuple(iterable)\n self.n=len(self.pool)\n self.r=r\n self.indices=[0]*self.r\n self.zero=False\n \n def __iter__(self):\n return self\n \n def __next__(self):\n if not self.n and self.r:\n raise StopIteration\n if not self.zero:\n self.zero=True\n return tuple(self.pool[i]for i in self.indices)\n else :\n try :\n for i in reversed(range(self.r)):\n if self.indices[i]!=self.n -1:\n break\n self.indices[i:]=[self.indices[i]+1]*(self.r -i)\n return tuple(self.pool[i]for i in self.indices)\n except :\n raise StopIteration\n \n \n \nclass compress:\n def __init__(self,data,selectors):\n self.data=iter(data)\n self.selectors=iter(selectors)\n \n def __iter__(self):\n return self\n \n def __next__(self):\n while True :\n next_item=next(self.data)\n next_selector=next(self.selectors)\n if bool(next_selector):\n return next_item\n \n \n \n \nclass count:\n ''\n\n\n\n \n def __init__(self,start=0,step=1):\n if not isinstance(start,(int,float)):\n raise TypeError('a number is required')\n self.times=start -step\n 
self.step=step\n \n def __iter__(self):\n return self\n \n def __next__(self):\n self.times +=self.step\n return self.times\n \n def __repr__(self):\n return 'count(%d)'%(self.times+self.step)\n \n \n \nclass cycle:\n def __init__(self,iterable):\n self._cur_iter=iter(iterable)\n self._saved=[]\n self._must_save=True\n \n def __iter__(self):\n return self\n \n def __next__(self):\n try :\n next_elt=next(self._cur_iter)\n if self._must_save:\n self._saved.append(next_elt)\n except StopIteration:\n self._cur_iter=iter(self._saved)\n next_elt=next(self._cur_iter)\n self._must_save=False\n return next_elt\n \n \n \nclass dropwhile:\n def __init__(self,predicate,iterable):\n self._predicate=predicate\n self._iter=iter(iterable)\n self._dropped=False\n \n def __iter__(self):\n return self\n \n def __next__(self):\n value=next(self._iter)\n if self._dropped:\n return value\n while self._predicate(value):\n value=next(self._iter)\n self._dropped=True\n return value\n \n \n \nclass filterfalse:\n def __init__(self,predicate,iterable):\n \n self._iter=iter(iterable)\n if predicate is None :\n self._predicate=bool\n else :\n self._predicate=predicate\n \n def __iter__(self):\n return self\n def __next__(self):\n next_elt=next(self._iter)\n while True :\n if not self._predicate(next_elt):\n return next_elt\n next_elt=next(self._iter)\n \nclass groupby:\n\n\n def __init__(self,iterable,key=None ):\n if key is None :\n key=lambda x:x\n self.keyfunc=key\n self.it=iter(iterable)\n self.tgtkey=self.currkey=self.currvalue=object()\n def __iter__(self):\n return self\n def __next__(self):\n while self.currkey ==self.tgtkey:\n self.currvalue=next(self.it)\n self.currkey=self.keyfunc(self.currvalue)\n self.tgtkey=self.currkey\n return (self.currkey,self._grouper(self.tgtkey))\n def _grouper(self,tgtkey):\n while self.currkey ==tgtkey:\n yield self.currvalue\n self.currvalue=next(self.it)\n self.currkey=self.keyfunc(self.currvalue)\n \n \n \nclass islice:\n def __init__(self,iterable,*args):\n s=slice(*args)\n self.start,self.stop,self.step=s.start or 0,s.stop,s.step\n if not isinstance(self.start,int):\n raise ValueError(\"Start argument must be an integer\")\n if self.stop !=None and not isinstance(self.stop,int):\n raise ValueError(\"Stop argument must be an integer or None\")\n if self.step is None :\n self.step=1\n if self.start <0 or (self.stop !=None and self.stop <0\n )or self.step <=0:\n raise ValueError(\"indices for islice() must be positive\")\n self.it=iter(iterable)\n self.donext=None\n self.cnt=0\n \n def __iter__(self):\n return self\n \n def __next__(self):\n nextindex=self.start\n if self.stop !=None and nextindex >=self.stop:\n raise StopIteration\n while self.cnt <=nextindex:\n nextitem=next(self.it)\n self.cnt +=1\n self.start +=self.step\n return nextitem\n \nclass permutations:\n def __init__(self,iterable,r=None ):\n self.pool=tuple(iterable)\n self.n=len(self.pool)\n self.r=self.n if r is None else r\n self.indices=list(range(self.n))\n self.cycles=list(range(self.n,self.n -self.r,-1))\n self.zero=False\n self.stop=False\n \n def __iter__(self):\n return self\n \n def __next__(self):\n indices=self.indices\n if self.r >self.n:\n raise StopIteration\n if not self.zero:\n self.zero=True\n return tuple(self.pool[i]for i in indices[:self.r])\n \n i=self.r -1\n while i >=0:\n j=self.cycles[i]-1\n if j >0:\n self.cycles[i]=j\n indices[i],indices[-j]=indices[-j],indices[i]\n return tuple(self.pool[i]for i in indices[:self.r])\n self.cycles[i]=len(indices)-i\n n1=len(indices)-1\n assert n1 
>=0\n num=indices[i]\n for k in range(i,n1):\n indices[k]=indices[k+1]\n indices[n1]=num\n i -=1\n raise StopIteration\n \n \ndef product(*args,repeat=1):\n\n\n pools=[tuple(pool)for pool in args]*repeat\n result=[[]]\n for pool in pools:\n result=[x+[y]for x in result for y in pool]\n for prod in result:\n yield tuple(prod)\n \n \n \n \n \n \n \n \nclass _product:\n def __init__(self,*args,**kw):\n if len(kw)>1:\n raise TypeError(\"product() takes at most 1 argument (%d given)\"%\n len(kw))\n self.repeat=kw.get('repeat',1)\n if not isinstance(self.repeat,int):\n raise TypeError(\"integer argument expected, got %s\"%\n type(self.repeat))\n self.gears=[x for x in args]*self.repeat\n self.num_gears=len(self.gears)\n \n self.indicies=[(0,len(self.gears[x]))\n for x in range(0,self.num_gears)]\n self.cont=True\n self.zero=False\n \n def roll_gears(self):\n \n \n \n should_carry=True\n for n in range(0,self.num_gears):\n nth_gear=self.num_gears -n -1\n if should_carry:\n count,lim=self.indicies[nth_gear]\n count +=1\n if count ==lim and nth_gear ==0:\n self.cont=False\n if count ==lim:\n should_carry=True\n count=0\n else :\n should_carry=False\n self.indicies[nth_gear]=(count,lim)\n else :\n break\n \n def __iter__(self):\n return self\n \n def __next__(self):\n if self.zero:\n raise StopIteration\n if self.repeat >0:\n if not self.cont:\n raise StopIteration\n l=[]\n for x in range(0,self.num_gears):\n index,limit=self.indicies[x]\n print('itertools 353',self.gears,x,index)\n l.append(self.gears[x][index])\n self.roll_gears()\n return tuple(l)\n elif self.repeat ==0:\n self.zero=True\n return ()\n else :\n raise ValueError(\"repeat argument cannot be negative\")\n \n \n \nclass repeat:\n def __init__(self,obj,times=None ):\n self._obj=obj\n if times is not None :\n range(times)\n if times <0:\n times=0\n self._times=times\n \n def __iter__(self):\n return self\n \n def __next__(self):\n \n if self._times is not None :\n if self._times <=0:\n raise StopIteration()\n self._times -=1\n return self._obj\n \n def __repr__(self):\n if self._times is not None :\n return 'repeat(%r, %r)'%(self._obj,self._times)\n else :\n return 'repeat(%r)'%(self._obj,)\n \n def __len__(self):\n if self._times ==-1 or self._times is None :\n raise TypeError(\"len() of uniszed object\")\n return self._times\n \n \n \nclass starmap(object):\n def __init__(self,function,iterable):\n self._func=function\n self._iter=iter(iterable)\n \n def __iter__(self):\n return self\n \n def __next__(self):\n t=next(self._iter)\n return self._func(*t)\n \n \n \nclass takewhile(object):\n def __init__(self,predicate,iterable):\n self._predicate=predicate\n self._iter=iter(iterable)\n \n def __iter__(self):\n return self\n \n def __next__(self):\n value=next(self._iter)\n if not self._predicate(value):\n raise StopIteration()\n return value\n \n \n \nclass TeeData(object):\n def __init__(self,iterator):\n self.data=[]\n self._iter=iterator\n \n def __getitem__(self,i):\n \n while i >=len(self.data):\n self.data.append(next(self._iter))\n return self.data[i]\n \n \nclass TeeObject(object):\n def __init__(self,iterable=None ,tee_data=None ):\n if tee_data:\n self.tee_data=tee_data\n self.pos=0\n \n elif isinstance(iterable,TeeObject):\n self.tee_data=iterable.tee_data\n self.pos=iterable.pos\n else :\n self.tee_data=TeeData(iter(iterable))\n self.pos=0\n \n def __next__(self):\n data=self.tee_data[self.pos]\n self.pos +=1\n return data\n \n def __iter__(self):\n return self\n \n \ndef tee(iterable,n=2):\n if 
isinstance(iterable,TeeObject):\n return tuple([iterable]+\n [TeeObject(tee_data=iterable.tee_data)for i in range(n -1)])\n tee_data=TeeData(iter(iterable))\n return tuple([TeeObject(tee_data=tee_data)for i in range(n)])\n \nclass zip_longest:\n def __init__(self,*args,fillvalue=None ):\n self.args=[iter(arg)for arg in args]\n self.fillvalue=fillvalue\n self.units=len(args)\n \n def __iter__(self):\n return self\n \n def __next__(self):\n temp=[]\n nb=0\n for i in range(self.units):\n try :\n temp.append(next(self.args[i]))\n nb +=1\n except StopIteration:\n temp.append(self.fillvalue)\n if nb ==0:\n raise StopIteration\n return tuple(temp)\n", ["operator"]], "keyword": [".py", "#! /usr/bin/env python3\n\n\"\"\"Keywords (from \"graminit.c\")\n\nThis file is automatically generated; please don't muck it up!\n\nTo update the symbols in this file, 'cd' to the top directory of\nthe python source tree after building the interpreter and run:\n\n ./python Lib/keyword.py\n\"\"\"\n\n__all__=[\"iskeyword\",\"kwlist\"]\n\nkwlist=[\n\n'False',\n'None',\n'True',\n'and',\n'as',\n'assert',\n'async',\n'await',\n'break',\n'class',\n'continue',\n'def',\n'del',\n'elif',\n'else',\n'except',\n'finally',\n'for',\n'from',\n'global',\n'if',\n'import',\n'in',\n'is',\n'lambda',\n'nonlocal',\n'not',\n'or',\n'pass',\n'raise',\n'return',\n'try',\n'while',\n'with',\n'yield',\n\n]\n\niskeyword=frozenset(kwlist).__contains__\n\ndef main():\n import sys,re\n \n args=sys.argv[1:]\n iptfile=args and args[0]or \"Python/graminit.c\"\n if len(args)>1:optfile=args[1]\n else :optfile=\"Lib/keyword.py\"\n \n \n \n with open(optfile,newline='')as fp:\n format=fp.readlines()\n nl=format[0][len(format[0].strip()):]if format else '\\n'\n \n \n with open(iptfile)as fp:\n strprog=re.compile('\"([^\"]+)\"')\n lines=[]\n for line in fp:\n if '{1, \"'in line:\n match=strprog.search(line)\n if match:\n lines.append(\" '\"+match.group(1)+\"',\"+nl)\n lines.sort()\n \n \n try :\n start=format.index(\"#--start keywords--\"+nl)+1\n end=format.index(\"#--end keywords--\"+nl)\n format[start:end]=lines\n except ValueError:\n sys.stderr.write(\"target does not contain format markers\\n\")\n sys.exit(1)\n \n \n with open(optfile,'w',newline='')as fp:\n fp.writelines(format)\n \nif __name__ ==\"__main__\":\n main()\n", ["re", "sys"]], "linecache": [".py", "''\n\n\n\n\n\n\nimport functools\nimport sys\nimport os\nimport tokenize\n\n__all__=[\"getline\",\"clearcache\",\"checkcache\"]\n\ndef getline(filename,lineno,module_globals=None ):\n lines=getlines(filename,module_globals)\n if 1 <=lineno <=len(lines):\n return lines[lineno -1]\n else :\n return ''\n \n \n \n \n \n \ncache={}\n\n\ndef clearcache():\n ''\n \n global cache\n cache={}\n \n \ndef getlines(filename,module_globals=None ):\n ''\n \n \n if filename in cache:\n entry=cache[filename]\n if len(entry)!=1:\n return cache[filename][2]\n \n try :\n return updatecache(filename,module_globals)\n except MemoryError:\n clearcache()\n return []\n \n \ndef checkcache(filename=None ):\n ''\n \n \n if filename is None :\n filenames=list(cache.keys())\n else :\n if filename in cache:\n filenames=[filename]\n else :\n return\n \n for filename in filenames:\n entry=cache[filename]\n if len(entry)==1:\n \n continue\n size,mtime,lines,fullname=entry\n if mtime is None :\n continue\n try :\n stat=os.stat(fullname)\n except OSError:\n del cache[filename]\n continue\n if size !=stat.st_size or mtime !=stat.st_mtime:\n del cache[filename]\n \n \ndef updatecache(filename,module_globals=None ):\n ''\n\n \n \n if 
filename in cache:\n if len(cache[filename])!=1:\n del cache[filename]\n if not filename or (filename.startswith('<')and filename.endswith('>')):\n return []\n \n fullname=filename\n try :\n stat=os.stat(fullname)\n except OSError:\n basename=filename\n \n \n \n if lazycache(filename,module_globals):\n try :\n data=cache[filename][0]()\n except (ImportError,OSError):\n pass\n else :\n if data is None :\n \n \n return []\n cache[filename]=(\n len(data),None ,\n [line+'\\n'for line in data.splitlines()],fullname\n )\n return cache[filename][2]\n \n \n \n if os.path.isabs(filename):\n return []\n \n for dirname in sys.path:\n try :\n fullname=os.path.join(dirname,basename)\n except (TypeError,AttributeError):\n \n continue\n try :\n stat=os.stat(fullname)\n break\n except OSError:\n pass\n else :\n return []\n try :\n with tokenize.open(fullname)as fp:\n lines=fp.readlines()\n except OSError:\n return []\n if lines and not lines[-1].endswith('\\n'):\n lines[-1]+='\\n'\n size,mtime=stat.st_size,stat.st_mtime\n cache[filename]=size,mtime,lines,fullname\n return lines\n \n \ndef lazycache(filename,module_globals):\n ''\n\n\n\n\n\n\n\n\n\n\n \n if filename in cache:\n if len(cache[filename])==1:\n return True\n else :\n return False\n if not filename or (filename.startswith('<')and filename.endswith('>')):\n return False\n \n if module_globals and '__loader__'in module_globals:\n name=module_globals.get('__name__')\n loader=module_globals['__loader__']\n get_source=getattr(loader,'get_source',None )\n \n if name and get_source:\n get_lines=functools.partial(get_source,name)\n cache[filename]=(get_lines,)\n return True\n return False\n", ["functools", "os", "sys", "tokenize"]], "locale": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\nimport sys\nimport encodings\nimport encodings.aliases\nimport re\nimport _collections_abc\nfrom builtins import str as _builtin_str\nimport functools\n\n\n\n\n\n\n\n__all__=[\"getlocale\",\"getdefaultlocale\",\"getpreferredencoding\",\"Error\",\n\"setlocale\",\"resetlocale\",\"localeconv\",\"strcoll\",\"strxfrm\",\n\"str\",\"atof\",\"atoi\",\"format\",\"format_string\",\"currency\",\n\"normalize\",\"LC_CTYPE\",\"LC_COLLATE\",\"LC_TIME\",\"LC_MONETARY\",\n\"LC_NUMERIC\",\"LC_ALL\",\"CHAR_MAX\"]\n\ndef _strcoll(a,b):\n ''\n\n \n return (a >b)-(a .*?)\\))?'\nr'(?P[-#0-9 +*.hlL]*?)[eEfFgGdiouxXcrs%]')\n\ndef _format(percent,value,grouping=False ,monetary=False ,*additional):\n if additional:\n formatted=percent %((value,)+additional)\n else :\n formatted=percent %value\n \n if percent[-1]in 'eEfFgG':\n seps=0\n parts=formatted.split('.')\n if grouping:\n parts[0],seps=_group(parts[0],monetary=monetary)\n decimal_point=localeconv()[monetary and 'mon_decimal_point'\n or 'decimal_point']\n formatted=decimal_point.join(parts)\n if seps:\n formatted=_strip_padding(formatted,seps)\n elif percent[-1]in 'diu':\n seps=0\n if grouping:\n formatted,seps=_group(formatted,monetary=monetary)\n if seps:\n formatted=_strip_padding(formatted,seps)\n return formatted\n \ndef format_string(f,val,grouping=False ,monetary=False ):\n ''\n\n\n\n\n \n percents=list(_percent_re.finditer(f))\n new_f=_percent_re.sub('%s',f)\n \n if isinstance(val,_collections_abc.Mapping):\n new_val=[]\n for perc in percents:\n if perc.group()[-1]=='%':\n new_val.append('%')\n else :\n new_val.append(_format(perc.group(),val,grouping,monetary))\n else :\n if not isinstance(val,tuple):\n val=(val,)\n new_val=[]\n i=0\n for perc in percents:\n if perc.group()[-1]=='%':\n new_val.append('%')\n else :\n 
starcount=perc.group('modifiers').count('*')\n new_val.append(_format(perc.group(),\n val[i],\n grouping,\n monetary,\n *val[i+1:i+1+starcount]))\n i +=(1+starcount)\n val=tuple(new_val)\n \n return new_f %val\n \ndef format(percent,value,grouping=False ,monetary=False ,*additional):\n ''\n import warnings\n warnings.warn(\n \"This method will be removed in a future version of Python. \"\n \"Use 'locale.format_string()' instead.\",\n DeprecationWarning,stacklevel=2\n )\n \n match=_percent_re.match(percent)\n if not match or len(match.group())!=len(percent):\n raise ValueError((\"format() must be given exactly one %%char \"\n \"format specifier, %s not valid\")%repr(percent))\n return _format(percent,value,grouping,monetary,*additional)\n \ndef currency(val,symbol=True ,grouping=False ,international=False ):\n ''\n \n conv=localeconv()\n \n \n digits=conv[international and 'int_frac_digits'or 'frac_digits']\n if digits ==127:\n raise ValueError(\"Currency formatting is not possible using \"\n \"the 'C' locale.\")\n \n s=_format('%%.%if'%digits,abs(val),grouping,monetary=True )\n \n s='<'+s+'>'\n \n if symbol:\n smb=conv[international and 'int_curr_symbol'or 'currency_symbol']\n precedes=conv[val <0 and 'n_cs_precedes'or 'p_cs_precedes']\n separated=conv[val <0 and 'n_sep_by_space'or 'p_sep_by_space']\n \n if precedes:\n s=smb+(separated and ' 'or '')+s\n else :\n s=s+(separated and ' 'or '')+smb\n \n sign_pos=conv[val <0 and 'n_sign_posn'or 'p_sign_posn']\n sign=conv[val <0 and 'negative_sign'or 'positive_sign']\n \n if sign_pos ==0:\n s='('+s+')'\n elif sign_pos ==1:\n s=sign+s\n elif sign_pos ==2:\n s=s+sign\n elif sign_pos ==3:\n s=s.replace('<',sign)\n elif sign_pos ==4:\n s=s.replace('>',sign)\n else :\n \n \n s=sign+s\n \n return s.replace('<','').replace('>','')\n \ndef str(val):\n ''\n return _format(\"%.12g\",val)\n \ndef delocalize(string):\n ''\n \n conv=localeconv()\n \n \n ts=conv['thousands_sep']\n if ts:\n string=string.replace(ts,'')\n \n \n dd=conv['decimal_point']\n if dd:\n string=string.replace(dd,'.')\n return string\n \ndef atof(string,func=float):\n ''\n return func(delocalize(string))\n \ndef atoi(string):\n ''\n return int(delocalize(string))\n \ndef _test():\n setlocale(LC_ALL,\"\")\n \n s1=format_string(\"%d\",123456789,1)\n print(s1,\"is\",atoi(s1))\n \n s1=str(3.14)\n print(s1,\"is\",atof(s1))\n \n \n \n \n \n \n \n \n_setlocale=setlocale\n\ndef _replace_encoding(code,encoding):\n if '.'in code:\n langname=code[:code.index('.')]\n else :\n langname=code\n \n norm_encoding=encodings.normalize_encoding(encoding)\n \n norm_encoding=encodings.aliases.aliases.get(norm_encoding.lower(),\n norm_encoding)\n \n encoding=norm_encoding\n norm_encoding=norm_encoding.lower()\n if norm_encoding in locale_encoding_alias:\n encoding=locale_encoding_alias[norm_encoding]\n else :\n norm_encoding=norm_encoding.replace('_','')\n norm_encoding=norm_encoding.replace('-','')\n if norm_encoding in locale_encoding_alias:\n encoding=locale_encoding_alias[norm_encoding]\n \n return langname+'.'+encoding\n \ndef _append_modifier(code,modifier):\n if modifier =='euro':\n if '.'not in code:\n return code+'.ISO8859-15'\n _,_,encoding=code.partition('.')\n if encoding in ('ISO8859-15','UTF-8'):\n return code\n if encoding =='ISO8859-1':\n return _replace_encoding(code,'ISO8859-15')\n return code+'@'+modifier\n \ndef normalize(localename):\n\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n code=localename.lower()\n if ':'in code:\n \n code=code.replace(':','.')\n if '@'in code:\n 
code,modifier=code.split('@',1)\n else :\n modifier=''\n if '.'in code:\n langname,encoding=code.split('.')[:2]\n else :\n langname=code\n encoding=''\n \n \n lang_enc=langname\n if encoding:\n norm_encoding=encoding.replace('-','')\n norm_encoding=norm_encoding.replace('_','')\n lang_enc +='.'+norm_encoding\n lookup_name=lang_enc\n if modifier:\n lookup_name +='@'+modifier\n code=locale_alias.get(lookup_name,None )\n if code is not None :\n return code\n \n \n if modifier:\n \n code=locale_alias.get(lang_enc,None )\n if code is not None :\n \n if '@'not in code:\n return _append_modifier(code,modifier)\n if code.split('@',1)[1].lower()==modifier:\n return code\n \n \n if encoding:\n \n lookup_name=langname\n if modifier:\n lookup_name +='@'+modifier\n code=locale_alias.get(lookup_name,None )\n if code is not None :\n \n if '@'not in code:\n return _replace_encoding(code,encoding)\n code,modifier=code.split('@',1)\n return _replace_encoding(code,encoding)+'@'+modifier\n \n if modifier:\n \n code=locale_alias.get(langname,None )\n if code is not None :\n \n if '@'not in code:\n code=_replace_encoding(code,encoding)\n return _append_modifier(code,modifier)\n code,defmod=code.split('@',1)\n if defmod.lower()==modifier:\n return _replace_encoding(code,encoding)+'@'+defmod\n \n return localename\n \ndef _parse_localename(localename):\n\n ''\n\n\n\n\n\n\n\n\n\n\n \n code=normalize(localename)\n if '@'in code:\n \n code,modifier=code.split('@',1)\n if modifier =='euro'and '.'not in code:\n \n \n \n return code,'iso-8859-15'\n \n if '.'in code:\n return tuple(code.split('.')[:2])\n elif code =='C':\n return None ,None\n raise ValueError('unknown locale: %s'%localename)\n \ndef _build_localename(localetuple):\n\n ''\n\n\n\n\n \n try :\n language,encoding=localetuple\n \n if language is None :\n language='C'\n if encoding is None :\n return language\n else :\n return language+'.'+encoding\n except (TypeError,ValueError):\n raise TypeError('Locale must be None, a string, or an iterable of '\n 'two strings -- language code, encoding.')from None\n \ndef getdefaultlocale(envvars=('LC_ALL','LC_CTYPE','LANG','LANGUAGE')):\n\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n try :\n \n import _locale\n code,encoding=_locale._getdefaultlocale()\n except (ImportError,AttributeError):\n pass\n else :\n \n if sys.platform ==\"win32\"and code and code[:2]==\"0x\":\n \n code=windows_locale.get(int(code,0))\n \n \n return code,encoding\n \n \n import os\n lookup=os.environ.get\n for variable in envvars:\n localename=lookup(variable,None )\n if localename:\n if variable =='LANGUAGE':\n localename=localename.split(':')[0]\n break\n else :\n localename='C'\n return _parse_localename(localename)\n \n \ndef getlocale(category=LC_CTYPE):\n\n ''\n\n\n\n\n\n\n\n\n\n \n localename=_setlocale(category)\n if category ==LC_ALL and ';'in localename:\n raise TypeError('category LC_ALL is not supported')\n return _parse_localename(localename)\n \ndef setlocale(category,locale=None ):\n\n ''\n\n\n\n\n\n\n\n\n \n if locale and not isinstance(locale,_builtin_str):\n \n locale=normalize(_build_localename(locale))\n return _setlocale(category,locale)\n \ndef resetlocale(category=LC_ALL):\n\n ''\n\n\n\n\n \n _setlocale(category,_build_localename(getdefaultlocale()))\n \nif sys.platform.startswith(\"win\"):\n\n def getpreferredencoding(do_setlocale=True ):\n ''\n if sys.flags.utf8_mode:\n return 'UTF-8'\n import _bootlocale\n return _bootlocale.getpreferredencoding(False )\nelse :\n\n try :\n CODESET\n except NameError:\n if 
hasattr(sys,'getandroidapilevel'):\n \n \n def getpreferredencoding(do_setlocale=True ):\n return 'UTF-8'\n else :\n \n def getpreferredencoding(do_setlocale=True ):\n ''\n \n if sys.flags.utf8_mode:\n return 'UTF-8'\n res=getdefaultlocale()[1]\n if res is None :\n \n res='ascii'\n return res\n else :\n def getpreferredencoding(do_setlocale=True ):\n ''\n \n if sys.flags.utf8_mode:\n return 'UTF-8'\n import _bootlocale\n if do_setlocale:\n oldloc=setlocale(LC_CTYPE)\n try :\n setlocale(LC_CTYPE,\"\")\n except Error:\n pass\n result=_bootlocale.getpreferredencoding(False )\n if do_setlocale:\n setlocale(LC_CTYPE,oldloc)\n return result\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \nlocale_encoding_alias={\n\n\n'437':'C',\n'c':'C',\n'en':'ISO8859-1',\n'jis':'JIS7',\n'jis7':'JIS7',\n'ajec':'eucJP',\n'koi8c':'KOI8-C',\n'microsoftcp1251':'CP1251',\n'microsoftcp1255':'CP1255',\n'microsoftcp1256':'CP1256',\n'88591':'ISO8859-1',\n'88592':'ISO8859-2',\n'88595':'ISO8859-5',\n'885915':'ISO8859-15',\n\n\n'ascii':'ISO8859-1',\n'latin_1':'ISO8859-1',\n'iso8859_1':'ISO8859-1',\n'iso8859_10':'ISO8859-10',\n'iso8859_11':'ISO8859-11',\n'iso8859_13':'ISO8859-13',\n'iso8859_14':'ISO8859-14',\n'iso8859_15':'ISO8859-15',\n'iso8859_16':'ISO8859-16',\n'iso8859_2':'ISO8859-2',\n'iso8859_3':'ISO8859-3',\n'iso8859_4':'ISO8859-4',\n'iso8859_5':'ISO8859-5',\n'iso8859_6':'ISO8859-6',\n'iso8859_7':'ISO8859-7',\n'iso8859_8':'ISO8859-8',\n'iso8859_9':'ISO8859-9',\n'iso2022_jp':'JIS7',\n'shift_jis':'SJIS',\n'tactis':'TACTIS',\n'euc_jp':'eucJP',\n'euc_kr':'eucKR',\n'utf_8':'UTF-8',\n'koi8_r':'KOI8-R',\n'koi8_t':'KOI8-T',\n'koi8_u':'KOI8-U',\n'kz1048':'RK1048',\n'cp1251':'CP1251',\n'cp1255':'CP1255',\n'cp1256':'CP1256',\n\n\n\n}\n\nfor k,v in sorted(locale_encoding_alias.items()):\n k=k.replace('_','')\n locale_encoding_alias.setdefault(k,v)\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n 
\nlocale_alias={\n'a3':'az_AZ.KOI8-C',\n'a3_az':'az_AZ.KOI8-C',\n'a3_az.koic':'az_AZ.KOI8-C',\n'aa_dj':'aa_DJ.ISO8859-1',\n'aa_er':'aa_ER.UTF-8',\n'aa_et':'aa_ET.UTF-8',\n'af':'af_ZA.ISO8859-1',\n'af_za':'af_ZA.ISO8859-1',\n'agr_pe':'agr_PE.UTF-8',\n'ak_gh':'ak_GH.UTF-8',\n'am':'am_ET.UTF-8',\n'am_et':'am_ET.UTF-8',\n'american':'en_US.ISO8859-1',\n'an_es':'an_ES.ISO8859-15',\n'anp_in':'anp_IN.UTF-8',\n'ar':'ar_AA.ISO8859-6',\n'ar_aa':'ar_AA.ISO8859-6',\n'ar_ae':'ar_AE.ISO8859-6',\n'ar_bh':'ar_BH.ISO8859-6',\n'ar_dz':'ar_DZ.ISO8859-6',\n'ar_eg':'ar_EG.ISO8859-6',\n'ar_in':'ar_IN.UTF-8',\n'ar_iq':'ar_IQ.ISO8859-6',\n'ar_jo':'ar_JO.ISO8859-6',\n'ar_kw':'ar_KW.ISO8859-6',\n'ar_lb':'ar_LB.ISO8859-6',\n'ar_ly':'ar_LY.ISO8859-6',\n'ar_ma':'ar_MA.ISO8859-6',\n'ar_om':'ar_OM.ISO8859-6',\n'ar_qa':'ar_QA.ISO8859-6',\n'ar_sa':'ar_SA.ISO8859-6',\n'ar_sd':'ar_SD.ISO8859-6',\n'ar_ss':'ar_SS.UTF-8',\n'ar_sy':'ar_SY.ISO8859-6',\n'ar_tn':'ar_TN.ISO8859-6',\n'ar_ye':'ar_YE.ISO8859-6',\n'arabic':'ar_AA.ISO8859-6',\n'as':'as_IN.UTF-8',\n'as_in':'as_IN.UTF-8',\n'ast_es':'ast_ES.ISO8859-15',\n'ayc_pe':'ayc_PE.UTF-8',\n'az':'az_AZ.ISO8859-9E',\n'az_az':'az_AZ.ISO8859-9E',\n'az_az.iso88599e':'az_AZ.ISO8859-9E',\n'az_ir':'az_IR.UTF-8',\n'be':'be_BY.CP1251',\n'be@latin':'be_BY.UTF-8@latin',\n'be_bg.utf8':'bg_BG.UTF-8',\n'be_by':'be_BY.CP1251',\n'be_by@latin':'be_BY.UTF-8@latin',\n'bem_zm':'bem_ZM.UTF-8',\n'ber_dz':'ber_DZ.UTF-8',\n'ber_ma':'ber_MA.UTF-8',\n'bg':'bg_BG.CP1251',\n'bg_bg':'bg_BG.CP1251',\n'bhb_in.utf8':'bhb_IN.UTF-8',\n'bho_in':'bho_IN.UTF-8',\n'bho_np':'bho_NP.UTF-8',\n'bi_vu':'bi_VU.UTF-8',\n'bn_bd':'bn_BD.UTF-8',\n'bn_in':'bn_IN.UTF-8',\n'bo_cn':'bo_CN.UTF-8',\n'bo_in':'bo_IN.UTF-8',\n'bokmal':'nb_NO.ISO8859-1',\n'bokm\\xe5l':'nb_NO.ISO8859-1',\n'br':'br_FR.ISO8859-1',\n'br_fr':'br_FR.ISO8859-1',\n'brx_in':'brx_IN.UTF-8',\n'bs':'bs_BA.ISO8859-2',\n'bs_ba':'bs_BA.ISO8859-2',\n'bulgarian':'bg_BG.CP1251',\n'byn_er':'byn_ER.UTF-8',\n'c':'C',\n'c-french':'fr_CA.ISO8859-1',\n'c.ascii':'C',\n'c.en':'C',\n'c.iso88591':'en_US.ISO8859-1',\n'c.utf8':'en_US.UTF-8',\n'c_c':'C',\n'c_c.c':'C',\n'ca':'ca_ES.ISO8859-1',\n'ca_ad':'ca_AD.ISO8859-1',\n'ca_es':'ca_ES.ISO8859-1',\n'ca_es@valencia':'ca_ES.UTF-8@valencia',\n'ca_fr':'ca_FR.ISO8859-1',\n'ca_it':'ca_IT.ISO8859-1',\n'catalan':'ca_ES.ISO8859-1',\n'ce_ru':'ce_RU.UTF-8',\n'cextend':'en_US.ISO8859-1',\n'chinese-s':'zh_CN.eucCN',\n'chinese-t':'zh_TW.eucTW',\n'chr_us':'chr_US.UTF-8',\n'ckb_iq':'ckb_IQ.UTF-8',\n'cmn_tw':'cmn_TW.UTF-8',\n'crh_ua':'crh_UA.UTF-8',\n'croatian':'hr_HR.ISO8859-2',\n'cs':'cs_CZ.ISO8859-2',\n'cs_cs':'cs_CZ.ISO8859-2',\n'cs_cz':'cs_CZ.ISO8859-2',\n'csb_pl':'csb_PL.UTF-8',\n'cv_ru':'cv_RU.UTF-8',\n'cy':'cy_GB.ISO8859-1',\n'cy_gb':'cy_GB.ISO8859-1',\n'cz':'cs_CZ.ISO8859-2',\n'cz_cz':'cs_CZ.ISO8859-2',\n'czech':'cs_CZ.ISO8859-2',\n'da':'da_DK.ISO8859-1',\n'da_dk':'da_DK.ISO8859-1',\n'danish':'da_DK.ISO8859-1',\n'dansk':'da_DK.ISO8859-1',\n'de':'de_DE.ISO8859-1',\n'de_at':'de_AT.ISO8859-1',\n'de_be':'de_BE.ISO8859-1',\n'de_ch':'de_CH.ISO8859-1',\n'de_de':'de_DE.ISO8859-1',\n'de_it':'de_IT.ISO8859-1',\n'de_li.utf8':'de_LI.UTF-8',\n'de_lu':'de_LU.ISO8859-1',\n'deutsch':'de_DE.ISO8859-1',\n'doi_in':'doi_IN.UTF-8',\n'dutch':'nl_NL.ISO8859-1',\n'dutch.iso88591':'nl_BE.ISO8859-1',\n'dv_mv':'dv_MV.UTF-8',\n'dz_bt':'dz_BT.UTF-8',\n'ee':'ee_EE.ISO8859-4',\n'ee_ee':'ee_EE.ISO8859-4',\n'eesti':'et_EE.ISO8859-1',\n'el':'el_GR.ISO8859-7',\n'el_cy':'el_CY.ISO8859-7',\n'el_gr':'el_GR.ISO8859-7',\n'el_gr@euro':'el_GR.ISO8859-15',\n'en':'en_US.ISO8859-1',\n'en_ag'
:'en_AG.UTF-8',\n'en_au':'en_AU.ISO8859-1',\n'en_be':'en_BE.ISO8859-1',\n'en_bw':'en_BW.ISO8859-1',\n'en_ca':'en_CA.ISO8859-1',\n'en_dk':'en_DK.ISO8859-1',\n'en_dl.utf8':'en_DL.UTF-8',\n'en_gb':'en_GB.ISO8859-1',\n'en_hk':'en_HK.ISO8859-1',\n'en_ie':'en_IE.ISO8859-1',\n'en_il':'en_IL.UTF-8',\n'en_in':'en_IN.ISO8859-1',\n'en_ng':'en_NG.UTF-8',\n'en_nz':'en_NZ.ISO8859-1',\n'en_ph':'en_PH.ISO8859-1',\n'en_sc.utf8':'en_SC.UTF-8',\n'en_sg':'en_SG.ISO8859-1',\n'en_uk':'en_GB.ISO8859-1',\n'en_us':'en_US.ISO8859-1',\n'en_us@euro@euro':'en_US.ISO8859-15',\n'en_za':'en_ZA.ISO8859-1',\n'en_zm':'en_ZM.UTF-8',\n'en_zw':'en_ZW.ISO8859-1',\n'en_zw.utf8':'en_ZS.UTF-8',\n'eng_gb':'en_GB.ISO8859-1',\n'english':'en_EN.ISO8859-1',\n'english.iso88591':'en_US.ISO8859-1',\n'english_uk':'en_GB.ISO8859-1',\n'english_united-states':'en_US.ISO8859-1',\n'english_united-states.437':'C',\n'english_us':'en_US.ISO8859-1',\n'eo':'eo_XX.ISO8859-3',\n'eo.utf8':'eo.UTF-8',\n'eo_eo':'eo_EO.ISO8859-3',\n'eo_us.utf8':'eo_US.UTF-8',\n'eo_xx':'eo_XX.ISO8859-3',\n'es':'es_ES.ISO8859-1',\n'es_ar':'es_AR.ISO8859-1',\n'es_bo':'es_BO.ISO8859-1',\n'es_cl':'es_CL.ISO8859-1',\n'es_co':'es_CO.ISO8859-1',\n'es_cr':'es_CR.ISO8859-1',\n'es_cu':'es_CU.UTF-8',\n'es_do':'es_DO.ISO8859-1',\n'es_ec':'es_EC.ISO8859-1',\n'es_es':'es_ES.ISO8859-1',\n'es_gt':'es_GT.ISO8859-1',\n'es_hn':'es_HN.ISO8859-1',\n'es_mx':'es_MX.ISO8859-1',\n'es_ni':'es_NI.ISO8859-1',\n'es_pa':'es_PA.ISO8859-1',\n'es_pe':'es_PE.ISO8859-1',\n'es_pr':'es_PR.ISO8859-1',\n'es_py':'es_PY.ISO8859-1',\n'es_sv':'es_SV.ISO8859-1',\n'es_us':'es_US.ISO8859-1',\n'es_uy':'es_UY.ISO8859-1',\n'es_ve':'es_VE.ISO8859-1',\n'estonian':'et_EE.ISO8859-1',\n'et':'et_EE.ISO8859-15',\n'et_ee':'et_EE.ISO8859-15',\n'eu':'eu_ES.ISO8859-1',\n'eu_es':'eu_ES.ISO8859-1',\n'eu_fr':'eu_FR.ISO8859-1',\n'fa':'fa_IR.UTF-8',\n'fa_ir':'fa_IR.UTF-8',\n'fa_ir.isiri3342':'fa_IR.ISIRI-3342',\n'ff_sn':'ff_SN.UTF-8',\n'fi':'fi_FI.ISO8859-15',\n'fi_fi':'fi_FI.ISO8859-15',\n'fil_ph':'fil_PH.UTF-8',\n'finnish':'fi_FI.ISO8859-1',\n'fo':'fo_FO.ISO8859-1',\n'fo_fo':'fo_FO.ISO8859-1',\n'fr':'fr_FR.ISO8859-1',\n'fr_be':'fr_BE.ISO8859-1',\n'fr_ca':'fr_CA.ISO8859-1',\n'fr_ch':'fr_CH.ISO8859-1',\n'fr_fr':'fr_FR.ISO8859-1',\n'fr_lu':'fr_LU.ISO8859-1',\n'fran\\xe7ais':'fr_FR.ISO8859-1',\n'fre_fr':'fr_FR.ISO8859-1',\n'french':'fr_FR.ISO8859-1',\n'french.iso88591':'fr_CH.ISO8859-1',\n'french_france':'fr_FR.ISO8859-1',\n'fur_it':'fur_IT.UTF-8',\n'fy_de':'fy_DE.UTF-8',\n'fy_nl':'fy_NL.UTF-8',\n'ga':'ga_IE.ISO8859-1',\n'ga_ie':'ga_IE.ISO8859-1',\n'galego':'gl_ES.ISO8859-1',\n'galician':'gl_ES.ISO8859-1',\n'gd':'gd_GB.ISO8859-1',\n'gd_gb':'gd_GB.ISO8859-1',\n'ger_de':'de_DE.ISO8859-1',\n'german':'de_DE.ISO8859-1',\n'german.iso88591':'de_CH.ISO8859-1',\n'german_germany':'de_DE.ISO8859-1',\n'gez_er':'gez_ER.UTF-8',\n'gez_et':'gez_ET.UTF-8',\n'gl':'gl_ES.ISO8859-1',\n'gl_es':'gl_ES.ISO8859-1',\n'greek':'el_GR.ISO8859-7',\n'gu_in':'gu_IN.UTF-8',\n'gv':'gv_GB.ISO8859-1',\n'gv_gb':'gv_GB.ISO8859-1',\n'ha_ng':'ha_NG.UTF-8',\n'hak_tw':'hak_TW.UTF-8',\n'he':'he_IL.ISO8859-8',\n'he_il':'he_IL.ISO8859-8',\n'hebrew':'he_IL.ISO8859-8',\n'hi':'hi_IN.ISCII-DEV',\n'hi_in':'hi_IN.ISCII-DEV',\n'hi_in.isciidev':'hi_IN.ISCII-DEV',\n'hif_fj':'hif_FJ.UTF-8',\n'hne':'hne_IN.UTF-8',\n'hne_in':'hne_IN.UTF-8',\n'hr':'hr_HR.ISO8859-2',\n'hr_hr':'hr_HR.ISO8859-2',\n'hrvatski':'hr_HR.ISO8859-2',\n'hsb_de':'hsb_DE.ISO8859-2',\n'ht_ht':'ht_HT.UTF-8',\n'hu':'hu_HU.ISO8859-2',\n'hu_hu':'hu_HU.ISO8859-2',\n'hungarian':'hu_HU.ISO8859-2',\n'hy_am':'hy_AM.UTF-8',\n'hy_am.arm
scii8':'hy_AM.ARMSCII_8',\n'ia':'ia.UTF-8',\n'ia_fr':'ia_FR.UTF-8',\n'icelandic':'is_IS.ISO8859-1',\n'id':'id_ID.ISO8859-1',\n'id_id':'id_ID.ISO8859-1',\n'ig_ng':'ig_NG.UTF-8',\n'ik_ca':'ik_CA.UTF-8',\n'in':'id_ID.ISO8859-1',\n'in_id':'id_ID.ISO8859-1',\n'is':'is_IS.ISO8859-1',\n'is_is':'is_IS.ISO8859-1',\n'iso-8859-1':'en_US.ISO8859-1',\n'iso-8859-15':'en_US.ISO8859-15',\n'iso8859-1':'en_US.ISO8859-1',\n'iso8859-15':'en_US.ISO8859-15',\n'iso_8859_1':'en_US.ISO8859-1',\n'iso_8859_15':'en_US.ISO8859-15',\n'it':'it_IT.ISO8859-1',\n'it_ch':'it_CH.ISO8859-1',\n'it_it':'it_IT.ISO8859-1',\n'italian':'it_IT.ISO8859-1',\n'iu':'iu_CA.NUNACOM-8',\n'iu_ca':'iu_CA.NUNACOM-8',\n'iu_ca.nunacom8':'iu_CA.NUNACOM-8',\n'iw':'he_IL.ISO8859-8',\n'iw_il':'he_IL.ISO8859-8',\n'iw_il.utf8':'iw_IL.UTF-8',\n'ja':'ja_JP.eucJP',\n'ja_jp':'ja_JP.eucJP',\n'ja_jp.euc':'ja_JP.eucJP',\n'ja_jp.mscode':'ja_JP.SJIS',\n'ja_jp.pck':'ja_JP.SJIS',\n'japan':'ja_JP.eucJP',\n'japanese':'ja_JP.eucJP',\n'japanese-euc':'ja_JP.eucJP',\n'japanese.euc':'ja_JP.eucJP',\n'jp_jp':'ja_JP.eucJP',\n'ka':'ka_GE.GEORGIAN-ACADEMY',\n'ka_ge':'ka_GE.GEORGIAN-ACADEMY',\n'ka_ge.georgianacademy':'ka_GE.GEORGIAN-ACADEMY',\n'ka_ge.georgianps':'ka_GE.GEORGIAN-PS',\n'ka_ge.georgianrs':'ka_GE.GEORGIAN-ACADEMY',\n'kab_dz':'kab_DZ.UTF-8',\n'kk_kz':'kk_KZ.ptcp154',\n'kl':'kl_GL.ISO8859-1',\n'kl_gl':'kl_GL.ISO8859-1',\n'km_kh':'km_KH.UTF-8',\n'kn':'kn_IN.UTF-8',\n'kn_in':'kn_IN.UTF-8',\n'ko':'ko_KR.eucKR',\n'ko_kr':'ko_KR.eucKR',\n'ko_kr.euc':'ko_KR.eucKR',\n'kok_in':'kok_IN.UTF-8',\n'korean':'ko_KR.eucKR',\n'korean.euc':'ko_KR.eucKR',\n'ks':'ks_IN.UTF-8',\n'ks_in':'ks_IN.UTF-8',\n'ks_in@devanagari.utf8':'ks_IN.UTF-8@devanagari',\n'ku_tr':'ku_TR.ISO8859-9',\n'kw':'kw_GB.ISO8859-1',\n'kw_gb':'kw_GB.ISO8859-1',\n'ky':'ky_KG.UTF-8',\n'ky_kg':'ky_KG.UTF-8',\n'lb_lu':'lb_LU.UTF-8',\n'lg_ug':'lg_UG.ISO8859-10',\n'li_be':'li_BE.UTF-8',\n'li_nl':'li_NL.UTF-8',\n'lij_it':'lij_IT.UTF-8',\n'lithuanian':'lt_LT.ISO8859-13',\n'ln_cd':'ln_CD.UTF-8',\n'lo':'lo_LA.MULELAO-1',\n'lo_la':'lo_LA.MULELAO-1',\n'lo_la.cp1133':'lo_LA.IBM-CP1133',\n'lo_la.ibmcp1133':'lo_LA.IBM-CP1133',\n'lo_la.mulelao1':'lo_LA.MULELAO-1',\n'lt':'lt_LT.ISO8859-13',\n'lt_lt':'lt_LT.ISO8859-13',\n'lv':'lv_LV.ISO8859-13',\n'lv_lv':'lv_LV.ISO8859-13',\n'lzh_tw':'lzh_TW.UTF-8',\n'mag_in':'mag_IN.UTF-8',\n'mai':'mai_IN.UTF-8',\n'mai_in':'mai_IN.UTF-8',\n'mai_np':'mai_NP.UTF-8',\n'mfe_mu':'mfe_MU.UTF-8',\n'mg_mg':'mg_MG.ISO8859-15',\n'mhr_ru':'mhr_RU.UTF-8',\n'mi':'mi_NZ.ISO8859-1',\n'mi_nz':'mi_NZ.ISO8859-1',\n'miq_ni':'miq_NI.UTF-8',\n'mjw_in':'mjw_IN.UTF-8',\n'mk':'mk_MK.ISO8859-5',\n'mk_mk':'mk_MK.ISO8859-5',\n'ml':'ml_IN.UTF-8',\n'ml_in':'ml_IN.UTF-8',\n'mn_mn':'mn_MN.UTF-8',\n'mni_in':'mni_IN.UTF-8',\n'mr':'mr_IN.UTF-8',\n'mr_in':'mr_IN.UTF-8',\n'ms':'ms_MY.ISO8859-1',\n'ms_my':'ms_MY.ISO8859-1',\n'mt':'mt_MT.ISO8859-3',\n'mt_mt':'mt_MT.ISO8859-3',\n'my_mm':'my_MM.UTF-8',\n'nan_tw':'nan_TW.UTF-8',\n'nb':'nb_NO.ISO8859-1',\n'nb_no':'nb_NO.ISO8859-1',\n'nds_de':'nds_DE.UTF-8',\n'nds_nl':'nds_NL.UTF-8',\n'ne_np':'ne_NP.UTF-8',\n'nhn_mx':'nhn_MX.UTF-8',\n'niu_nu':'niu_NU.UTF-8',\n'niu_nz':'niu_NZ.UTF-8',\n'nl':'nl_NL.ISO8859-1',\n'nl_aw':'nl_AW.UTF-8',\n'nl_be':'nl_BE.ISO8859-1',\n'nl_nl':'nl_NL.ISO8859-1',\n'nn':'nn_NO.ISO8859-1',\n'nn_no':'nn_NO.ISO8859-1',\n'no':'no_NO.ISO8859-1',\n'no@nynorsk':'ny_NO.ISO8859-1',\n'no_no':'no_NO.ISO8859-1',\n'no_no.iso88591@bokmal':'no_NO.ISO8859-1',\n'no_no.iso88591@nynorsk':'no_NO.ISO8859-1',\n'norwegian':'no_NO.ISO8859-1',\n'nr':'nr_ZA.ISO8859-1',\n'nr_za':'nr_ZA.ISO8859
-1',\n'nso':'nso_ZA.ISO8859-15',\n'nso_za':'nso_ZA.ISO8859-15',\n'ny':'ny_NO.ISO8859-1',\n'ny_no':'ny_NO.ISO8859-1',\n'nynorsk':'nn_NO.ISO8859-1',\n'oc':'oc_FR.ISO8859-1',\n'oc_fr':'oc_FR.ISO8859-1',\n'om_et':'om_ET.UTF-8',\n'om_ke':'om_KE.ISO8859-1',\n'or':'or_IN.UTF-8',\n'or_in':'or_IN.UTF-8',\n'os_ru':'os_RU.UTF-8',\n'pa':'pa_IN.UTF-8',\n'pa_in':'pa_IN.UTF-8',\n'pa_pk':'pa_PK.UTF-8',\n'pap_an':'pap_AN.UTF-8',\n'pap_aw':'pap_AW.UTF-8',\n'pap_cw':'pap_CW.UTF-8',\n'pd':'pd_US.ISO8859-1',\n'pd_de':'pd_DE.ISO8859-1',\n'pd_us':'pd_US.ISO8859-1',\n'ph':'ph_PH.ISO8859-1',\n'ph_ph':'ph_PH.ISO8859-1',\n'pl':'pl_PL.ISO8859-2',\n'pl_pl':'pl_PL.ISO8859-2',\n'polish':'pl_PL.ISO8859-2',\n'portuguese':'pt_PT.ISO8859-1',\n'portuguese_brazil':'pt_BR.ISO8859-1',\n'posix':'C',\n'posix-utf2':'C',\n'pp':'pp_AN.ISO8859-1',\n'pp_an':'pp_AN.ISO8859-1',\n'ps_af':'ps_AF.UTF-8',\n'pt':'pt_PT.ISO8859-1',\n'pt_br':'pt_BR.ISO8859-1',\n'pt_pt':'pt_PT.ISO8859-1',\n'quz_pe':'quz_PE.UTF-8',\n'raj_in':'raj_IN.UTF-8',\n'ro':'ro_RO.ISO8859-2',\n'ro_ro':'ro_RO.ISO8859-2',\n'romanian':'ro_RO.ISO8859-2',\n'ru':'ru_RU.UTF-8',\n'ru_ru':'ru_RU.UTF-8',\n'ru_ua':'ru_UA.KOI8-U',\n'rumanian':'ro_RO.ISO8859-2',\n'russian':'ru_RU.KOI8-R',\n'rw':'rw_RW.ISO8859-1',\n'rw_rw':'rw_RW.ISO8859-1',\n'sa_in':'sa_IN.UTF-8',\n'sat_in':'sat_IN.UTF-8',\n'sc_it':'sc_IT.UTF-8',\n'sd':'sd_IN.UTF-8',\n'sd_in':'sd_IN.UTF-8',\n'sd_in@devanagari.utf8':'sd_IN.UTF-8@devanagari',\n'sd_pk':'sd_PK.UTF-8',\n'se_no':'se_NO.UTF-8',\n'serbocroatian':'sr_RS.UTF-8@latin',\n'sgs_lt':'sgs_LT.UTF-8',\n'sh':'sr_RS.UTF-8@latin',\n'sh_ba.iso88592@bosnia':'sr_CS.ISO8859-2',\n'sh_hr':'sh_HR.ISO8859-2',\n'sh_hr.iso88592':'hr_HR.ISO8859-2',\n'sh_sp':'sr_CS.ISO8859-2',\n'sh_yu':'sr_RS.UTF-8@latin',\n'shn_mm':'shn_MM.UTF-8',\n'shs_ca':'shs_CA.UTF-8',\n'si':'si_LK.UTF-8',\n'si_lk':'si_LK.UTF-8',\n'sid_et':'sid_ET.UTF-8',\n'sinhala':'si_LK.UTF-8',\n'sk':'sk_SK.ISO8859-2',\n'sk_sk':'sk_SK.ISO8859-2',\n'sl':'sl_SI.ISO8859-2',\n'sl_cs':'sl_CS.ISO8859-2',\n'sl_si':'sl_SI.ISO8859-2',\n'slovak':'sk_SK.ISO8859-2',\n'slovene':'sl_SI.ISO8859-2',\n'slovenian':'sl_SI.ISO8859-2',\n'sm_ws':'sm_WS.UTF-8',\n'so_dj':'so_DJ.ISO8859-1',\n'so_et':'so_ET.UTF-8',\n'so_ke':'so_KE.ISO8859-1',\n'so_so':'so_SO.ISO8859-1',\n'sp':'sr_CS.ISO8859-5',\n'sp_yu':'sr_CS.ISO8859-5',\n'spanish':'es_ES.ISO8859-1',\n'spanish_spain':'es_ES.ISO8859-1',\n'sq':'sq_AL.ISO8859-2',\n'sq_al':'sq_AL.ISO8859-2',\n'sq_mk':'sq_MK.UTF-8',\n'sr':'sr_RS.UTF-8',\n'sr@cyrillic':'sr_RS.UTF-8',\n'sr@latn':'sr_CS.UTF-8@latin',\n'sr_cs':'sr_CS.UTF-8',\n'sr_cs.iso88592@latn':'sr_CS.ISO8859-2',\n'sr_cs@latn':'sr_CS.UTF-8@latin',\n'sr_me':'sr_ME.UTF-8',\n'sr_rs':'sr_RS.UTF-8',\n'sr_rs@latn':'sr_RS.UTF-8@latin',\n'sr_sp':'sr_CS.ISO8859-2',\n'sr_yu':'sr_RS.UTF-8@latin',\n'sr_yu.cp1251@cyrillic':'sr_CS.CP1251',\n'sr_yu.iso88592':'sr_CS.ISO8859-2',\n'sr_yu.iso88595':'sr_CS.ISO8859-5',\n'sr_yu.iso88595@cyrillic':'sr_CS.ISO8859-5',\n'sr_yu.microsoftcp1251@cyrillic':'sr_CS.CP1251',\n'sr_yu.utf8':'sr_RS.UTF-8',\n'sr_yu.utf8@cyrillic':'sr_RS.UTF-8',\n'sr_yu@cyrillic':'sr_RS.UTF-8',\n'ss':'ss_ZA.ISO8859-1',\n'ss_za':'ss_ZA.ISO8859-1',\n'st':'st_ZA.ISO8859-1',\n'st_za':'st_ZA.ISO8859-1',\n'sv':'sv_SE.ISO8859-1',\n'sv_fi':'sv_FI.ISO8859-1',\n'sv_se':'sv_SE.ISO8859-1',\n'sw_ke':'sw_KE.UTF-8',\n'sw_tz':'sw_TZ.UTF-8',\n'swedish':'sv_SE.ISO8859-1',\n'szl_pl':'szl_PL.UTF-8',\n'ta':'ta_IN.TSCII-0',\n'ta_in':'ta_IN.TSCII-0',\n'ta_in.tscii':'ta_IN.TSCII-0',\n'ta_in.tscii0':'ta_IN.TSCII-0',\n'ta_lk':'ta_LK.UTF-8',\n'tcy_in.utf8':'tcy_IN.UTF-8',\n'te':'te_IN.UTF
-8',\n'te_in':'te_IN.UTF-8',\n'tg':'tg_TJ.KOI8-C',\n'tg_tj':'tg_TJ.KOI8-C',\n'th':'th_TH.ISO8859-11',\n'th_th':'th_TH.ISO8859-11',\n'th_th.tactis':'th_TH.TIS620',\n'th_th.tis620':'th_TH.TIS620',\n'thai':'th_TH.ISO8859-11',\n'the_np':'the_NP.UTF-8',\n'ti_er':'ti_ER.UTF-8',\n'ti_et':'ti_ET.UTF-8',\n'tig_er':'tig_ER.UTF-8',\n'tk_tm':'tk_TM.UTF-8',\n'tl':'tl_PH.ISO8859-1',\n'tl_ph':'tl_PH.ISO8859-1',\n'tn':'tn_ZA.ISO8859-15',\n'tn_za':'tn_ZA.ISO8859-15',\n'to_to':'to_TO.UTF-8',\n'tpi_pg':'tpi_PG.UTF-8',\n'tr':'tr_TR.ISO8859-9',\n'tr_cy':'tr_CY.ISO8859-9',\n'tr_tr':'tr_TR.ISO8859-9',\n'ts':'ts_ZA.ISO8859-1',\n'ts_za':'ts_ZA.ISO8859-1',\n'tt':'tt_RU.TATAR-CYR',\n'tt_ru':'tt_RU.TATAR-CYR',\n'tt_ru.tatarcyr':'tt_RU.TATAR-CYR',\n'tt_ru@iqtelif':'tt_RU.UTF-8@iqtelif',\n'turkish':'tr_TR.ISO8859-9',\n'ug_cn':'ug_CN.UTF-8',\n'uk':'uk_UA.KOI8-U',\n'uk_ua':'uk_UA.KOI8-U',\n'univ':'en_US.utf',\n'universal':'en_US.utf',\n'universal.utf8@ucs4':'en_US.UTF-8',\n'unm_us':'unm_US.UTF-8',\n'ur':'ur_PK.CP1256',\n'ur_in':'ur_IN.UTF-8',\n'ur_pk':'ur_PK.CP1256',\n'uz':'uz_UZ.UTF-8',\n'uz_uz':'uz_UZ.UTF-8',\n'uz_uz@cyrillic':'uz_UZ.UTF-8',\n've':'ve_ZA.UTF-8',\n've_za':'ve_ZA.UTF-8',\n'vi':'vi_VN.TCVN',\n'vi_vn':'vi_VN.TCVN',\n'vi_vn.tcvn':'vi_VN.TCVN',\n'vi_vn.tcvn5712':'vi_VN.TCVN',\n'vi_vn.viscii':'vi_VN.VISCII',\n'vi_vn.viscii111':'vi_VN.VISCII',\n'wa':'wa_BE.ISO8859-1',\n'wa_be':'wa_BE.ISO8859-1',\n'wae_ch':'wae_CH.UTF-8',\n'wal_et':'wal_ET.UTF-8',\n'wo_sn':'wo_SN.UTF-8',\n'xh':'xh_ZA.ISO8859-1',\n'xh_za':'xh_ZA.ISO8859-1',\n'yi':'yi_US.CP1255',\n'yi_us':'yi_US.CP1255',\n'yo_ng':'yo_NG.UTF-8',\n'yue_hk':'yue_HK.UTF-8',\n'yuw_pg':'yuw_PG.UTF-8',\n'zh':'zh_CN.eucCN',\n'zh_cn':'zh_CN.gb2312',\n'zh_cn.big5':'zh_TW.big5',\n'zh_cn.euc':'zh_CN.eucCN',\n'zh_hk':'zh_HK.big5hkscs',\n'zh_hk.big5hk':'zh_HK.big5hkscs',\n'zh_sg':'zh_SG.GB2312',\n'zh_sg.gbk':'zh_SG.GBK',\n'zh_tw':'zh_TW.big5',\n'zh_tw.euc':'zh_TW.eucTW',\n'zh_tw.euctw':'zh_TW.eucTW',\n'zu':'zu_ZA.ISO8859-1',\n'zu_za':'zu_ZA.ISO8859-1',\n}\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nwindows_locale={\n0x0436:\"af_ZA\",\n0x041c:\"sq_AL\",\n0x0484:\"gsw_FR\",\n0x045e:\"am_ET\",\n0x0401:\"ar_SA\",\n0x0801:\"ar_IQ\",\n0x0c01:\"ar_EG\",\n0x1001:\"ar_LY\",\n0x1401:\"ar_DZ\",\n0x1801:\"ar_MA\",\n0x1c01:\"ar_TN\",\n0x2001:\"ar_OM\",\n0x2401:\"ar_YE\",\n0x2801:\"ar_SY\",\n0x2c01:\"ar_JO\",\n0x3001:\"ar_LB\",\n0x3401:\"ar_KW\",\n0x3801:\"ar_AE\",\n0x3c01:\"ar_BH\",\n0x4001:\"ar_QA\",\n0x042b:\"hy_AM\",\n0x044d:\"as_IN\",\n0x042c:\"az_AZ\",\n0x082c:\"az_AZ\",\n0x046d:\"ba_RU\",\n0x042d:\"eu_ES\",\n0x0423:\"be_BY\",\n0x0445:\"bn_IN\",\n0x201a:\"bs_BA\",\n0x141a:\"bs_BA\",\n0x047e:\"br_FR\",\n0x0402:\"bg_BG\",\n\n0x0403:\"ca_ES\",\n0x0004:\"zh_CHS\",\n0x0404:\"zh_TW\",\n0x0804:\"zh_CN\",\n0x0c04:\"zh_HK\",\n0x1004:\"zh_SG\",\n0x1404:\"zh_MO\",\n0x7c04:\"zh_CHT\",\n0x0483:\"co_FR\",\n0x041a:\"hr_HR\",\n0x101a:\"hr_BA\",\n0x0405:\"cs_CZ\",\n0x0406:\"da_DK\",\n0x048c:\"gbz_AF\",\n0x0465:\"div_MV\",\n0x0413:\"nl_NL\",\n0x0813:\"nl_BE\",\n0x0409:\"en_US\",\n0x0809:\"en_GB\",\n0x0c09:\"en_AU\",\n0x1009:\"en_CA\",\n0x1409:\"en_NZ\",\n0x1809:\"en_IE\",\n0x1c09:\"en_ZA\",\n0x2009:\"en_JA\",\n0x2409:\"en_CB\",\n0x2809:\"en_BZ\",\n0x2c09:\"en_TT\",\n0x3009:\"en_ZW\",\n0x3409:\"en_PH\",\n0x4009:\"en_IN\",\n0x4409:\"en_MY\",\n0x4809:\"en_IN\",\n0x0425:\"et_EE\",\n0x0438:\"fo_FO\",\n0x0464:\"fil_PH\",\n0x040b:\"fi_FI\",\n0x040c:\"fr_FR\",\n0x080c:\"fr_BE\",\n0x0c0c:\"fr_CA\",\n0x100c:\"fr_CH\",\n0x140c:\"fr_LU\",\n0x180c:\"fr_MC\",\n0x0462:\"fy_NL\",\n0x0456:\"gl_ES\",\n0x0437:\"ka_GE\",\n0x0407:\"de_DE\
",\n0x0807:\"de_CH\",\n0x0c07:\"de_AT\",\n0x1007:\"de_LU\",\n0x1407:\"de_LI\",\n0x0408:\"el_GR\",\n0x046f:\"kl_GL\",\n0x0447:\"gu_IN\",\n0x0468:\"ha_NG\",\n0x040d:\"he_IL\",\n0x0439:\"hi_IN\",\n0x040e:\"hu_HU\",\n0x040f:\"is_IS\",\n0x0421:\"id_ID\",\n0x045d:\"iu_CA\",\n0x085d:\"iu_CA\",\n0x083c:\"ga_IE\",\n0x0410:\"it_IT\",\n0x0810:\"it_CH\",\n0x0411:\"ja_JP\",\n0x044b:\"kn_IN\",\n0x043f:\"kk_KZ\",\n0x0453:\"kh_KH\",\n0x0486:\"qut_GT\",\n0x0487:\"rw_RW\",\n0x0457:\"kok_IN\",\n0x0412:\"ko_KR\",\n0x0440:\"ky_KG\",\n0x0454:\"lo_LA\",\n0x0426:\"lv_LV\",\n0x0427:\"lt_LT\",\n0x082e:\"dsb_DE\",\n0x046e:\"lb_LU\",\n0x042f:\"mk_MK\",\n0x043e:\"ms_MY\",\n0x083e:\"ms_BN\",\n0x044c:\"ml_IN\",\n0x043a:\"mt_MT\",\n0x0481:\"mi_NZ\",\n0x047a:\"arn_CL\",\n0x044e:\"mr_IN\",\n0x047c:\"moh_CA\",\n0x0450:\"mn_MN\",\n0x0850:\"mn_CN\",\n0x0461:\"ne_NP\",\n0x0414:\"nb_NO\",\n0x0814:\"nn_NO\",\n0x0482:\"oc_FR\",\n0x0448:\"or_IN\",\n0x0463:\"ps_AF\",\n0x0429:\"fa_IR\",\n0x0415:\"pl_PL\",\n0x0416:\"pt_BR\",\n0x0816:\"pt_PT\",\n0x0446:\"pa_IN\",\n0x046b:\"quz_BO\",\n0x086b:\"quz_EC\",\n0x0c6b:\"quz_PE\",\n0x0418:\"ro_RO\",\n0x0417:\"rm_CH\",\n0x0419:\"ru_RU\",\n0x243b:\"smn_FI\",\n0x103b:\"smj_NO\",\n0x143b:\"smj_SE\",\n0x043b:\"se_NO\",\n0x083b:\"se_SE\",\n0x0c3b:\"se_FI\",\n0x203b:\"sms_FI\",\n0x183b:\"sma_NO\",\n0x1c3b:\"sma_SE\",\n0x044f:\"sa_IN\",\n0x0c1a:\"sr_SP\",\n0x1c1a:\"sr_BA\",\n0x081a:\"sr_SP\",\n0x181a:\"sr_BA\",\n0x045b:\"si_LK\",\n0x046c:\"ns_ZA\",\n0x0432:\"tn_ZA\",\n0x041b:\"sk_SK\",\n0x0424:\"sl_SI\",\n0x040a:\"es_ES\",\n0x080a:\"es_MX\",\n0x0c0a:\"es_ES\",\n0x100a:\"es_GT\",\n0x140a:\"es_CR\",\n0x180a:\"es_PA\",\n0x1c0a:\"es_DO\",\n0x200a:\"es_VE\",\n0x240a:\"es_CO\",\n0x280a:\"es_PE\",\n0x2c0a:\"es_AR\",\n0x300a:\"es_EC\",\n0x340a:\"es_CL\",\n0x380a:\"es_UR\",\n0x3c0a:\"es_PY\",\n0x400a:\"es_BO\",\n0x440a:\"es_SV\",\n0x480a:\"es_HN\",\n0x4c0a:\"es_NI\",\n0x500a:\"es_PR\",\n0x540a:\"es_US\",\n\n0x0441:\"sw_KE\",\n0x041d:\"sv_SE\",\n0x081d:\"sv_FI\",\n0x045a:\"syr_SY\",\n0x0428:\"tg_TJ\",\n0x085f:\"tmz_DZ\",\n0x0449:\"ta_IN\",\n0x0444:\"tt_RU\",\n0x044a:\"te_IN\",\n0x041e:\"th_TH\",\n0x0851:\"bo_BT\",\n0x0451:\"bo_CN\",\n0x041f:\"tr_TR\",\n0x0442:\"tk_TM\",\n0x0480:\"ug_CN\",\n0x0422:\"uk_UA\",\n0x042e:\"wen_DE\",\n0x0420:\"ur_PK\",\n0x0820:\"ur_IN\",\n0x0443:\"uz_UZ\",\n0x0843:\"uz_UZ\",\n0x042a:\"vi_VN\",\n0x0452:\"cy_GB\",\n0x0488:\"wo_SN\",\n0x0434:\"xh_ZA\",\n0x0485:\"sah_RU\",\n0x0478:\"ii_CN\",\n0x046a:\"yo_NG\",\n0x0435:\"zu_ZA\",\n}\n\ndef _print_locale():\n\n ''\n \n categories={}\n def _init_categories(categories=categories):\n for k,v in globals().items():\n if k[:3]=='LC_':\n categories[k]=v\n _init_categories()\n del categories['LC_ALL']\n \n print('Locale defaults as determined by getdefaultlocale():')\n print('-'*72)\n lang,enc=getdefaultlocale()\n print('Language: ',lang or '(undefined)')\n print('Encoding: ',enc or '(undefined)')\n print()\n \n print('Locale settings on startup:')\n print('-'*72)\n for name,category in categories.items():\n print(name,'...')\n lang,enc=getlocale(category)\n print(' Language: ',lang or '(undefined)')\n print(' Encoding: ',enc or '(undefined)')\n print()\n \n print()\n print('Locale settings after calling resetlocale():')\n print('-'*72)\n resetlocale()\n for name,category in categories.items():\n print(name,'...')\n lang,enc=getlocale(category)\n print(' Language: ',lang or '(undefined)')\n print(' Encoding: ',enc or '(undefined)')\n print()\n \n try :\n setlocale(LC_ALL,\"\")\n except :\n print('NOTE:')\n print('setlocale(LC_ALL, \"\") does not 
support the default locale')\n print('given in the OS environment variables.')\n else :\n print()\n print('Locale settings after calling setlocale(LC_ALL, \"\"):')\n print('-'*72)\n for name,category in categories.items():\n print(name,'...')\n lang,enc=getlocale(category)\n print(' Language: ',lang or '(undefined)')\n print(' Encoding: ',enc or '(undefined)')\n print()\n \n \n \ntry :\n LC_MESSAGES\nexcept NameError:\n pass\nelse :\n __all__.append(\"LC_MESSAGES\")\n \nif __name__ =='__main__':\n print('Locale aliasing:')\n print()\n _print_locale()\n print()\n print('Number formatting:')\n print()\n _test()\n", ["_bootlocale", "_collections_abc", "_locale", "builtins", "encodings", "encodings.aliases", "functools", "os", "re", "sys", "warnings"]], "nntplib": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nimport re\nimport socket\nimport collections\nimport datetime\nimport warnings\n\ntry :\n import ssl\nexcept ImportError:\n _have_ssl=False\nelse :\n _have_ssl=True\n \nfrom email.header import decode_header as _email_decode_header\nfrom socket import _GLOBAL_DEFAULT_TIMEOUT\n\n__all__=[\"NNTP\",\n\"NNTPError\",\"NNTPReplyError\",\"NNTPTemporaryError\",\n\"NNTPPermanentError\",\"NNTPProtocolError\",\"NNTPDataError\",\n\"decode_header\",\n]\n\n\n\n\n\n_MAXLINE=2048\n\n\n\nclass NNTPError(Exception):\n ''\n def __init__(self,*args):\n Exception.__init__(self,*args)\n try :\n self.response=args[0]\n except IndexError:\n self.response='No response given'\n \nclass NNTPReplyError(NNTPError):\n ''\n pass\n \nclass NNTPTemporaryError(NNTPError):\n ''\n pass\n \nclass NNTPPermanentError(NNTPError):\n ''\n pass\n \nclass NNTPProtocolError(NNTPError):\n ''\n pass\n \nclass NNTPDataError(NNTPError):\n ''\n pass\n \n \n \nNNTP_PORT=119\nNNTP_SSL_PORT=563\n\n\n_LONGRESP={\n'100',\n'101',\n'211',\n'215',\n'220',\n'221',\n'222',\n'224',\n'225',\n'230',\n'231',\n'282',\n}\n\n\n_DEFAULT_OVERVIEW_FMT=[\n\"subject\",\"from\",\"date\",\"message-id\",\"references\",\":bytes\",\":lines\"]\n\n\n_OVERVIEW_FMT_ALTERNATIVES={\n'bytes':':bytes',\n'lines':':lines',\n}\n\n\n_CRLF=b'\\r\\n'\n\nGroupInfo=collections.namedtuple('GroupInfo',\n['group','last','first','flag'])\n\nArticleInfo=collections.namedtuple('ArticleInfo',\n['number','message_id','lines'])\n\n\n\ndef decode_header(header_str):\n ''\n \n parts=[]\n for v,enc in _email_decode_header(header_str):\n if isinstance(v,bytes):\n parts.append(v.decode(enc or 'ascii'))\n else :\n parts.append(v)\n return ''.join(parts)\n \ndef _parse_overview_fmt(lines):\n ''\n\n\n \n fmt=[]\n for line in lines:\n if line[0]==':':\n \n name,_,suffix=line[1:].partition(':')\n name=':'+name\n else :\n \n name,_,suffix=line.partition(':')\n name=name.lower()\n name=_OVERVIEW_FMT_ALTERNATIVES.get(name,name)\n \n fmt.append(name)\n defaults=_DEFAULT_OVERVIEW_FMT\n if len(fmt)=len(fmt):\n \n \n \n continue\n field_name=fmt[i]\n is_metadata=field_name.startswith(':')\n if i >=n_defaults and not is_metadata:\n \n \n h=field_name+\": \"\n if token and token[:len(h)].lower()!=h:\n raise NNTPDataError(\"OVER/XOVER response doesn't include \"\n \"names of additional headers\")\n token=token[len(h):]if token else None\n fields[fmt[i]]=token\n overview.append((article_number,fields))\n return overview\n \ndef _parse_datetime(date_str,time_str=None ):\n ''\n\n\n \n if time_str is None :\n time_str=date_str[-6:]\n date_str=date_str[:-6]\n hours=int(time_str[:2])\n minutes=int(time_str[2:4])\n 
seconds=int(time_str[4:])\n year=int(date_str[:-4])\n month=int(date_str[-4:-2])\n day=int(date_str[-2:])\n \n \n if year <70:\n year +=2000\n elif year <100:\n year +=1900\n return datetime.datetime(year,month,day,hours,minutes,seconds)\n \ndef _unparse_datetime(dt,legacy=False ):\n ''\n\n\n\n\n\n\n\n\n\n\n \n if not isinstance(dt,datetime.datetime):\n time_str=\"000000\"\n else :\n time_str=\"{0.hour:02d}{0.minute:02d}{0.second:02d}\".format(dt)\n y=dt.year\n if legacy:\n y=y %100\n date_str=\"{0:02d}{1.month:02d}{1.day:02d}\".format(y,dt)\n else :\n date_str=\"{0:04d}{1.month:02d}{1.day:02d}\".format(y,dt)\n return date_str,time_str\n \n \nif _have_ssl:\n\n def _encrypt_on(sock,context,hostname):\n ''\n\n\n\n\n \n \n if context is None :\n context=ssl._create_stdlib_context()\n return context.wrap_socket(sock,server_hostname=hostname)\n \n \n \nclass _NNTPBase:\n\n\n\n\n\n\n\n\n\n\n\n\n encoding='utf-8'\n errors='surrogateescape'\n \n def __init__(self,file,host,\n readermode=None ,timeout=_GLOBAL_DEFAULT_TIMEOUT):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n self.host=host\n self.file=file\n self.debugging=0\n self.welcome=self._getresp()\n \n \n self._caps=None\n self.getcapabilities()\n \n \n \n \n \n \n \n \n self.readermode_afterauth=False\n if readermode and 'READER'not in self._caps:\n self._setreadermode()\n if not self.readermode_afterauth:\n \n self._caps=None\n self.getcapabilities()\n \n \n \n \n self.tls_on=False\n \n \n self.authenticated=False\n \n def __enter__(self):\n return self\n \n def __exit__(self,*args):\n is_connected=lambda :hasattr(self,\"file\")\n if is_connected():\n try :\n self.quit()\n except (OSError,EOFError):\n pass\n finally :\n if is_connected():\n self._close()\n \n def getwelcome(self):\n ''\n\n\n \n \n if self.debugging:print('*welcome*',repr(self.welcome))\n return self.welcome\n \n def getcapabilities(self):\n ''\n\n \n if self._caps is None :\n self.nntp_version=1\n self.nntp_implementation=None\n try :\n resp,caps=self.capabilities()\n except (NNTPPermanentError,NNTPTemporaryError):\n \n self._caps={}\n else :\n self._caps=caps\n if 'VERSION'in caps:\n \n \n self.nntp_version=max(map(int,caps['VERSION']))\n if 'IMPLEMENTATION'in caps:\n self.nntp_implementation=' '.join(caps['IMPLEMENTATION'])\n return self._caps\n \n def set_debuglevel(self,level):\n ''\n\n\n \n \n self.debugging=level\n debug=set_debuglevel\n \n def _putline(self,line):\n ''\n \n line=line+_CRLF\n if self.debugging >1:print('*put*',repr(line))\n self.file.write(line)\n self.file.flush()\n \n def _putcmd(self,line):\n ''\n \n if self.debugging:print('*cmd*',repr(line))\n line=line.encode(self.encoding,self.errors)\n self._putline(line)\n \n def _getline(self,strip_crlf=True ):\n ''\n\n \n line=self.file.readline(_MAXLINE+1)\n if len(line)>_MAXLINE:\n raise NNTPDataError('line too long')\n if self.debugging >1:\n print('*get*',repr(line))\n if not line:raise EOFError\n if strip_crlf:\n if line[-2:]==_CRLF:\n line=line[:-2]\n elif line[-1:]in _CRLF:\n line=line[:-1]\n return line\n \n def _getresp(self):\n ''\n\n \n resp=self._getline()\n if self.debugging:print('*resp*',repr(resp))\n resp=resp.decode(self.encoding,self.errors)\n c=resp[:1]\n if c =='4':\n raise NNTPTemporaryError(resp)\n if c =='5':\n raise NNTPPermanentError(resp)\n if c not in '123':\n raise NNTPProtocolError(resp)\n return resp\n \n def _getlongresp(self,file=None ):\n ''\n\n\n\n\n\n \n \n openedFile=None\n try :\n \n if isinstance(file,(str,bytes)):\n openedFile=file=open(file,\"wb\")\n \n resp=self._getresp()\n if 
resp[:3]not in _LONGRESP:\n raise NNTPReplyError(resp)\n \n lines=[]\n if file is not None :\n \n terminators=(b'.'+_CRLF,b'.\\n')\n while 1:\n line=self._getline(False )\n if line in terminators:\n break\n if line.startswith(b'..'):\n line=line[1:]\n file.write(line)\n else :\n terminator=b'.'\n while 1:\n line=self._getline()\n if line ==terminator:\n break\n if line.startswith(b'..'):\n line=line[1:]\n lines.append(line)\n finally :\n \n if openedFile:\n openedFile.close()\n \n return resp,lines\n \n def _shortcmd(self,line):\n ''\n \n self._putcmd(line)\n return self._getresp()\n \n def _longcmd(self,line,file=None ):\n ''\n \n self._putcmd(line)\n return self._getlongresp(file)\n \n def _longcmdstring(self,line,file=None ):\n ''\n\n\n \n self._putcmd(line)\n resp,list=self._getlongresp(file)\n return resp,[line.decode(self.encoding,self.errors)\n for line in list]\n \n def _getoverviewfmt(self):\n ''\n \n try :\n return self._cachedoverviewfmt\n except AttributeError:\n pass\n try :\n resp,lines=self._longcmdstring(\"LIST OVERVIEW.FMT\")\n except NNTPPermanentError:\n \n fmt=_DEFAULT_OVERVIEW_FMT[:]\n else :\n fmt=_parse_overview_fmt(lines)\n self._cachedoverviewfmt=fmt\n return fmt\n \n def _grouplist(self,lines):\n \n return [GroupInfo(*line.split())for line in lines]\n \n def capabilities(self):\n ''\n\n\n\n\n \n caps={}\n resp,lines=self._longcmdstring(\"CAPABILITIES\")\n for line in lines:\n name,*tokens=line.split()\n caps[name]=tokens\n return resp,caps\n \n def newgroups(self,date,*,file=None ):\n ''\n\n\n\n\n \n if not isinstance(date,(datetime.date,datetime.date)):\n raise TypeError(\n \"the date parameter must be a date or datetime object, \"\n \"not '{:40}'\".format(date.__class__.__name__))\n date_str,time_str=_unparse_datetime(date,self.nntp_version <2)\n cmd='NEWGROUPS {0} {1}'.format(date_str,time_str)\n resp,lines=self._longcmdstring(cmd,file)\n return resp,self._grouplist(lines)\n \n def newnews(self,group,date,*,file=None ):\n ''\n\n\n\n\n\n \n if not isinstance(date,(datetime.date,datetime.date)):\n raise TypeError(\n \"the date parameter must be a date or datetime object, \"\n \"not '{:40}'\".format(date.__class__.__name__))\n date_str,time_str=_unparse_datetime(date,self.nntp_version <2)\n cmd='NEWNEWS {0} {1} {2}'.format(group,date_str,time_str)\n return self._longcmdstring(cmd,file)\n \n def list(self,group_pattern=None ,*,file=None ):\n ''\n\n\n\n\n\n \n if group_pattern is not None :\n command='LIST ACTIVE '+group_pattern\n else :\n command='LIST'\n resp,lines=self._longcmdstring(command,file)\n return resp,self._grouplist(lines)\n \n def _getdescriptions(self,group_pattern,return_all):\n line_pat=re.compile('^(?P[^ \\t]+)[ \\t]+(.*)$')\n \n resp,lines=self._longcmdstring('LIST NEWSGROUPS '+group_pattern)\n if not resp.startswith('215'):\n \n \n \n resp,lines=self._longcmdstring('XGTITLE '+group_pattern)\n groups={}\n for raw_line in lines:\n match=line_pat.search(raw_line.strip())\n if match:\n name,desc=match.group(1,2)\n if not return_all:\n return desc\n groups[name]=desc\n if return_all:\n return resp,groups\n else :\n \n return ''\n \n def description(self,group):\n ''\n\n\n\n\n\n\n\n\n \n return self._getdescriptions(group,False )\n \n def descriptions(self,group_pattern):\n ''\n return self._getdescriptions(group_pattern,True )\n \n def group(self,name):\n ''\n\n\n\n\n\n\n\n \n resp=self._shortcmd('GROUP '+name)\n if not resp.startswith('211'):\n raise NNTPReplyError(resp)\n words=resp.split()\n count=first=last=0\n n=len(words)\n if n >1:\n 
count=words[1]\n if n >2:\n first=words[2]\n if n >3:\n last=words[3]\n if n >4:\n name=words[4].lower()\n return resp,int(count),int(first),int(last),name\n \n def help(self,*,file=None ):\n ''\n\n\n\n\n\n \n return self._longcmdstring('HELP',file)\n \n def _statparse(self,resp):\n ''\n \n if not resp.startswith('22'):\n raise NNTPReplyError(resp)\n words=resp.split()\n art_num=int(words[1])\n message_id=words[2]\n return resp,art_num,message_id\n \n def _statcmd(self,line):\n ''\n resp=self._shortcmd(line)\n return self._statparse(resp)\n \n def stat(self,message_spec=None ):\n ''\n\n\n\n\n\n\n \n if message_spec:\n return self._statcmd('STAT {0}'.format(message_spec))\n else :\n return self._statcmd('STAT')\n \n def next(self):\n ''\n return self._statcmd('NEXT')\n \n def last(self):\n ''\n return self._statcmd('LAST')\n \n def _artcmd(self,line,file=None ):\n ''\n resp,lines=self._longcmd(line,file)\n resp,art_num,message_id=self._statparse(resp)\n return resp,ArticleInfo(art_num,message_id,lines)\n \n def head(self,message_spec=None ,*,file=None ):\n ''\n\n\n\n\n\n \n if message_spec is not None :\n cmd='HEAD {0}'.format(message_spec)\n else :\n cmd='HEAD'\n return self._artcmd(cmd,file)\n \n def body(self,message_spec=None ,*,file=None ):\n ''\n\n\n\n\n\n \n if message_spec is not None :\n cmd='BODY {0}'.format(message_spec)\n else :\n cmd='BODY'\n return self._artcmd(cmd,file)\n \n def article(self,message_spec=None ,*,file=None ):\n ''\n\n\n\n\n\n \n if message_spec is not None :\n cmd='ARTICLE {0}'.format(message_spec)\n else :\n cmd='ARTICLE'\n return self._artcmd(cmd,file)\n \n def slave(self):\n ''\n\n \n return self._shortcmd('SLAVE')\n \n def xhdr(self,hdr,str,*,file=None ):\n ''\n\n\n\n\n\n\n \n pat=re.compile('^([0-9]+) ?(.*)\\n?')\n resp,lines=self._longcmdstring('XHDR {0} {1}'.format(hdr,str),file)\n def remove_number(line):\n m=pat.match(line)\n return m.group(1,2)if m else line\n return resp,[remove_number(line)for line in lines]\n \n def xover(self,start,end,*,file=None ):\n ''\n\n\n\n\n\n\n \n resp,lines=self._longcmdstring('XOVER {0}-{1}'.format(start,end),\n file)\n fmt=self._getoverviewfmt()\n return resp,_parse_overview(lines,fmt)\n \n def over(self,message_spec,*,file=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n cmd='OVER'if 'OVER'in self._caps else 'XOVER'\n if isinstance(message_spec,(tuple,list)):\n start,end=message_spec\n cmd +=' {0}-{1}'.format(start,end or '')\n elif message_spec is not None :\n cmd=cmd+' '+message_spec\n resp,lines=self._longcmdstring(cmd,file)\n fmt=self._getoverviewfmt()\n return resp,_parse_overview(lines,fmt)\n \n def xgtitle(self,group,*,file=None ):\n ''\n\n\n\n \n warnings.warn(\"The XGTITLE extension is not actively used, \"\n \"use descriptions() instead\",\n DeprecationWarning,2)\n line_pat=re.compile('^([^ \\t]+)[ \\t]+(.*)$')\n resp,raw_lines=self._longcmdstring('XGTITLE '+group,file)\n lines=[]\n for raw_line in raw_lines:\n match=line_pat.search(raw_line.strip())\n if match:\n lines.append(match.group(1,2))\n return resp,lines\n \n def xpath(self,id):\n ''\n\n\n\n\n \n warnings.warn(\"The XPATH extension is not actively used\",\n DeprecationWarning,2)\n \n resp=self._shortcmd('XPATH {0}'.format(id))\n if not resp.startswith('223'):\n raise NNTPReplyError(resp)\n try :\n [resp_num,path]=resp.split()\n except ValueError:\n raise NNTPReplyError(resp)from None\n else :\n return resp,path\n \n def date(self):\n ''\n\n\n\n \n resp=self._shortcmd(\"DATE\")\n if not resp.startswith('111'):\n raise NNTPReplyError(resp)\n 
elem=resp.split()\n if len(elem)!=2:\n raise NNTPDataError(resp)\n date=elem[1]\n if len(date)!=14:\n raise NNTPDataError(resp)\n return resp,_parse_datetime(date,None )\n \n def _post(self,command,f):\n resp=self._shortcmd(command)\n \n if not resp.startswith('3'):\n raise NNTPReplyError(resp)\n if isinstance(f,(bytes,bytearray)):\n f=f.splitlines()\n \n \n \n \n for line in f:\n if not line.endswith(_CRLF):\n line=line.rstrip(b\"\\r\\n\")+_CRLF\n if line.startswith(b'.'):\n line=b'.'+line\n self.file.write(line)\n self.file.write(b\".\\r\\n\")\n self.file.flush()\n return self._getresp()\n \n def post(self,data):\n ''\n\n\n \n return self._post('POST',data)\n \n def ihave(self,message_id,data):\n ''\n\n\n\n\n \n return self._post('IHAVE {0}'.format(message_id),data)\n \n def _close(self):\n self.file.close()\n del self.file\n \n def quit(self):\n ''\n \n try :\n resp=self._shortcmd('QUIT')\n finally :\n self._close()\n return resp\n \n def login(self,user=None ,password=None ,usenetrc=True ):\n if self.authenticated:\n raise ValueError(\"Already logged in.\")\n if not user and not usenetrc:\n raise ValueError(\n \"At least one of `user` and `usenetrc` must be specified\")\n \n \n \n try :\n if usenetrc and not user:\n import netrc\n credentials=netrc.netrc()\n auth=credentials.authenticators(self.host)\n if auth:\n user=auth[0]\n password=auth[2]\n except OSError:\n pass\n \n if not user:\n return\n resp=self._shortcmd('authinfo user '+user)\n if resp.startswith('381'):\n if not password:\n raise NNTPReplyError(resp)\n else :\n resp=self._shortcmd('authinfo pass '+password)\n if not resp.startswith('281'):\n raise NNTPPermanentError(resp)\n \n self._caps=None\n self.getcapabilities()\n \n \n if self.readermode_afterauth and 'READER'not in self._caps:\n self._setreadermode()\n \n self._caps=None\n self.getcapabilities()\n \n def _setreadermode(self):\n try :\n self.welcome=self._shortcmd('mode reader')\n except NNTPPermanentError:\n \n pass\n except NNTPTemporaryError as e:\n if e.response.startswith('480'):\n \n self.readermode_afterauth=True\n else :\n raise\n \n if _have_ssl:\n def starttls(self,context=None ):\n ''\n\n \n \n \n if self.tls_on:\n raise ValueError(\"TLS is already enabled.\")\n if self.authenticated:\n raise ValueError(\"TLS cannot be started after authentication.\")\n resp=self._shortcmd('STARTTLS')\n if resp.startswith('382'):\n self.file.close()\n self.sock=_encrypt_on(self.sock,context,self.host)\n self.file=self.sock.makefile(\"rwb\")\n self.tls_on=True\n \n \n self._caps=None\n self.getcapabilities()\n else :\n raise NNTPError(\"TLS failed to start.\")\n \n \nclass NNTP(_NNTPBase):\n\n def __init__(self,host,port=NNTP_PORT,user=None ,password=None ,\n readermode=None ,usenetrc=False ,\n timeout=_GLOBAL_DEFAULT_TIMEOUT):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n self.host=host\n self.port=port\n self.sock=socket.create_connection((host,port),timeout)\n file=None\n try :\n file=self.sock.makefile(\"rwb\")\n _NNTPBase.__init__(self,file,host,\n readermode,timeout)\n if user or usenetrc:\n self.login(user,password,usenetrc)\n except :\n if file:\n file.close()\n self.sock.close()\n raise\n \n def _close(self):\n try :\n _NNTPBase._close(self)\n finally :\n self.sock.close()\n \n \nif _have_ssl:\n class NNTP_SSL(_NNTPBase):\n \n def __init__(self,host,port=NNTP_SSL_PORT,\n user=None ,password=None ,ssl_context=None ,\n readermode=None ,usenetrc=False ,\n timeout=_GLOBAL_DEFAULT_TIMEOUT):\n ''\n\n \n self.sock=socket.create_connection((host,port),timeout)\n file=None\n 
try :\n self.sock=_encrypt_on(self.sock,ssl_context,host)\n file=self.sock.makefile(\"rwb\")\n _NNTPBase.__init__(self,file,host,\n readermode=readermode,timeout=timeout)\n if user or usenetrc:\n self.login(user,password,usenetrc)\n except :\n if file:\n file.close()\n self.sock.close()\n raise\n \n def _close(self):\n try :\n _NNTPBase._close(self)\n finally :\n self.sock.close()\n \n __all__.append(\"NNTP_SSL\")\n \n \n \nif __name__ =='__main__':\n import argparse\n \n parser=argparse.ArgumentParser(description=\"\"\"\\\n nntplib built-in demo - display the latest articles in a newsgroup\"\"\")\n parser.add_argument('-g','--group',default='gmane.comp.python.general',\n help='group to fetch messages from (default: %(default)s)')\n parser.add_argument('-s','--server',default='news.gmane.org',\n help='NNTP server hostname (default: %(default)s)')\n parser.add_argument('-p','--port',default=-1,type=int,\n help='NNTP port number (default: %s / %s)'%(NNTP_PORT,NNTP_SSL_PORT))\n parser.add_argument('-n','--nb-articles',default=10,type=int,\n help='number of articles to fetch (default: %(default)s)')\n parser.add_argument('-S','--ssl',action='store_true',default=False ,\n help='use NNTP over SSL')\n args=parser.parse_args()\n \n port=args.port\n if not args.ssl:\n if port ==-1:\n port=NNTP_PORT\n s=NNTP(host=args.server,port=port)\n else :\n if port ==-1:\n port=NNTP_SSL_PORT\n s=NNTP_SSL(host=args.server,port=port)\n \n caps=s.getcapabilities()\n if 'STARTTLS'in caps:\n s.starttls()\n resp,count,first,last,name=s.group(args.group)\n print('Group',name,'has',count,'articles, range',first,'to',last)\n \n def cut(s,lim):\n if len(s)>lim:\n s=s[:lim -4]+\"...\"\n return s\n \n first=str(int(last)-args.nb_articles+1)\n resp,overviews=s.xover(first,last)\n for artnum,over in overviews:\n author=decode_header(over['from']).split('<',1)[0]\n subject=decode_header(over['subject'])\n lines=int(over[':lines'])\n print(\"{:7} {:20} {:42} ({})\".format(\n artnum,cut(author,20),cut(subject,42),lines)\n )\n \n s.quit()\n", ["argparse", "collections", "datetime", "email.header", "netrc", "re", "socket", "ssl", "warnings"]], "numbers": [".py", "\n\n\n\"\"\"Abstract Base Classes (ABCs) for numbers, according to PEP 3141.\n\nTODO: Fill out more detailed documentation on the operators.\"\"\"\n\nfrom abc import ABCMeta,abstractmethod\n\n__all__=[\"Number\",\"Complex\",\"Real\",\"Rational\",\"Integral\"]\n\nclass Number(metaclass=ABCMeta):\n ''\n\n\n\n \n __slots__=()\n \n \n __hash__=None\n \n \n \n \n \n \n \n \n \n \nclass Complex(Number):\n ''\n\n\n\n\n\n\n\n \n \n __slots__=()\n \n @abstractmethod\n def __complex__(self):\n ''\n \n def __bool__(self):\n ''\n return self !=0\n \n @property\n @abstractmethod\n def real(self):\n ''\n\n\n \n raise NotImplementedError\n \n @property\n @abstractmethod\n def imag(self):\n ''\n\n\n \n raise NotImplementedError\n \n @abstractmethod\n def __add__(self,other):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __radd__(self,other):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __neg__(self):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __pos__(self):\n ''\n raise NotImplementedError\n \n def __sub__(self,other):\n ''\n return self+-other\n \n def __rsub__(self,other):\n ''\n return -self+other\n \n @abstractmethod\n def __mul__(self,other):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __rmul__(self,other):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __truediv__(self,other):\n ''\n raise 
NotImplementedError\n \n @abstractmethod\n def __rtruediv__(self,other):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __pow__(self,exponent):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __rpow__(self,base):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __abs__(self):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def conjugate(self):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __eq__(self,other):\n ''\n raise NotImplementedError\n \nComplex.register(complex)\n\n\nclass Real(Complex):\n ''\n\n\n\n\n\n \n \n __slots__=()\n \n @abstractmethod\n def __float__(self):\n ''\n\n \n raise NotImplementedError\n \n @abstractmethod\n def __trunc__(self):\n ''\n\n\n\n\n\n\n\n \n raise NotImplementedError\n \n @abstractmethod\n def __floor__(self):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __ceil__(self):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __round__(self,ndigits=None ):\n ''\n\n\n\n \n raise NotImplementedError\n \n def __divmod__(self,other):\n ''\n\n\n\n \n return (self //other,self %other)\n \n def __rdivmod__(self,other):\n ''\n\n\n\n \n return (other //self,other %self)\n \n @abstractmethod\n def __floordiv__(self,other):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __rfloordiv__(self,other):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __mod__(self,other):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __rmod__(self,other):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __lt__(self,other):\n ''\n\n \n raise NotImplementedError\n \n @abstractmethod\n def __le__(self,other):\n ''\n raise NotImplementedError\n \n \n def __complex__(self):\n ''\n return complex(float(self))\n \n @property\n def real(self):\n ''\n return +self\n \n @property\n def imag(self):\n ''\n return 0\n \n def conjugate(self):\n ''\n return +self\n \nReal.register(float)\n\n\nclass Rational(Real):\n ''\n \n __slots__=()\n \n @property\n @abstractmethod\n def numerator(self):\n raise NotImplementedError\n \n @property\n @abstractmethod\n def denominator(self):\n raise NotImplementedError\n \n \n def __float__(self):\n ''\n\n\n\n\n\n \n return self.numerator /self.denominator\n \n \nclass Integral(Rational):\n ''\n \n __slots__=()\n \n @abstractmethod\n def __int__(self):\n ''\n raise NotImplementedError\n \n def __index__(self):\n ''\n return int(self)\n \n @abstractmethod\n def __pow__(self,exponent,modulus=None ):\n ''\n\n\n\n\n\n \n raise NotImplementedError\n \n @abstractmethod\n def __lshift__(self,other):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __rlshift__(self,other):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __rshift__(self,other):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __rrshift__(self,other):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __and__(self,other):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __rand__(self,other):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __xor__(self,other):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __rxor__(self,other):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __or__(self,other):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __ror__(self,other):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __invert__(self):\n ''\n raise NotImplementedError\n \n \n def __float__(self):\n ''\n return float(int(self))\n \n @property\n def numerator(self):\n ''\n return +self\n 
\n @property\n def denominator(self):\n ''\n return 1\n \nIntegral.register(int)\n", ["abc"]], "opcode": [".py", "\n\"\"\"\nopcode module - potentially shared between dis and other modules which\noperate on bytecodes (e.g. peephole optimizers).\n\"\"\"\n\n__all__=[\"cmp_op\",\"hasconst\",\"hasname\",\"hasjrel\",\"hasjabs\",\n\"haslocal\",\"hascompare\",\"hasfree\",\"opname\",\"opmap\",\n\"HAVE_ARGUMENT\",\"EXTENDED_ARG\",\"hasnargs\"]\n\n\n\n\n\n\n\n\ntry :\n from _opcode import stack_effect\n __all__.append('stack_effect')\nexcept ImportError:\n pass\n \ncmp_op=('<','<=','==','!=','>','>=','in','not in','is',\n'is not','exception match','BAD')\n\nhasconst=[]\nhasname=[]\nhasjrel=[]\nhasjabs=[]\nhaslocal=[]\nhascompare=[]\nhasfree=[]\nhasnargs=[]\n\nopmap={}\nopname=['<%r>'%(op,)for op in range(256)]\n\ndef def_op(name,op):\n opname[op]=name\n opmap[name]=op\n \ndef name_op(name,op):\n def_op(name,op)\n hasname.append(op)\n \ndef jrel_op(name,op):\n def_op(name,op)\n hasjrel.append(op)\n \ndef jabs_op(name,op):\n def_op(name,op)\n hasjabs.append(op)\n \n \n \n \ndef_op('POP_TOP',1)\ndef_op('ROT_TWO',2)\ndef_op('ROT_THREE',3)\ndef_op('DUP_TOP',4)\ndef_op('DUP_TOP_TWO',5)\n\ndef_op('NOP',9)\ndef_op('UNARY_POSITIVE',10)\ndef_op('UNARY_NEGATIVE',11)\ndef_op('UNARY_NOT',12)\n\ndef_op('UNARY_INVERT',15)\n\ndef_op('BINARY_MATRIX_MULTIPLY',16)\ndef_op('INPLACE_MATRIX_MULTIPLY',17)\n\ndef_op('BINARY_POWER',19)\ndef_op('BINARY_MULTIPLY',20)\n\ndef_op('BINARY_MODULO',22)\ndef_op('BINARY_ADD',23)\ndef_op('BINARY_SUBTRACT',24)\ndef_op('BINARY_SUBSCR',25)\ndef_op('BINARY_FLOOR_DIVIDE',26)\ndef_op('BINARY_TRUE_DIVIDE',27)\ndef_op('INPLACE_FLOOR_DIVIDE',28)\ndef_op('INPLACE_TRUE_DIVIDE',29)\n\ndef_op('GET_AITER',50)\ndef_op('GET_ANEXT',51)\ndef_op('BEFORE_ASYNC_WITH',52)\n\ndef_op('INPLACE_ADD',55)\ndef_op('INPLACE_SUBTRACT',56)\ndef_op('INPLACE_MULTIPLY',57)\n\ndef_op('INPLACE_MODULO',59)\ndef_op('STORE_SUBSCR',60)\ndef_op('DELETE_SUBSCR',61)\ndef_op('BINARY_LSHIFT',62)\ndef_op('BINARY_RSHIFT',63)\ndef_op('BINARY_AND',64)\ndef_op('BINARY_XOR',65)\ndef_op('BINARY_OR',66)\ndef_op('INPLACE_POWER',67)\ndef_op('GET_ITER',68)\ndef_op('GET_YIELD_FROM_ITER',69)\n\ndef_op('PRINT_EXPR',70)\ndef_op('LOAD_BUILD_CLASS',71)\ndef_op('YIELD_FROM',72)\ndef_op('GET_AWAITABLE',73)\n\ndef_op('INPLACE_LSHIFT',75)\ndef_op('INPLACE_RSHIFT',76)\ndef_op('INPLACE_AND',77)\ndef_op('INPLACE_XOR',78)\ndef_op('INPLACE_OR',79)\ndef_op('BREAK_LOOP',80)\ndef_op('WITH_CLEANUP_START',81)\ndef_op('WITH_CLEANUP_FINISH',82)\n\ndef_op('RETURN_VALUE',83)\ndef_op('IMPORT_STAR',84)\ndef_op('SETUP_ANNOTATIONS',85)\ndef_op('YIELD_VALUE',86)\ndef_op('POP_BLOCK',87)\ndef_op('END_FINALLY',88)\ndef_op('POP_EXCEPT',89)\n\nHAVE_ARGUMENT=90\n\nname_op('STORE_NAME',90)\nname_op('DELETE_NAME',91)\ndef_op('UNPACK_SEQUENCE',92)\njrel_op('FOR_ITER',93)\ndef_op('UNPACK_EX',94)\nname_op('STORE_ATTR',95)\nname_op('DELETE_ATTR',96)\nname_op('STORE_GLOBAL',97)\nname_op('DELETE_GLOBAL',98)\ndef_op('LOAD_CONST',100)\nhasconst.append(100)\nname_op('LOAD_NAME',101)\ndef_op('BUILD_TUPLE',102)\ndef_op('BUILD_LIST',103)\ndef_op('BUILD_SET',104)\ndef_op('BUILD_MAP',105)\nname_op('LOAD_ATTR',106)\ndef_op('COMPARE_OP',107)\nhascompare.append(107)\nname_op('IMPORT_NAME',108)\nname_op('IMPORT_FROM',109)\n\njrel_op('JUMP_FORWARD',110)\njabs_op('JUMP_IF_FALSE_OR_POP',111)\njabs_op('JUMP_IF_TRUE_OR_POP',112)\njabs_op('JUMP_ABSOLUTE',113)\njabs_op('POP_JUMP_IF_FALSE',114)\njabs_op('POP_JUMP_IF_TRUE',115)\n\nname_op('LOAD_GLOBAL',116)\n\njabs_op('CONTINUE_LOOP',119)\njrel_op('SETUP
_LOOP',120)\njrel_op('SETUP_EXCEPT',121)\njrel_op('SETUP_FINALLY',122)\n\ndef_op('LOAD_FAST',124)\nhaslocal.append(124)\ndef_op('STORE_FAST',125)\nhaslocal.append(125)\ndef_op('DELETE_FAST',126)\nhaslocal.append(126)\n\ndef_op('RAISE_VARARGS',130)\ndef_op('CALL_FUNCTION',131)\ndef_op('MAKE_FUNCTION',132)\ndef_op('BUILD_SLICE',133)\ndef_op('LOAD_CLOSURE',135)\nhasfree.append(135)\ndef_op('LOAD_DEREF',136)\nhasfree.append(136)\ndef_op('STORE_DEREF',137)\nhasfree.append(137)\ndef_op('DELETE_DEREF',138)\nhasfree.append(138)\n\ndef_op('CALL_FUNCTION_KW',141)\ndef_op('CALL_FUNCTION_EX',142)\n\njrel_op('SETUP_WITH',143)\n\ndef_op('LIST_APPEND',145)\ndef_op('SET_ADD',146)\ndef_op('MAP_ADD',147)\n\ndef_op('LOAD_CLASSDEREF',148)\nhasfree.append(148)\n\ndef_op('EXTENDED_ARG',144)\nEXTENDED_ARG=144\n\ndef_op('BUILD_LIST_UNPACK',149)\ndef_op('BUILD_MAP_UNPACK',150)\ndef_op('BUILD_MAP_UNPACK_WITH_CALL',151)\ndef_op('BUILD_TUPLE_UNPACK',152)\ndef_op('BUILD_SET_UNPACK',153)\n\njrel_op('SETUP_ASYNC_WITH',154)\n\ndef_op('FORMAT_VALUE',155)\ndef_op('BUILD_CONST_KEY_MAP',156)\ndef_op('BUILD_STRING',157)\ndef_op('BUILD_TUPLE_UNPACK_WITH_CALL',158)\n\nname_op('LOAD_METHOD',160)\ndef_op('CALL_METHOD',161)\n\ndel def_op,name_op,jrel_op,jabs_op\n", ["_opcode"]], "operator": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n__all__=['abs','add','and_','attrgetter','concat','contains','countOf',\n'delitem','eq','floordiv','ge','getitem','gt','iadd','iand',\n'iconcat','ifloordiv','ilshift','imatmul','imod','imul',\n'index','indexOf','inv','invert','ior','ipow','irshift',\n'is_','is_not','isub','itemgetter','itruediv','ixor','le',\n'length_hint','lshift','lt','matmul','methodcaller','mod',\n'mul','ne','neg','not_','or_','pos','pow','rshift',\n'setitem','sub','truediv','truth','xor']\n\nfrom builtins import abs as _abs\n\n\n\n\ndef lt(a,b):\n ''\n return a =b\n \ndef gt(a,b):\n ''\n return a >b\n \n \n \ndef not_(a):\n ''\n return not a\n \ndef truth(a):\n ''\n return True if a else False\n \ndef is_(a,b):\n ''\n return a is b\n \ndef is_not(a,b):\n ''\n return a is not b\n \n \n \ndef abs(a):\n ''\n return _abs(a)\n \ndef add(a,b):\n ''\n return a+b\n \ndef and_(a,b):\n ''\n return a&b\n \ndef floordiv(a,b):\n ''\n return a //b\n \ndef index(a):\n ''\n return a.__index__()\n \ndef inv(a):\n ''\n return ~a\ninvert=inv\n\ndef lshift(a,b):\n ''\n return a <>b\n \ndef sub(a,b):\n ''\n return a -b\n \ndef truediv(a,b):\n ''\n return a /b\n \ndef xor(a,b):\n ''\n return a ^b\n \n \n \ndef concat(a,b):\n ''\n if not hasattr(a,'__getitem__'):\n msg=\"'%s' object can't be concatenated\"%type(a).__name__\n raise TypeError(msg)\n return a+b\n \ndef contains(a,b):\n ''\n return b in a\n \ndef countOf(a,b):\n ''\n count=0\n for i in a:\n if i ==b:\n count +=1\n return count\n \ndef delitem(a,b):\n ''\n del a[b]\n \ndef getitem(a,b):\n ''\n return a[b]\n \ndef indexOf(a,b):\n ''\n for i,j in enumerate(a):\n if j ==b:\n return i\n else :\n raise ValueError('sequence.index(x): x not in sequence')\n \ndef setitem(a,b,c):\n ''\n a[b]=c\n \ndef length_hint(obj,default=0):\n ''\n\n\n\n\n\n\n \n if not isinstance(default,int):\n msg=(\"'%s' object cannot be interpreted as an integer\"%\n type(default).__name__)\n raise TypeError(msg)\n \n try :\n return len(obj)\n except TypeError:\n pass\n \n try :\n hint=type(obj).__length_hint__\n except AttributeError:\n return default\n \n try :\n val=hint(obj)\n except TypeError:\n return default\n if val is NotImplemented:\n return default\n if not isinstance(val,int):\n msg=('__length_hint__ must be integer, not 
%s'%\n type(val).__name__)\n raise TypeError(msg)\n if val <0:\n msg='__length_hint__() should return >= 0'\n raise ValueError(msg)\n return val\n \n \n \nclass attrgetter:\n ''\n\n\n\n\n\n \n __slots__=('_attrs','_call')\n \n def __init__(self,attr,*attrs):\n if not attrs:\n if not isinstance(attr,str):\n raise TypeError('attribute name must be a string')\n self._attrs=(attr,)\n names=attr.split('.')\n def func(obj):\n for name in names:\n obj=getattr(obj,name)\n return obj\n self._call=func\n else :\n self._attrs=(attr,)+attrs\n getters=tuple(map(attrgetter,self._attrs))\n def func(obj):\n return tuple(getter(obj)for getter in getters)\n self._call=func\n \n def __call__(self,obj):\n return self._call(obj)\n \n def __repr__(self):\n return '%s.%s(%s)'%(self.__class__.__module__,\n self.__class__.__qualname__,\n ', '.join(map(repr,self._attrs)))\n \n def __reduce__(self):\n return self.__class__,self._attrs\n \nclass itemgetter:\n ''\n\n\n\n \n __slots__=('_items','_call')\n \n def __init__(self,item,*items):\n if not items:\n self._items=(item,)\n def func(obj):\n return obj[item]\n self._call=func\n else :\n self._items=items=(item,)+items\n def func(obj):\n return tuple(obj[i]for i in items)\n self._call=func\n \n def __call__(self,obj):\n return self._call(obj)\n \n def __repr__(self):\n return '%s.%s(%s)'%(self.__class__.__module__,\n self.__class__.__name__,\n ', '.join(map(repr,self._items)))\n \n def __reduce__(self):\n return self.__class__,self._items\n \nclass methodcaller:\n ''\n\n\n\n\n \n __slots__=('_name','_args','_kwargs')\n \n def __init__(*args,**kwargs):\n if len(args)<2:\n msg=\"methodcaller needs at least one argument, the method name\"\n raise TypeError(msg)\n self=args[0]\n self._name=args[1]\n if not isinstance(self._name,str):\n raise TypeError('method name must be a string')\n self._args=args[2:]\n self._kwargs=kwargs\n \n def __call__(self,obj):\n return getattr(obj,self._name)(*self._args,**self._kwargs)\n \n def __repr__(self):\n args=[repr(self._name)]\n args.extend(map(repr,self._args))\n args.extend('%s=%r'%(k,v)for k,v in self._kwargs.items())\n return '%s.%s(%s)'%(self.__class__.__module__,\n self.__class__.__name__,\n ', '.join(args))\n \n def __reduce__(self):\n if not self._kwargs:\n return self.__class__,(self._name,)+self._args\n else :\n from functools import partial\n return partial(self.__class__,self._name,**self._kwargs),self._args\n \n \n \n \ndef iadd(a,b):\n ''\n a +=b\n return a\n \ndef iand(a,b):\n ''\n a &=b\n return a\n \ndef iconcat(a,b):\n ''\n if not hasattr(a,'__getitem__'):\n msg=\"'%s' object can't be concatenated\"%type(a).__name__\n raise TypeError(msg)\n a +=b\n return a\n \ndef ifloordiv(a,b):\n ''\n a //=b\n return a\n \ndef ilshift(a,b):\n ''\n a <<=b\n return a\n \ndef imod(a,b):\n ''\n a %=b\n return a\n \ndef imul(a,b):\n ''\n a *=b\n return a\n \ndef imatmul(a,b):\n ''\n a @=b\n return a\n \ndef ior(a,b):\n ''\n a |=b\n return a\n \ndef ipow(a,b):\n ''\n a **=b\n return a\n \ndef irshift(a,b):\n ''\n a >>=b\n return a\n \ndef isub(a,b):\n ''\n a -=b\n return a\n \ndef itruediv(a,b):\n ''\n a /=b\n return a\n \ndef ixor(a,b):\n ''\n a ^=b\n return a\n \n \ntry :\n from _operator import *\nexcept ImportError:\n pass\nelse :\n from _operator import __doc__\n \n \n 
\n__lt__=lt\n__le__=le\n__eq__=eq\n__ne__=ne\n__ge__=ge\n__gt__=gt\n__not__=not_\n__abs__=abs\n__add__=add\n__and__=and_\n__floordiv__=floordiv\n__index__=index\n__inv__=inv\n__invert__=invert\n__lshift__=lshift\n__mod__=mod\n__mul__=mul\n__matmul__=matmul\n__neg__=neg\n__or__=or_\n__pos__=pos\n__pow__=pow\n__rshift__=rshift\n__sub__=sub\n__truediv__=truediv\n__xor__=xor\n__concat__=concat\n__contains__=contains\n__delitem__=delitem\n__getitem__=getitem\n__setitem__=setitem\n__iadd__=iadd\n__iand__=iand\n__iconcat__=iconcat\n__ifloordiv__=ifloordiv\n__ilshift__=ilshift\n__imod__=imod\n__imul__=imul\n__imatmul__=imatmul\n__ior__=ior\n__ipow__=ipow\n__irshift__=irshift\n__isub__=isub\n__itruediv__=itruediv\n__ixor__=ixor\n", ["_operator", "builtins", "functools"]], "optparse": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n__version__=\"1.5.3\"\n\n__all__=['Option',\n'make_option',\n'SUPPRESS_HELP',\n'SUPPRESS_USAGE',\n'Values',\n'OptionContainer',\n'OptionGroup',\n'OptionParser',\n'HelpFormatter',\n'IndentedHelpFormatter',\n'TitledHelpFormatter',\n'OptParseError',\n'OptionError',\n'OptionConflictError',\n'OptionValueError',\n'BadOptionError',\n'check_choice']\n\n__copyright__=\"\"\"\nCopyright (c) 2001-2006 Gregory P. Ward. All rights reserved.\nCopyright (c) 2002-2006 Python Software Foundation. All rights reserved.\n\nRedistribution and use in source and binary forms, with or without\nmodification, are permitted provided that the following conditions are\nmet:\n\n * Redistributions of source code must retain the above copyright\n notice, this list of conditions and the following disclaimer.\n\n * Redistributions in binary form must reproduce the above copyright\n notice, this list of conditions and the following disclaimer in the\n documentation and/or other materials provided with the distribution.\n\n * Neither the name of the author nor the names of its\n contributors may be used to endorse or promote products derived from\n this software without specific prior written permission.\n\nTHIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS\nIS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED\nTO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A\nPARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE AUTHOR OR\nCONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,\nEXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,\nPROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR\nPROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF\nLIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING\nNEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS\nSOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\"\"\"\n\nimport sys,os\nimport textwrap\n\ndef _repr(self):\n return \"<%s at 0x%x: %s>\"%(self.__class__.__name__,id(self),self)\n \n \n \n \n \n \n \n \ntry :\n from gettext import gettext,ngettext\nexcept ImportError:\n def gettext(message):\n return message\n \n def ngettext(singular,plural,n):\n if n ==1:\n return singular\n return plural\n \n_=gettext\n\n\nclass OptParseError(Exception):\n def __init__(self,msg):\n self.msg=msg\n \n def __str__(self):\n return self.msg\n \n \nclass OptionError(OptParseError):\n ''\n\n\n \n \n def __init__(self,msg,option):\n self.msg=msg\n self.option_id=str(option)\n \n def __str__(self):\n if self.option_id:\n return \"option %s: %s\"%(self.option_id,self.msg)\n else :\n return self.msg\n \nclass OptionConflictError(OptionError):\n ''\n\n \n \nclass OptionValueError(OptParseError):\n ''\n\n\n \n \nclass BadOptionError(OptParseError):\n ''\n\n \n def __init__(self,opt_str):\n self.opt_str=opt_str\n \n def __str__(self):\n return _(\"no such option: %s\")%self.opt_str\n \nclass AmbiguousOptionError(BadOptionError):\n ''\n\n \n def __init__(self,opt_str,possibilities):\n BadOptionError.__init__(self,opt_str)\n self.possibilities=possibilities\n \n def __str__(self):\n return (_(\"ambiguous option: %s (%s?)\")\n %(self.opt_str,\", \".join(self.possibilities)))\n \n \nclass HelpFormatter:\n\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n NO_DEFAULT_VALUE=\"none\"\n \n def __init__(self,\n indent_increment,\n max_help_position,\n width,\n short_first):\n self.parser=None\n self.indent_increment=indent_increment\n if width is None :\n try :\n width=int(os.environ['COLUMNS'])\n except (KeyError,ValueError):\n width=80\n width -=2\n self.width=width\n self.help_position=self.max_help_position=\\\n min(max_help_position,max(width -20,indent_increment *2))\n self.current_indent=0\n self.level=0\n self.help_width=None\n self.short_first=short_first\n self.default_tag=\"%default\"\n self.option_strings={}\n self._short_opt_fmt=\"%s %s\"\n self._long_opt_fmt=\"%s=%s\"\n \n def set_parser(self,parser):\n self.parser=parser\n \n def set_short_opt_delimiter(self,delim):\n if delim not in (\"\",\" \"):\n raise ValueError(\n \"invalid metavar delimiter for short options: %r\"%delim)\n self._short_opt_fmt=\"%s\"+delim+\"%s\"\n \n def set_long_opt_delimiter(self,delim):\n if delim not in (\"=\",\" \"):\n raise ValueError(\n \"invalid metavar delimiter for long options: %r\"%delim)\n self._long_opt_fmt=\"%s\"+delim+\"%s\"\n \n def indent(self):\n self.current_indent +=self.indent_increment\n self.level +=1\n \n def dedent(self):\n self.current_indent -=self.indent_increment\n assert self.current_indent >=0,\"Indent decreased below 0.\"\n self.level -=1\n \n def format_usage(self,usage):\n raise NotImplementedError(\"subclasses must implement\")\n \n def format_heading(self,heading):\n raise NotImplementedError(\"subclasses must implement\")\n \n def _format_text(self,text):\n ''\n\n\n \n text_width=max(self.width 
-self.current_indent,11)\n indent=\" \"*self.current_indent\n return textwrap.fill(text,\n text_width,\n initial_indent=indent,\n subsequent_indent=indent)\n \n def format_description(self,description):\n if description:\n return self._format_text(description)+\"\\n\"\n else :\n return \"\"\n \n def format_epilog(self,epilog):\n if epilog:\n return \"\\n\"+self._format_text(epilog)+\"\\n\"\n else :\n return \"\"\n \n \n def expand_default(self,option):\n if self.parser is None or not self.default_tag:\n return option.help\n \n default_value=self.parser.defaults.get(option.dest)\n if default_value is NO_DEFAULT or default_value is None :\n default_value=self.NO_DEFAULT_VALUE\n \n return option.help.replace(self.default_tag,str(default_value))\n \n def format_option(self,option):\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n result=[]\n opts=self.option_strings[option]\n opt_width=self.help_position -self.current_indent -2\n if len(opts)>opt_width:\n opts=\"%*s%s\\n\"%(self.current_indent,\"\",opts)\n indent_first=self.help_position\n else :\n opts=\"%*s%-*s \"%(self.current_indent,\"\",opt_width,opts)\n indent_first=0\n result.append(opts)\n if option.help:\n help_text=self.expand_default(option)\n help_lines=textwrap.wrap(help_text,self.help_width)\n result.append(\"%*s%s\\n\"%(indent_first,\"\",help_lines[0]))\n result.extend([\"%*s%s\\n\"%(self.help_position,\"\",line)\n for line in help_lines[1:]])\n elif opts[-1]!=\"\\n\":\n result.append(\"\\n\")\n return \"\".join(result)\n \n def store_option_strings(self,parser):\n self.indent()\n max_len=0\n for opt in parser.option_list:\n strings=self.format_option_strings(opt)\n self.option_strings[opt]=strings\n max_len=max(max_len,len(strings)+self.current_indent)\n self.indent()\n for group in parser.option_groups:\n for opt in group.option_list:\n strings=self.format_option_strings(opt)\n self.option_strings[opt]=strings\n max_len=max(max_len,len(strings)+self.current_indent)\n self.dedent()\n self.dedent()\n self.help_position=min(max_len+2,self.max_help_position)\n self.help_width=max(self.width -self.help_position,11)\n \n def format_option_strings(self,option):\n ''\n if option.takes_value():\n metavar=option.metavar or option.dest.upper()\n short_opts=[self._short_opt_fmt %(sopt,metavar)\n for sopt in option._short_opts]\n long_opts=[self._long_opt_fmt %(lopt,metavar)\n for lopt in option._long_opts]\n else :\n short_opts=option._short_opts\n long_opts=option._long_opts\n \n if self.short_first:\n opts=short_opts+long_opts\n else :\n opts=long_opts+short_opts\n \n return \", \".join(opts)\n \nclass IndentedHelpFormatter(HelpFormatter):\n ''\n \n \n def __init__(self,\n indent_increment=2,\n max_help_position=24,\n width=None ,\n short_first=1):\n HelpFormatter.__init__(\n self,indent_increment,max_help_position,width,short_first)\n \n def format_usage(self,usage):\n return _(\"Usage: %s\\n\")%usage\n \n def format_heading(self,heading):\n return \"%*s%s:\\n\"%(self.current_indent,\"\",heading)\n \n \nclass TitledHelpFormatter(HelpFormatter):\n ''\n \n \n def __init__(self,\n indent_increment=0,\n max_help_position=24,\n width=None ,\n short_first=0):\n HelpFormatter.__init__(\n self,indent_increment,max_help_position,width,short_first)\n \n def format_usage(self,usage):\n return \"%s %s\\n\"%(self.format_heading(_(\"Usage\")),usage)\n \n def format_heading(self,heading):\n return \"%s\\n%s\\n\"%(heading,\"=-\"[self.level]*len(heading))\n \n \ndef _parse_num(val,type):\n if val[:2].lower()==\"0x\":\n radix=16\n elif 
val[:2].lower()==\"0b\":\n radix=2\n val=val[2:]or \"0\"\n elif val[:1]==\"0\":\n radix=8\n else :\n radix=10\n \n return type(val,radix)\n \ndef _parse_int(val):\n return _parse_num(val,int)\n \n_builtin_cvt={\"int\":(_parse_int,_(\"integer\")),\n\"long\":(_parse_int,_(\"integer\")),\n\"float\":(float,_(\"floating-point\")),\n\"complex\":(complex,_(\"complex\"))}\n\ndef check_builtin(option,opt,value):\n (cvt,what)=_builtin_cvt[option.type]\n try :\n return cvt(value)\n except ValueError:\n raise OptionValueError(\n _(\"option %s: invalid %s value: %r\")%(opt,what,value))\n \ndef check_choice(option,opt,value):\n if value in option.choices:\n return value\n else :\n choices=\", \".join(map(repr,option.choices))\n raise OptionValueError(\n _(\"option %s: invalid choice: %r (choose from %s)\")\n %(opt,value,choices))\n \n \n \nNO_DEFAULT=(\"NO\",\"DEFAULT\")\n\n\nclass Option:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n \n \n ATTRS=['action',\n 'type',\n 'dest',\n 'default',\n 'nargs',\n 'const',\n 'choices',\n 'callback',\n 'callback_args',\n 'callback_kwargs',\n 'help',\n 'metavar']\n \n \n \n ACTIONS=(\"store\",\n \"store_const\",\n \"store_true\",\n \"store_false\",\n \"append\",\n \"append_const\",\n \"count\",\n \"callback\",\n \"help\",\n \"version\")\n \n \n \n \n STORE_ACTIONS=(\"store\",\n \"store_const\",\n \"store_true\",\n \"store_false\",\n \"append\",\n \"append_const\",\n \"count\")\n \n \n \n TYPED_ACTIONS=(\"store\",\n \"append\",\n \"callback\")\n \n \n \n ALWAYS_TYPED_ACTIONS=(\"store\",\n \"append\")\n \n \n CONST_ACTIONS=(\"store_const\",\n \"append_const\")\n \n \n \n TYPES=(\"string\",\"int\",\"long\",\"float\",\"complex\",\"choice\")\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n TYPE_CHECKER={\"int\":check_builtin,\n \"long\":check_builtin,\n \"float\":check_builtin,\n \"complex\":check_builtin,\n \"choice\":check_choice,\n }\n \n \n \n \n \n \n \n \n \n \n CHECK_METHODS=None\n \n \n \n \n def __init__(self,*opts,**attrs):\n \n \n self._short_opts=[]\n self._long_opts=[]\n opts=self._check_opt_strings(opts)\n self._set_opt_strings(opts)\n \n \n self._set_attrs(attrs)\n \n \n \n \n \n \n for checker in self.CHECK_METHODS:\n checker(self)\n \n def _check_opt_strings(self,opts):\n \n \n \n opts=[opt for opt in opts if opt]\n if not opts:\n raise TypeError(\"at least one option string must be supplied\")\n return opts\n \n def _set_opt_strings(self,opts):\n for opt in opts:\n if len(opt)<2:\n raise OptionError(\n \"invalid option string %r: \"\n \"must be at least two characters long\"%opt,self)\n elif len(opt)==2:\n if not (opt[0]==\"-\"and opt[1]!=\"-\"):\n raise OptionError(\n \"invalid short option string %r: \"\n \"must be of the form -x, (x any non-dash char)\"%opt,\n self)\n self._short_opts.append(opt)\n else :\n if not (opt[0:2]==\"--\"and opt[2]!=\"-\"):\n raise OptionError(\n \"invalid long option string %r: \"\n \"must start with --, followed by non-dash\"%opt,\n self)\n self._long_opts.append(opt)\n \n def _set_attrs(self,attrs):\n for attr in self.ATTRS:\n if attr in attrs:\n setattr(self,attr,attrs[attr])\n del attrs[attr]\n else :\n if attr =='default':\n setattr(self,attr,NO_DEFAULT)\n else :\n setattr(self,attr,None )\n if attrs:\n attrs=sorted(attrs.keys())\n raise OptionError(\n \"invalid keyword arguments: %s\"%\", \".join(attrs),\n self)\n \n \n \n \n def _check_action(self):\n if self.action is None :\n self.action=\"store\"\n elif self.action not in self.ACTIONS:\n raise OptionError(\"invalid action: %r\"%self.action,self)\n \n def 
_check_type(self):\n if self.type is None :\n if self.action in self.ALWAYS_TYPED_ACTIONS:\n if self.choices is not None :\n \n self.type=\"choice\"\n else :\n \n self.type=\"string\"\n else :\n \n \n if isinstance(self.type,type):\n self.type=self.type.__name__\n \n if self.type ==\"str\":\n self.type=\"string\"\n \n if self.type not in self.TYPES:\n raise OptionError(\"invalid option type: %r\"%self.type,self)\n if self.action not in self.TYPED_ACTIONS:\n raise OptionError(\n \"must not supply a type for action %r\"%self.action,self)\n \n def _check_choice(self):\n if self.type ==\"choice\":\n if self.choices is None :\n raise OptionError(\n \"must supply a list of choices for type 'choice'\",self)\n elif not isinstance(self.choices,(tuple,list)):\n raise OptionError(\n \"choices must be a list of strings ('%s' supplied)\"\n %str(type(self.choices)).split(\"'\")[1],self)\n elif self.choices is not None :\n raise OptionError(\n \"must not supply choices for type %r\"%self.type,self)\n \n def _check_dest(self):\n \n \n takes_value=(self.action in self.STORE_ACTIONS or\n self.type is not None )\n if self.dest is None and takes_value:\n \n \n \n if self._long_opts:\n \n self.dest=self._long_opts[0][2:].replace('-','_')\n else :\n self.dest=self._short_opts[0][1]\n \n def _check_const(self):\n if self.action not in self.CONST_ACTIONS and self.const is not None :\n raise OptionError(\n \"'const' must not be supplied for action %r\"%self.action,\n self)\n \n def _check_nargs(self):\n if self.action in self.TYPED_ACTIONS:\n if self.nargs is None :\n self.nargs=1\n elif self.nargs is not None :\n raise OptionError(\n \"'nargs' must not be supplied for action %r\"%self.action,\n self)\n \n def _check_callback(self):\n if self.action ==\"callback\":\n if not callable(self.callback):\n raise OptionError(\n \"callback not callable: %r\"%self.callback,self)\n if (self.callback_args is not None and\n not isinstance(self.callback_args,tuple)):\n raise OptionError(\n \"callback_args, if supplied, must be a tuple: not %r\"\n %self.callback_args,self)\n if (self.callback_kwargs is not None and\n not isinstance(self.callback_kwargs,dict)):\n raise OptionError(\n \"callback_kwargs, if supplied, must be a dict: not %r\"\n %self.callback_kwargs,self)\n else :\n if self.callback is not None :\n raise OptionError(\n \"callback supplied (%r) for non-callback option\"\n %self.callback,self)\n if self.callback_args is not None :\n raise OptionError(\n \"callback_args supplied for non-callback option\",self)\n if self.callback_kwargs is not None :\n raise OptionError(\n \"callback_kwargs supplied for non-callback option\",self)\n \n \n CHECK_METHODS=[_check_action,\n _check_type,\n _check_choice,\n _check_dest,\n _check_const,\n _check_nargs,\n _check_callback]\n \n \n \n \n def __str__(self):\n return \"/\".join(self._short_opts+self._long_opts)\n \n __repr__=_repr\n \n def takes_value(self):\n return self.type is not None\n \n def get_opt_string(self):\n if self._long_opts:\n return self._long_opts[0]\n else :\n return self._short_opts[0]\n \n \n \n \n def check_value(self,opt,value):\n checker=self.TYPE_CHECKER.get(self.type)\n if checker is None :\n return value\n else :\n return checker(self,opt,value)\n \n def convert_value(self,opt,value):\n if value is not None :\n if self.nargs ==1:\n return self.check_value(opt,value)\n else :\n return tuple([self.check_value(opt,v)for v in value])\n \n def process(self,opt,value,values,parser):\n \n \n \n value=self.convert_value(opt,value)\n \n \n \n \n return 
self.take_action(\n self.action,self.dest,opt,value,values,parser)\n \n def take_action(self,action,dest,opt,value,values,parser):\n if action ==\"store\":\n setattr(values,dest,value)\n elif action ==\"store_const\":\n setattr(values,dest,self.const)\n elif action ==\"store_true\":\n setattr(values,dest,True )\n elif action ==\"store_false\":\n setattr(values,dest,False )\n elif action ==\"append\":\n values.ensure_value(dest,[]).append(value)\n elif action ==\"append_const\":\n values.ensure_value(dest,[]).append(self.const)\n elif action ==\"count\":\n setattr(values,dest,values.ensure_value(dest,0)+1)\n elif action ==\"callback\":\n args=self.callback_args or ()\n kwargs=self.callback_kwargs or {}\n self.callback(self,opt,value,parser,*args,**kwargs)\n elif action ==\"help\":\n parser.print_help()\n parser.exit()\n elif action ==\"version\":\n parser.print_version()\n parser.exit()\n else :\n raise ValueError(\"unknown action %r\"%self.action)\n \n return 1\n \n \n \n \nSUPPRESS_HELP=\"SUPPRESS\"+\"HELP\"\nSUPPRESS_USAGE=\"SUPPRESS\"+\"USAGE\"\n\nclass Values:\n\n def __init__(self,defaults=None ):\n if defaults:\n for (attr,val)in defaults.items():\n setattr(self,attr,val)\n \n def __str__(self):\n return str(self.__dict__)\n \n __repr__=_repr\n \n def __eq__(self,other):\n if isinstance(other,Values):\n return self.__dict__ ==other.__dict__\n elif isinstance(other,dict):\n return self.__dict__ ==other\n else :\n return NotImplemented\n \n def _update_careful(self,dict):\n ''\n\n\n\n\n \n for attr in dir(self):\n if attr in dict:\n dval=dict[attr]\n if dval is not None :\n setattr(self,attr,dval)\n \n def _update_loose(self,dict):\n ''\n\n\n\n \n self.__dict__.update(dict)\n \n def _update(self,dict,mode):\n if mode ==\"careful\":\n self._update_careful(dict)\n elif mode ==\"loose\":\n self._update_loose(dict)\n else :\n raise ValueError(\"invalid update mode: %r\"%mode)\n \n def read_module(self,modname,mode=\"careful\"):\n __import__(modname)\n mod=sys.modules[modname]\n self._update(vars(mod),mode)\n \n def read_file(self,filename,mode=\"careful\"):\n vars={}\n exec(open(filename).read(),vars)\n self._update(vars,mode)\n \n def ensure_value(self,attr,value):\n if not hasattr(self,attr)or getattr(self,attr)is None :\n setattr(self,attr,value)\n return getattr(self,attr)\n \n \nclass OptionContainer:\n\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def __init__(self,option_class,conflict_handler,description):\n \n \n \n \n self._create_option_list()\n \n self.option_class=option_class\n self.set_conflict_handler(conflict_handler)\n self.set_description(description)\n \n def _create_option_mappings(self):\n \n \n \n self._short_opt={}\n self._long_opt={}\n self.defaults={}\n \n \n def _share_option_mappings(self,parser):\n \n \n self._short_opt=parser._short_opt\n self._long_opt=parser._long_opt\n self.defaults=parser.defaults\n \n def set_conflict_handler(self,handler):\n if handler not in (\"error\",\"resolve\"):\n raise ValueError(\"invalid conflict_resolution value %r\"%handler)\n self.conflict_handler=handler\n \n def set_description(self,description):\n self.description=description\n \n def get_description(self):\n return self.description\n \n \n def destroy(self):\n ''\n del self._short_opt\n del self._long_opt\n del self.defaults\n \n \n \n \n def _check_conflict(self,option):\n conflict_opts=[]\n for opt in option._short_opts:\n if opt in self._short_opt:\n conflict_opts.append((opt,self._short_opt[opt]))\n for opt in option._long_opts:\n if opt in 
self._long_opt:\n conflict_opts.append((opt,self._long_opt[opt]))\n \n if conflict_opts:\n handler=self.conflict_handler\n if handler ==\"error\":\n raise OptionConflictError(\n \"conflicting option string(s): %s\"\n %\", \".join([co[0]for co in conflict_opts]),\n option)\n elif handler ==\"resolve\":\n for (opt,c_option)in conflict_opts:\n if opt.startswith(\"--\"):\n c_option._long_opts.remove(opt)\n del self._long_opt[opt]\n else :\n c_option._short_opts.remove(opt)\n del self._short_opt[opt]\n if not (c_option._short_opts or c_option._long_opts):\n c_option.container.option_list.remove(c_option)\n \n def add_option(self,*args,**kwargs):\n ''\n\n \n if isinstance(args[0],str):\n option=self.option_class(*args,**kwargs)\n elif len(args)==1 and not kwargs:\n option=args[0]\n if not isinstance(option,Option):\n raise TypeError(\"not an Option instance: %r\"%option)\n else :\n raise TypeError(\"invalid arguments\")\n \n self._check_conflict(option)\n \n self.option_list.append(option)\n option.container=self\n for opt in option._short_opts:\n self._short_opt[opt]=option\n for opt in option._long_opts:\n self._long_opt[opt]=option\n \n if option.dest is not None :\n if option.default is not NO_DEFAULT:\n self.defaults[option.dest]=option.default\n elif option.dest not in self.defaults:\n self.defaults[option.dest]=None\n \n return option\n \n def add_options(self,option_list):\n for option in option_list:\n self.add_option(option)\n \n \n \n def get_option(self,opt_str):\n return (self._short_opt.get(opt_str)or\n self._long_opt.get(opt_str))\n \n def has_option(self,opt_str):\n return (opt_str in self._short_opt or\n opt_str in self._long_opt)\n \n def remove_option(self,opt_str):\n option=self._short_opt.get(opt_str)\n if option is None :\n option=self._long_opt.get(opt_str)\n if option is None :\n raise ValueError(\"no such option %r\"%opt_str)\n \n for opt in option._short_opts:\n del self._short_opt[opt]\n for opt in option._long_opts:\n del self._long_opt[opt]\n option.container.option_list.remove(option)\n \n \n \n \n def format_option_help(self,formatter):\n if not self.option_list:\n return \"\"\n result=[]\n for option in self.option_list:\n if not option.help is SUPPRESS_HELP:\n result.append(formatter.format_option(option))\n return \"\".join(result)\n \n def format_description(self,formatter):\n return formatter.format_description(self.get_description())\n \n def format_help(self,formatter):\n result=[]\n if self.description:\n result.append(self.format_description(formatter))\n if self.option_list:\n result.append(self.format_option_help(formatter))\n return \"\\n\".join(result)\n \n \nclass OptionGroup(OptionContainer):\n\n def __init__(self,parser,title,description=None ):\n self.parser=parser\n OptionContainer.__init__(\n self,parser.option_class,parser.conflict_handler,description)\n self.title=title\n \n def _create_option_list(self):\n self.option_list=[]\n self._share_option_mappings(self.parser)\n \n def set_title(self,title):\n self.title=title\n \n def destroy(self):\n ''\n OptionContainer.destroy(self)\n del self.option_list\n \n \n \n def format_help(self,formatter):\n result=formatter.format_heading(self.title)\n formatter.indent()\n result +=OptionContainer.format_help(self,formatter)\n formatter.dedent()\n return result\n \n \nclass OptionParser(OptionContainer):\n\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n standard_option_list=[]\n \n def __init__(self,\n 
usage=None ,\n option_list=None ,\n option_class=Option,\n version=None ,\n conflict_handler=\"error\",\n description=None ,\n formatter=None ,\n add_help_option=True ,\n prog=None ,\n epilog=None ):\n OptionContainer.__init__(\n self,option_class,conflict_handler,description)\n self.set_usage(usage)\n self.prog=prog\n self.version=version\n self.allow_interspersed_args=True\n self.process_default_values=True\n if formatter is None :\n formatter=IndentedHelpFormatter()\n self.formatter=formatter\n self.formatter.set_parser(self)\n self.epilog=epilog\n \n \n \n \n \n self._populate_option_list(option_list,\n add_help=add_help_option)\n \n self._init_parsing_state()\n \n \n def destroy(self):\n ''\n\n\n\n\n \n OptionContainer.destroy(self)\n for group in self.option_groups:\n group.destroy()\n del self.option_list\n del self.option_groups\n del self.formatter\n \n \n \n \n \n def _create_option_list(self):\n self.option_list=[]\n self.option_groups=[]\n self._create_option_mappings()\n \n def _add_help_option(self):\n self.add_option(\"-h\",\"--help\",\n action=\"help\",\n help=_(\"show this help message and exit\"))\n \n def _add_version_option(self):\n self.add_option(\"--version\",\n action=\"version\",\n help=_(\"show program's version number and exit\"))\n \n def _populate_option_list(self,option_list,add_help=True ):\n if self.standard_option_list:\n self.add_options(self.standard_option_list)\n if option_list:\n self.add_options(option_list)\n if self.version:\n self._add_version_option()\n if add_help:\n self._add_help_option()\n \n def _init_parsing_state(self):\n \n self.rargs=None\n self.largs=None\n self.values=None\n \n \n \n \n def set_usage(self,usage):\n if usage is None :\n self.usage=_(\"%prog [options]\")\n elif usage is SUPPRESS_USAGE:\n self.usage=None\n \n elif usage.lower().startswith(\"usage: \"):\n self.usage=usage[7:]\n else :\n self.usage=usage\n \n def enable_interspersed_args(self):\n ''\n\n\n\n \n self.allow_interspersed_args=True\n \n def disable_interspersed_args(self):\n ''\n\n\n\n \n self.allow_interspersed_args=False\n \n def set_process_default_values(self,process):\n self.process_default_values=process\n \n def set_default(self,dest,value):\n self.defaults[dest]=value\n \n def set_defaults(self,**kwargs):\n self.defaults.update(kwargs)\n \n def _get_all_options(self):\n options=self.option_list[:]\n for group in self.option_groups:\n options.extend(group.option_list)\n return options\n \n def get_default_values(self):\n if not self.process_default_values:\n \n return Values(self.defaults)\n \n defaults=self.defaults.copy()\n for option in self._get_all_options():\n default=defaults.get(option.dest)\n if isinstance(default,str):\n opt_str=option.get_opt_string()\n defaults[option.dest]=option.check_value(opt_str,default)\n \n return Values(defaults)\n \n \n \n \n def add_option_group(self,*args,**kwargs):\n \n if isinstance(args[0],str):\n group=OptionGroup(self,*args,**kwargs)\n elif len(args)==1 and not kwargs:\n group=args[0]\n if not isinstance(group,OptionGroup):\n raise TypeError(\"not an OptionGroup instance: %r\"%group)\n if group.parser is not self:\n raise ValueError(\"invalid OptionGroup (wrong parser)\")\n else :\n raise TypeError(\"invalid arguments\")\n \n self.option_groups.append(group)\n return group\n \n def get_option_group(self,opt_str):\n option=(self._short_opt.get(opt_str)or\n self._long_opt.get(opt_str))\n if option and option.container is not self:\n return option.container\n return None\n \n \n \n \n def _get_args(self,args):\n if 
args is None :\n return sys.argv[1:]\n else :\n return args[:]\n \n def parse_args(self,args=None ,values=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n rargs=self._get_args(args)\n if values is None :\n values=self.get_default_values()\n \n \n \n \n \n \n \n \n \n \n self.rargs=rargs\n self.largs=largs=[]\n self.values=values\n \n try :\n stop=self._process_args(largs,rargs,values)\n except (BadOptionError,OptionValueError)as err:\n self.error(str(err))\n \n args=largs+rargs\n return self.check_values(values,args)\n \n def check_values(self,values,args):\n ''\n\n\n\n\n\n\n\n\n \n return (values,args)\n \n def _process_args(self,largs,rargs,values):\n ''\n\n\n\n\n\n\n\n \n while rargs:\n arg=rargs[0]\n \n \n \n if arg ==\"--\":\n del rargs[0]\n return\n elif arg[0:2]==\"--\":\n \n self._process_long_opt(rargs,values)\n elif arg[:1]==\"-\"and len(arg)>1:\n \n \n self._process_short_opts(rargs,values)\n elif self.allow_interspersed_args:\n largs.append(arg)\n del rargs[0]\n else :\n return\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n def _match_long_opt(self,opt):\n ''\n\n\n\n\n \n return _match_abbrev(opt,self._long_opt)\n \n def _process_long_opt(self,rargs,values):\n arg=rargs.pop(0)\n \n \n \n if \"=\"in arg:\n (opt,next_arg)=arg.split(\"=\",1)\n rargs.insert(0,next_arg)\n had_explicit_value=True\n else :\n opt=arg\n had_explicit_value=False\n \n opt=self._match_long_opt(opt)\n option=self._long_opt[opt]\n if option.takes_value():\n nargs=option.nargs\n if len(rargs)>> import pdb\n >>> pdb.run('')\n\nThe debugger's prompt is '(Pdb) '. This will stop in the first\nfunction call in .\n\nAlternatively, if a statement terminated with an unhandled exception,\nyou can use pdb's post-mortem facility to inspect the contents of the\ntraceback:\n\n >>> \n \n >>> import pdb\n >>> pdb.pm()\n\nThe commands recognized by the debugger are listed in the next\nsection. Most can be abbreviated as indicated; e.g., h(elp) means\nthat 'help' can be typed as 'h' or 'help' (but not as 'he' or 'hel',\nnor as 'H' or 'Help' or 'HELP'). Optional arguments are enclosed in\nsquare brackets. Alternatives in the command syntax are separated\nby a vertical bar (|).\n\nA blank line repeats the previous command literally, except for\n'list', where it lists the next 11 lines.\n\nCommands that the debugger doesn't recognize are assumed to be Python\nstatements and are executed in the context of the program being\ndebugged. Python statements can also be prefixed with an exclamation\npoint ('!'). This is a powerful way to inspect the program being\ndebugged; it is even possible to change variables or call functions.\nWhen an exception occurs in such a statement, the exception name is\nprinted but the debugger's state is not changed.\n\nThe debugger supports aliases, which can save typing. And aliases can\nhave parameters (see the alias help entry) which allows one a certain\nlevel of adaptability to the context under examination.\n\nMultiple commands may be entered on a single line, separated by the\npair ';;'. No intelligence is applied to separating the commands; the\ninput is split at the first ';;', even if it is in the middle of a\nquoted string.\n\nIf a file \".pdbrc\" exists in your home directory or in the current\ndirectory, it is read in and executed as if it had been typed at the\ndebugger prompt. This is particularly useful for aliases. If both\nfiles exist, the one in the home directory is read first and aliases\ndefined there can be overridden by the local file. 
This behavior can be\ndisabled by passing the \"readrc=False\" argument to the Pdb constructor.\n\nAside from aliases, the debugger is not directly programmable; but it\nis implemented as a class from which you can derive your own debugger\nclass, which you can make as fancy as you like.\n\n\nDebugger commands\n=================\n\n\"\"\"\n\n\n\nimport os\nimport re\nimport sys\nimport cmd\nimport bdb\nimport dis\nimport code\nimport glob\nimport pprint\nimport signal\nimport inspect\nimport traceback\nimport linecache\n\n\nclass Restart(Exception):\n ''\n pass\n \n__all__=[\"run\",\"pm\",\"Pdb\",\"runeval\",\"runctx\",\"runcall\",\"set_trace\",\n\"post_mortem\",\"help\"]\n\ndef find_function(funcname,filename):\n cre=re.compile(r'def\\s+%s\\s*[(]'%re.escape(funcname))\n try :\n fp=open(filename)\n except OSError:\n return None\n \n with fp:\n for lineno,line in enumerate(fp,start=1):\n if cre.match(line):\n return funcname,filename,lineno\n return None\n \ndef getsourcelines(obj):\n lines,lineno=inspect.findsource(obj)\n if inspect.isframe(obj)and obj.f_globals is obj.f_locals:\n \n return lines,1\n elif inspect.ismodule(obj):\n return lines,1\n return inspect.getblock(lines[lineno:]),lineno+1\n \ndef lasti2lineno(code,lasti):\n linestarts=list(dis.findlinestarts(code))\n linestarts.reverse()\n for i,lineno in linestarts:\n if lasti >=i:\n return lineno\n return 0\n \n \nclass _rstr(str):\n ''\n def __repr__(self):\n return self\n \n \n \n \n \n \n \nline_prefix='\\n-> '\n\nclass Pdb(bdb.Bdb,cmd.Cmd):\n\n _previous_sigint_handler=None\n \n def __init__(self,completekey='tab',stdin=None ,stdout=None ,skip=None ,\n nosigint=False ,readrc=True ):\n bdb.Bdb.__init__(self,skip=skip)\n cmd.Cmd.__init__(self,completekey,stdin,stdout)\n if stdout:\n self.use_rawinput=0\n self.prompt='(Pdb) '\n self.aliases={}\n self.displaying={}\n self.mainpyfile=''\n self._wait_for_mainpyfile=False\n self.tb_lineno={}\n \n try :\n import readline\n \n readline.set_completer_delims(' \\t\\n`@#$%^&*()=+[{]}\\\\|;:\\'\",<>?')\n except ImportError:\n pass\n self.allow_kbdint=False\n self.nosigint=nosigint\n \n \n self.rcLines=[]\n if readrc:\n if 'HOME'in os.environ:\n envHome=os.environ['HOME']\n try :\n with open(os.path.join(envHome,\".pdbrc\"))as rcFile:\n self.rcLines.extend(rcFile)\n except OSError:\n pass\n try :\n with open(\".pdbrc\")as rcFile:\n self.rcLines.extend(rcFile)\n except OSError:\n pass\n \n self.commands={}\n self.commands_doprompt={}\n \n self.commands_silent={}\n \n self.commands_defining=False\n \n self.commands_bnum=None\n \n \n def sigint_handler(self,signum,frame):\n if self.allow_kbdint:\n raise KeyboardInterrupt\n self.message(\"\\nProgram interrupted. 
(Use 'cont' to resume).\")\n self.set_step()\n self.set_trace(frame)\n \n def reset(self):\n bdb.Bdb.reset(self)\n self.forget()\n \n def forget(self):\n self.lineno=None\n self.stack=[]\n self.curindex=0\n self.curframe=None\n self.tb_lineno.clear()\n \n def setup(self,f,tb):\n self.forget()\n self.stack,self.curindex=self.get_stack(f,tb)\n while tb:\n \n \n \n lineno=lasti2lineno(tb.tb_frame.f_code,tb.tb_lasti)\n self.tb_lineno[tb.tb_frame]=lineno\n tb=tb.tb_next\n self.curframe=self.stack[self.curindex][0]\n \n \n \n self.curframe_locals=self.curframe.f_locals\n return self.execRcLines()\n \n \n def execRcLines(self):\n if not self.rcLines:\n return\n \n rcLines=self.rcLines\n rcLines.reverse()\n \n self.rcLines=[]\n while rcLines:\n line=rcLines.pop().strip()\n if line and line[0]!='#':\n if self.onecmd(line):\n \n \n \n self.rcLines +=reversed(rcLines)\n return True\n \n \n \n def user_call(self,frame,argument_list):\n ''\n \n if self._wait_for_mainpyfile:\n return\n if self.stop_here(frame):\n self.message('--Call--')\n self.interaction(frame,None )\n \n def user_line(self,frame):\n ''\n if self._wait_for_mainpyfile:\n if (self.mainpyfile !=self.canonic(frame.f_code.co_filename)\n or frame.f_lineno <=0):\n return\n self._wait_for_mainpyfile=False\n if self.bp_commands(frame):\n self.interaction(frame,None )\n \n def bp_commands(self,frame):\n ''\n\n\n\n \n \n if getattr(self,\"currentbp\",False )and\\\n self.currentbp in self.commands:\n currentbp=self.currentbp\n self.currentbp=0\n lastcmd_back=self.lastcmd\n self.setup(frame,None )\n for line in self.commands[currentbp]:\n self.onecmd(line)\n self.lastcmd=lastcmd_back\n if not self.commands_silent[currentbp]:\n self.print_stack_entry(self.stack[self.curindex])\n if self.commands_doprompt[currentbp]:\n self._cmdloop()\n self.forget()\n return\n return 1\n \n def user_return(self,frame,return_value):\n ''\n if self._wait_for_mainpyfile:\n return\n frame.f_locals['__return__']=return_value\n self.message('--Return--')\n self.interaction(frame,None )\n \n def user_exception(self,frame,exc_info):\n ''\n \n if self._wait_for_mainpyfile:\n return\n exc_type,exc_value,exc_traceback=exc_info\n frame.f_locals['__exception__']=exc_type,exc_value\n \n \n \n \n \n \n prefix='Internal 'if (not exc_traceback\n and exc_type is StopIteration)else ''\n self.message('%s%s'%(prefix,\n traceback.format_exception_only(exc_type,exc_value)[-1].strip()))\n self.interaction(frame,exc_traceback)\n \n \n def _cmdloop(self):\n while True :\n try :\n \n \n self.allow_kbdint=True\n self.cmdloop()\n self.allow_kbdint=False\n break\n except KeyboardInterrupt:\n self.message('--KeyboardInterrupt--')\n \n \n def preloop(self):\n displaying=self.displaying.get(self.curframe)\n if displaying:\n for expr,oldvalue in displaying.items():\n newvalue=self._getval_except(expr)\n \n \n \n if newvalue is not oldvalue and newvalue !=oldvalue:\n displaying[expr]=newvalue\n self.message('display %s: %r [old: %r]'%\n (expr,newvalue,oldvalue))\n \n def interaction(self,frame,traceback):\n \n if Pdb._previous_sigint_handler:\n signal.signal(signal.SIGINT,Pdb._previous_sigint_handler)\n Pdb._previous_sigint_handler=None\n if self.setup(frame,traceback):\n \n \n self.forget()\n return\n self.print_stack_entry(self.stack[self.curindex])\n self._cmdloop()\n self.forget()\n \n def displayhook(self,obj):\n ''\n\n \n \n if obj is not None :\n self.message(repr(obj))\n \n def default(self,line):\n if line[:1]=='!':line=line[1:]\n locals=self.curframe_locals\n 
globals=self.curframe.f_globals\n try :\n code=compile(line+'\\n','','single')\n save_stdout=sys.stdout\n save_stdin=sys.stdin\n save_displayhook=sys.displayhook\n try :\n sys.stdin=self.stdin\n sys.stdout=self.stdout\n sys.displayhook=self.displayhook\n exec(code,globals,locals)\n finally :\n sys.stdout=save_stdout\n sys.stdin=save_stdin\n sys.displayhook=save_displayhook\n except :\n exc_info=sys.exc_info()[:2]\n self.error(traceback.format_exception_only(*exc_info)[-1].strip())\n \n def precmd(self,line):\n ''\n if not line.strip():\n return line\n args=line.split()\n while args[0]in self.aliases:\n line=self.aliases[args[0]]\n ii=1\n for tmpArg in args[1:]:\n line=line.replace(\"%\"+str(ii),\n tmpArg)\n ii +=1\n line=line.replace(\"%*\",' '.join(args[1:]))\n args=line.split()\n \n \n if args[0]!='alias':\n marker=line.find(';;')\n if marker >=0:\n \n next=line[marker+2:].lstrip()\n self.cmdqueue.append(next)\n line=line[:marker].rstrip()\n return line\n \n def onecmd(self,line):\n ''\n\n\n\n\n \n if not self.commands_defining:\n return cmd.Cmd.onecmd(self,line)\n else :\n return self.handle_command_def(line)\n \n def handle_command_def(self,line):\n ''\n cmd,arg,line=self.parseline(line)\n if not cmd:\n return\n if cmd =='silent':\n self.commands_silent[self.commands_bnum]=True\n return\n elif cmd =='end':\n self.cmdqueue=[]\n return 1\n cmdlist=self.commands[self.commands_bnum]\n if arg:\n cmdlist.append(cmd+' '+arg)\n else :\n cmdlist.append(cmd)\n \n try :\n func=getattr(self,'do_'+cmd)\n except AttributeError:\n func=self.default\n \n if func.__name__ in self.commands_resuming:\n self.commands_doprompt[self.commands_bnum]=False\n self.cmdqueue=[]\n return 1\n return\n \n \n \n def message(self,msg):\n print(msg,file=self.stdout)\n \n def error(self,msg):\n print('***',msg,file=self.stdout)\n \n \n \n \n def _complete_location(self,text,line,begidx,endidx):\n \n if line.strip().endswith((':',',')):\n \n return []\n \n try :\n ret=self._complete_expression(text,line,begidx,endidx)\n except Exception:\n ret=[]\n \n globs=glob.glob(text+'*')\n for fn in globs:\n if os.path.isdir(fn):\n ret.append(fn+'/')\n elif os.path.isfile(fn)and fn.lower().endswith(('.py','.pyw')):\n ret.append(fn+':')\n return ret\n \n def _complete_bpnumber(self,text,line,begidx,endidx):\n \n \n \n return [str(i)for i,bp in enumerate(bdb.Breakpoint.bpbynumber)\n if bp is not None and str(i).startswith(text)]\n \n def _complete_expression(self,text,line,begidx,endidx):\n \n if not self.curframe:\n return []\n \n \n \n ns=self.curframe.f_globals.copy()\n ns.update(self.curframe_locals)\n if '.'in text:\n \n \n \n dotted=text.split('.')\n try :\n obj=ns[dotted[0]]\n for part in dotted[1:-1]:\n obj=getattr(obj,part)\n except (KeyError,AttributeError):\n return []\n prefix='.'.join(dotted[:-1])+'.'\n return [prefix+n for n in dir(obj)if n.startswith(dotted[-1])]\n else :\n \n return [n for n in ns.keys()if n.startswith(text)]\n \n \n \n \n \n def do_commands(self,arg):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if not arg:\n bnum=len(bdb.Breakpoint.bpbynumber)-1\n else :\n try :\n bnum=int(arg)\n except :\n self.error(\"Usage: commands [bnum]\\n ...\\n end\")\n return\n self.commands_bnum=bnum\n \n if bnum in self.commands:\n old_command_defs=(self.commands[bnum],\n self.commands_doprompt[bnum],\n self.commands_silent[bnum])\n else :\n old_command_defs=None\n self.commands[bnum]=[]\n self.commands_doprompt[bnum]=True\n self.commands_silent[bnum]=False\n \n prompt_back=self.prompt\n 
self.prompt='(com) '\n self.commands_defining=True\n try :\n self.cmdloop()\n except KeyboardInterrupt:\n \n if old_command_defs:\n self.commands[bnum]=old_command_defs[0]\n self.commands_doprompt[bnum]=old_command_defs[1]\n self.commands_silent[bnum]=old_command_defs[2]\n else :\n del self.commands[bnum]\n del self.commands_doprompt[bnum]\n del self.commands_silent[bnum]\n self.error('command definition aborted, old commands restored')\n finally :\n self.commands_defining=False\n self.prompt=prompt_back\n \n complete_commands=_complete_bpnumber\n \n def do_break(self,arg,temporary=0):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n if not arg:\n if self.breaks:\n self.message(\"Num Type Disp Enb Where\")\n for bp in bdb.Breakpoint.bpbynumber:\n if bp:\n self.message(bp.bpformat())\n return\n \n \n filename=None\n lineno=None\n cond=None\n comma=arg.find(',')\n if comma >0:\n \n cond=arg[comma+1:].lstrip()\n arg=arg[:comma].rstrip()\n \n colon=arg.rfind(':')\n funcname=None\n if colon >=0:\n filename=arg[:colon].rstrip()\n f=self.lookupmodule(filename)\n if not f:\n self.error('%r not found from sys.path'%filename)\n return\n else :\n filename=f\n arg=arg[colon+1:].lstrip()\n try :\n lineno=int(arg)\n except ValueError:\n self.error('Bad lineno: %s'%arg)\n return\n else :\n \n try :\n lineno=int(arg)\n except ValueError:\n try :\n func=eval(arg,\n self.curframe.f_globals,\n self.curframe_locals)\n except :\n func=arg\n try :\n if hasattr(func,'__func__'):\n func=func.__func__\n code=func.__code__\n \n \n funcname=code.co_name\n lineno=code.co_firstlineno\n filename=code.co_filename\n except :\n \n (ok,filename,ln)=self.lineinfo(arg)\n if not ok:\n self.error('The specified object %r is not a function '\n 'or was not found along sys.path.'%arg)\n return\n funcname=ok\n lineno=int(ln)\n if not filename:\n filename=self.defaultFile()\n \n line=self.checkline(filename,lineno)\n if line:\n \n err=self.set_break(filename,line,temporary,cond,funcname)\n if err:\n self.error(err)\n else :\n bp=self.get_breaks(filename,line)[-1]\n self.message(\"Breakpoint %d at %s:%d\"%\n (bp.number,bp.file,bp.line))\n \n \n def defaultFile(self):\n ''\n filename=self.curframe.f_code.co_filename\n if filename ==''and self.mainpyfile:\n filename=self.mainpyfile\n return filename\n \n do_b=do_break\n \n complete_break=_complete_location\n complete_b=_complete_location\n \n def do_tbreak(self,arg):\n ''\n\n\n \n self.do_break(arg,1)\n \n complete_tbreak=_complete_location\n \n def lineinfo(self,identifier):\n failed=(None ,None ,None )\n \n idstring=identifier.split(\"'\")\n if len(idstring)==1:\n \n id=idstring[0].strip()\n elif len(idstring)==3:\n \n id=idstring[1].strip()\n else :\n return failed\n if id =='':return failed\n parts=id.split('.')\n \n if parts[0]=='self':\n del parts[0]\n if len(parts)==0:\n return failed\n \n fname=self.defaultFile()\n if len(parts)==1:\n item=parts[0]\n else :\n \n \n f=self.lookupmodule(parts[0])\n if f:\n fname=f\n item=parts[1]\n answer=find_function(item,fname)\n return answer or failed\n \n def checkline(self,filename,lineno):\n ''\n\n\n\n \n \n \n globs=self.curframe.f_globals if hasattr(self,'curframe')else None\n line=linecache.getline(filename,lineno,globs)\n if not line:\n self.message('End of file')\n return 0\n line=line.strip()\n \n if (not line or (line[0]=='#')or\n (line[:3]=='\"\"\"')or line[:3]==\"'''\"):\n self.error('Blank or comment')\n return 0\n return lineno\n \n def do_enable(self,arg):\n ''\n\n\n \n args=arg.split()\n for i in args:\n try :\n bp=self.get_bpbynumber(i)\n 
except ValueError as err:\n self.error(err)\n else :\n bp.enable()\n self.message('Enabled %s'%bp)\n \n complete_enable=_complete_bpnumber\n \n def do_disable(self,arg):\n ''\n\n\n\n\n\n \n args=arg.split()\n for i in args:\n try :\n bp=self.get_bpbynumber(i)\n except ValueError as err:\n self.error(err)\n else :\n bp.disable()\n self.message('Disabled %s'%bp)\n \n complete_disable=_complete_bpnumber\n \n def do_condition(self,arg):\n ''\n\n\n\n\n \n args=arg.split(' ',1)\n try :\n cond=args[1]\n except IndexError:\n cond=None\n try :\n bp=self.get_bpbynumber(args[0].strip())\n except IndexError:\n self.error('Breakpoint number expected')\n except ValueError as err:\n self.error(err)\n else :\n bp.cond=cond\n if not cond:\n self.message('Breakpoint %d is now unconditional.'%bp.number)\n else :\n self.message('New condition set for breakpoint %d.'%bp.number)\n \n complete_condition=_complete_bpnumber\n \n def do_ignore(self,arg):\n ''\n\n\n\n\n\n\n \n args=arg.split()\n try :\n count=int(args[1].strip())\n except :\n count=0\n try :\n bp=self.get_bpbynumber(args[0].strip())\n except IndexError:\n self.error('Breakpoint number expected')\n except ValueError as err:\n self.error(err)\n else :\n bp.ignore=count\n if count >0:\n if count >1:\n countstr='%d crossings'%count\n else :\n countstr='1 crossing'\n self.message('Will ignore next %s of breakpoint %d.'%\n (countstr,bp.number))\n else :\n self.message('Will stop next time breakpoint %d is reached.'\n %bp.number)\n \n complete_ignore=_complete_bpnumber\n \n def do_clear(self,arg):\n ''\n\n\n\n\n \n if not arg:\n try :\n reply=input('Clear all breaks? ')\n except EOFError:\n reply='no'\n reply=reply.strip().lower()\n if reply in ('y','yes'):\n bplist=[bp for bp in bdb.Breakpoint.bpbynumber if bp]\n self.clear_all_breaks()\n for bp in bplist:\n self.message('Deleted %s'%bp)\n return\n if ':'in arg:\n \n i=arg.rfind(':')\n filename=arg[:i]\n arg=arg[i+1:]\n try :\n lineno=int(arg)\n except ValueError:\n err=\"Invalid line number (%s)\"%arg\n else :\n bplist=self.get_breaks(filename,lineno)\n err=self.clear_break(filename,lineno)\n if err:\n self.error(err)\n else :\n for bp in bplist:\n self.message('Deleted %s'%bp)\n return\n numberlist=arg.split()\n for i in numberlist:\n try :\n bp=self.get_bpbynumber(i)\n except ValueError as err:\n self.error(err)\n else :\n self.clear_bpbynumber(i)\n self.message('Deleted %s'%bp)\n do_cl=do_clear\n \n complete_clear=_complete_location\n complete_cl=_complete_location\n \n def do_where(self,arg):\n ''\n\n\n\n \n self.print_stack_trace()\n do_w=do_where\n do_bt=do_where\n \n def _select_frame(self,number):\n assert 0 <=number =2:\n self.error('No help for %r; please do not run Python with -OO '\n 'if you need command help'%arg)\n return\n self.message(command.__doc__.rstrip())\n \n do_h=do_help\n \n def help_exec(self):\n ''\n\n\n\n\n\n\n\n \n self.message((self.help_exec.__doc__ or '').strip())\n \n def help_pdb(self):\n help()\n \n \n \n def lookupmodule(self,filename):\n ''\n\n\n\n \n if os.path.isabs(filename)and os.path.exists(filename):\n return filename\n f=os.path.join(sys.path[0],filename)\n if os.path.exists(f)and self.canonic(f)==self.mainpyfile:\n return f\n root,ext=os.path.splitext(filename)\n if ext =='':\n filename=filename+'.py'\n if os.path.isabs(filename):\n return filename\n for dirname in sys.path:\n while os.path.islink(dirname):\n dirname=os.readlink(dirname)\n fullname=os.path.join(dirname,filename)\n if os.path.exists(fullname):\n return fullname\n return None\n \n def 
_runmodule(self,module_name):\n self._wait_for_mainpyfile=True\n self._user_requested_quit=False\n import runpy\n mod_name,mod_spec,code=runpy._get_module_details(module_name)\n self.mainpyfile=self.canonic(code.co_filename)\n import __main__\n __main__.__dict__.clear()\n __main__.__dict__.update({\n \"__name__\":\"__main__\",\n \"__file__\":self.mainpyfile,\n \"__package__\":mod_spec.parent,\n \"__loader__\":mod_spec.loader,\n \"__spec__\":mod_spec,\n \"__builtins__\":__builtins__,\n })\n self.run(code)\n \n def _runscript(self,filename):\n \n \n \n \n \n import __main__\n __main__.__dict__.clear()\n __main__.__dict__.update({\"__name__\":\"__main__\",\n \"__file__\":filename,\n \"__builtins__\":__builtins__,\n })\n \n \n \n \n \n \n self._wait_for_mainpyfile=True\n self.mainpyfile=self.canonic(filename)\n self._user_requested_quit=False\n with open(filename,\"rb\")as fp:\n statement=\"exec(compile(%r, %r, 'exec'))\"%\\\n (fp.read(),self.mainpyfile)\n self.run(statement)\n \n \n \nif __doc__ is not None :\n\n _help_order=[\n 'help','where','down','up','break','tbreak','clear','disable',\n 'enable','ignore','condition','commands','step','next','until',\n 'jump','return','retval','run','continue','list','longlist',\n 'args','p','pp','whatis','source','display','undisplay',\n 'interact','alias','unalias','debug','quit',\n ]\n \n for _command in _help_order:\n __doc__ +=getattr(Pdb,'do_'+_command).__doc__.strip()+'\\n\\n'\n __doc__ +=Pdb.help_exec.__doc__\n \n del _help_order,_command\n \n \n \n \ndef run(statement,globals=None ,locals=None ):\n Pdb().run(statement,globals,locals)\n \ndef runeval(expression,globals=None ,locals=None ):\n return Pdb().runeval(expression,globals,locals)\n \ndef runctx(statement,globals,locals):\n\n run(statement,globals,locals)\n \ndef runcall(*args,**kwds):\n return Pdb().runcall(*args,**kwds)\n \ndef set_trace(*,header=None ):\n pdb=Pdb()\n if header is not None :\n pdb.message(header)\n pdb.set_trace(sys._getframe().f_back)\n \n \n \ndef post_mortem(t=None ):\n\n if t is None :\n \n \n t=sys.exc_info()[2]\n if t is None :\n raise ValueError(\"A valid traceback must be passed if no \"\n \"exception is being handled\")\n \n p=Pdb()\n p.reset()\n p.interaction(None ,t)\n \ndef pm():\n post_mortem(sys.last_traceback)\n \n \n \n \nTESTCMD='import x; x.main()'\n\ndef test():\n run(TESTCMD)\n \n \ndef help():\n import pydoc\n pydoc.pager(__doc__)\n \n_usage=\"\"\"\\\nusage: pdb.py [-c command] ... [-m module | pyfile] [arg] ...\n\nDebug the Python program given by pyfile. Alternatively,\nan executable module or package to debug can be specified using\nthe -m switch.\n\nInitial commands are read from .pdbrc files in your home directory\nand in the current directory, if they exist. 
Commands supplied with\n-c are executed after commands from .pdbrc files.\n\nTo let the script run until an exception occurs, use \"-c continue\".\nTo let the script run up to a given line X in the debugged file, use\n\"-c 'until X'\".\"\"\"\n\ndef main():\n import getopt\n \n opts,args=getopt.getopt(sys.argv[1:],'mhc:',['--help','--command='])\n \n if not args:\n print(_usage)\n sys.exit(2)\n \n commands=[]\n run_as_module=False\n for opt,optarg in opts:\n if opt in ['-h','--help']:\n print(_usage)\n sys.exit()\n elif opt in ['-c','--command']:\n commands.append(optarg)\n elif opt in ['-m']:\n run_as_module=True\n \n mainpyfile=args[0]\n if not run_as_module and not os.path.exists(mainpyfile):\n print('Error:',mainpyfile,'does not exist')\n sys.exit(1)\n \n sys.argv[:]=args\n \n \n if not run_as_module:\n sys.path[0]=os.path.dirname(mainpyfile)\n \n \n \n \n \n pdb=Pdb()\n pdb.rcLines.extend(commands)\n while True :\n try :\n if run_as_module:\n pdb._runmodule(mainpyfile)\n else :\n pdb._runscript(mainpyfile)\n if pdb._user_requested_quit:\n break\n print(\"The program finished and will be restarted\")\n except Restart:\n print(\"Restarting\",mainpyfile,\"with arguments:\")\n print(\"\\t\"+\" \".join(args))\n except SystemExit:\n \n print(\"The program exited via sys.exit(). Exit status:\",end=' ')\n print(sys.exc_info()[1])\n except SyntaxError:\n traceback.print_exc()\n sys.exit(1)\n except :\n traceback.print_exc()\n print(\"Uncaught exception. Entering post mortem debugging\")\n print(\"Running 'cont' or 'step' will restart the program\")\n t=sys.exc_info()[2]\n pdb.interaction(None ,t)\n print(\"Post mortem debugger finished. The \"+mainpyfile+\n \" will be restarted\")\n \n \n \nif __name__ =='__main__':\n import pdb\n pdb.main()\n", ["__main__", "bdb", "cmd", "code", "dis", "getopt", "glob", "inspect", "linecache", "os", "pdb", "pprint", "pydoc", "re", "readline", "runpy", "shlex", "signal", "sys", "traceback"]], "pickle": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nfrom types import FunctionType\nfrom copyreg import dispatch_table\nfrom copyreg import _extension_registry,_inverted_registry,_extension_cache\nfrom itertools import islice\nfrom functools import partial\nimport sys\nfrom sys import maxsize\nfrom struct import pack,unpack\nimport re\nimport io\nimport codecs\nimport _compat_pickle\n\n__all__=[\"PickleError\",\"PicklingError\",\"UnpicklingError\",\"Pickler\",\n\"Unpickler\",\"dump\",\"dumps\",\"load\",\"loads\"]\n\n\nbytes_types=(bytes,bytearray)\n\n\nformat_version=\"4.0\"\ncompatible_formats=[\"1.0\",\n\"1.1\",\n\"1.2\",\n\"1.3\",\n\"2.0\",\n\"3.0\",\n\"4.0\",\n]\n\n\nHIGHEST_PROTOCOL=4\n\n\n\n\nDEFAULT_PROTOCOL=3\n\nclass PickleError(Exception):\n ''\n pass\n \nclass PicklingError(PickleError):\n ''\n\n\n \n pass\n \nclass UnpicklingError(PickleError):\n ''\n\n\n\n\n\n\n \n pass\n \n \n \nclass _Stop(Exception):\n def __init__(self,value):\n self.value=value\n \n \ntry :\n from org.python.core import PyStringMap\nexcept ImportError:\n PyStringMap=None\n \n \n \n \n 
\nMARK=b'('\nSTOP=b'.'\nPOP=b'0'\nPOP_MARK=b'1'\nDUP=b'2'\nFLOAT=b'F'\nINT=b'I'\nBININT=b'J'\nBININT1=b'K'\nLONG=b'L'\nBININT2=b'M'\nNONE=b'N'\nPERSID=b'P'\nBINPERSID=b'Q'\nREDUCE=b'R'\nSTRING=b'S'\nBINSTRING=b'T'\nSHORT_BINSTRING=b'U'\nUNICODE=b'V'\nBINUNICODE=b'X'\nAPPEND=b'a'\nBUILD=b'b'\nGLOBAL=b'c'\nDICT=b'd'\nEMPTY_DICT=b'}'\nAPPENDS=b'e'\nGET=b'g'\nBINGET=b'h'\nINST=b'i'\nLONG_BINGET=b'j'\nLIST=b'l'\nEMPTY_LIST=b']'\nOBJ=b'o'\nPUT=b'p'\nBINPUT=b'q'\nLONG_BINPUT=b'r'\nSETITEM=b's'\nTUPLE=b't'\nEMPTY_TUPLE=b')'\nSETITEMS=b'u'\nBINFLOAT=b'G'\n\nTRUE=b'I01\\n'\nFALSE=b'I00\\n'\n\n\n\nPROTO=b'\\x80'\nNEWOBJ=b'\\x81'\nEXT1=b'\\x82'\nEXT2=b'\\x83'\nEXT4=b'\\x84'\nTUPLE1=b'\\x85'\nTUPLE2=b'\\x86'\nTUPLE3=b'\\x87'\nNEWTRUE=b'\\x88'\nNEWFALSE=b'\\x89'\nLONG1=b'\\x8a'\nLONG4=b'\\x8b'\n\n_tuplesize2code=[EMPTY_TUPLE,TUPLE1,TUPLE2,TUPLE3]\n\n\n\nBINBYTES=b'B'\nSHORT_BINBYTES=b'C'\n\n\nSHORT_BINUNICODE=b'\\x8c'\nBINUNICODE8=b'\\x8d'\nBINBYTES8=b'\\x8e'\nEMPTY_SET=b'\\x8f'\nADDITEMS=b'\\x90'\nFROZENSET=b'\\x91'\nNEWOBJ_EX=b'\\x92'\nSTACK_GLOBAL=b'\\x93'\nMEMOIZE=b'\\x94'\nFRAME=b'\\x95'\n\n__all__.extend([x for x in dir()if re.match(\"[A-Z][A-Z0-9_]+$\",x)])\n\n\nclass _Framer:\n\n _FRAME_SIZE_MIN=4\n _FRAME_SIZE_TARGET=64 *1024\n \n def __init__(self,file_write):\n self.file_write=file_write\n self.current_frame=None\n \n def start_framing(self):\n self.current_frame=io.BytesIO()\n \n def end_framing(self):\n if self.current_frame and self.current_frame.tell()>0:\n self.commit_frame(force=True )\n self.current_frame=None\n \n def commit_frame(self,force=False ):\n if self.current_frame:\n f=self.current_frame\n if f.tell()>=self._FRAME_SIZE_TARGET or force:\n data=f.getbuffer()\n write=self.file_write\n if len(data)>=self._FRAME_SIZE_MIN:\n \n \n \n \n write(FRAME+pack(\"':\n raise AttributeError(\"Can't get local attribute {!r} on {!r}\"\n .format(name,obj))\n try :\n parent=obj\n obj=getattr(obj,subpath)\n except AttributeError:\n raise AttributeError(\"Can't get attribute {!r} on {!r}\"\n .format(name,obj))from None\n return obj,parent\n \ndef whichmodule(obj,name):\n ''\n module_name=getattr(obj,'__module__',None )\n if module_name is not None :\n return module_name\n \n \n for module_name,module in list(sys.modules.items()):\n if module_name =='__main__'or module is None :\n continue\n try :\n if _getattribute(module,name)[0]is obj:\n return module_name\n except AttributeError:\n pass\n return '__main__'\n \ndef encode_long(x):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if x ==0:\n return b''\n nbytes=(x.bit_length()>>3)+1\n result=x.to_bytes(nbytes,byteorder='little',signed=True )\n if x <0 and nbytes >1:\n if result[-1]==0xff and (result[-2]&0x80)!=0:\n result=result[:-1]\n return result\n \ndef decode_long(data):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n return int.from_bytes(data,byteorder='little',signed=True )\n \n \n \n \nclass _Pickler:\n\n def __init__(self,file,protocol=None ,*,fix_imports=True ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if protocol is None :\n protocol=DEFAULT_PROTOCOL\n if protocol <0:\n protocol=HIGHEST_PROTOCOL\n elif not 0 <=protocol <=HIGHEST_PROTOCOL:\n raise ValueError(\"pickle protocol must be <= %d\"%HIGHEST_PROTOCOL)\n try :\n self._file_write=file.write\n except AttributeError:\n raise TypeError(\"file must have a 'write' attribute\")\n self.framer=_Framer(self._file_write)\n self.write=self.framer.write\n self._write_large_bytes=self.framer.write_large_bytes\n self.memo={}\n self.proto=int(protocol)\n self.bin=protocol >=1\n self.fast=0\n 
self.fix_imports=fix_imports and protocol <3\n \n def clear_memo(self):\n ''\n\n\n\n\n\n \n self.memo.clear()\n \n def dump(self,obj):\n ''\n \n \n if not hasattr(self,\"_file_write\"):\n raise PicklingError(\"Pickler.__init__() was not called by \"\n \"%s.__init__()\"%(self.__class__.__name__,))\n if self.proto >=2:\n self.write(PROTO+pack(\"=4:\n self.framer.start_framing()\n self.save(obj)\n self.write(STOP)\n self.framer.end_framing()\n \n def memoize(self,obj):\n ''\n \n \n \n \n \n \n \n \n \n \n \n \n \n if self.fast:\n return\n assert id(obj)not in self.memo\n idx=len(self.memo)\n self.write(self.put(idx))\n self.memo[id(obj)]=idx,obj\n \n \n def put(self,idx):\n if self.proto >=4:\n return MEMOIZE\n elif self.bin:\n if idx <256:\n return BINPUT+pack(\"=2 and func_name ==\"__newobj_ex__\":\n cls,args,kwargs=args\n if not hasattr(cls,\"__new__\"):\n raise PicklingError(\"args[0] from {} args has no __new__\"\n .format(func_name))\n if obj is not None and cls is not obj.__class__:\n raise PicklingError(\"args[0] from {} args has the wrong class\"\n .format(func_name))\n if self.proto >=4:\n save(cls)\n save(args)\n save(kwargs)\n write(NEWOBJ_EX)\n else :\n func=partial(cls.__new__,cls,*args,**kwargs)\n save(func)\n save(())\n write(REDUCE)\n elif self.proto >=2 and func_name ==\"__newobj__\":\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n cls=args[0]\n if not hasattr(cls,\"__new__\"):\n raise PicklingError(\n \"args[0] from __newobj__ args has no __new__\")\n if obj is not None and cls is not obj.__class__:\n raise PicklingError(\n \"args[0] from __newobj__ args has the wrong class\")\n args=args[1:]\n save(cls)\n save(args)\n write(NEWOBJ)\n else :\n save(func)\n save(args)\n write(REDUCE)\n \n if obj is not None :\n \n \n \n if id(obj)in self.memo:\n write(POP+self.get(self.memo[id(obj)][0]))\n else :\n self.memoize(obj)\n \n \n \n \n \n \n if listitems is not None :\n self._batch_appends(listitems)\n \n if dictitems is not None :\n self._batch_setitems(dictitems)\n \n if state is not None :\n save(state)\n write(BUILD)\n \n \n \n dispatch={}\n \n def save_none(self,obj):\n self.write(NONE)\n dispatch[type(None )]=save_none\n \n def save_bool(self,obj):\n if self.proto >=2:\n self.write(NEWTRUE if obj else NEWFALSE)\n else :\n self.write(TRUE if obj else FALSE)\n dispatch[bool]=save_bool\n \n def save_long(self,obj):\n if self.bin:\n \n \n \n \n if obj >=0:\n if obj <=0xff:\n self.write(BININT1+pack(\"=2:\n encoded=encode_long(obj)\n n=len(encoded)\n if n <256:\n self.write(LONG1+pack(\"d',obj))\n else :\n self.write(FLOAT+repr(obj).encode(\"ascii\")+b'\\n')\n dispatch[float]=save_float\n \n def save_bytes(self,obj):\n if self.proto <3:\n if not obj:\n self.save_reduce(bytes,(),obj=obj)\n else :\n self.save_reduce(codecs.encode,\n (str(obj,'latin1'),'latin1'),obj=obj)\n return\n n=len(obj)\n if n <=0xff:\n self.write(SHORT_BINBYTES+pack(\"0xffffffff and self.proto >=4:\n self._write_large_bytes(BINBYTES8+pack(\"=self.framer._FRAME_SIZE_TARGET:\n self._write_large_bytes(BINBYTES+pack(\"=4:\n self.write(SHORT_BINUNICODE+pack(\"0xffffffff and self.proto >=4:\n self._write_large_bytes(BINUNICODE8+pack(\"=self.framer._FRAME_SIZE_TARGET:\n self._write_large_bytes(BINUNICODE+pack(\"=2:\n for element in obj:\n save(element)\n \n if id(obj)in memo:\n get=self.get(memo[id(obj)][0])\n self.write(POP *n+get)\n else :\n self.write(_tuplesize2code[n])\n self.memoize(obj)\n return\n \n \n \n write=self.write\n write(MARK)\n for element in obj:\n save(element)\n 
\n if id(obj)in memo:\n \n \n \n \n \n \n \n get=self.get(memo[id(obj)][0])\n if self.bin:\n write(POP_MARK+get)\n else :\n write(POP *(n+1)+get)\n return\n \n \n write(TUPLE)\n self.memoize(obj)\n \n dispatch[tuple]=save_tuple\n \n def save_list(self,obj):\n if self.bin:\n self.write(EMPTY_LIST)\n else :\n self.write(MARK+LIST)\n \n self.memoize(obj)\n self._batch_appends(obj)\n \n dispatch[list]=save_list\n \n _BATCHSIZE=1000\n \n def _batch_appends(self,items):\n \n save=self.save\n write=self.write\n \n if not self.bin:\n for x in items:\n save(x)\n write(APPEND)\n return\n \n it=iter(items)\n while True :\n tmp=list(islice(it,self._BATCHSIZE))\n n=len(tmp)\n if n >1:\n write(MARK)\n for x in tmp:\n save(x)\n write(APPENDS)\n elif n:\n save(tmp[0])\n write(APPEND)\n \n if n 1:\n write(MARK)\n for k,v in tmp:\n save(k)\n save(v)\n write(SETITEMS)\n elif n:\n k,v=tmp[0]\n save(k)\n save(v)\n write(SETITEM)\n \n if n 0:\n write(MARK)\n for item in batch:\n save(item)\n write(ADDITEMS)\n if n =2:\n code=_extension_registry.get((module_name,name))\n if code:\n assert code >0\n if code <=0xff:\n write(EXT1+pack(\"=4:\n self.save(module_name)\n self.save(name)\n write(STACK_GLOBAL)\n elif parent is not module:\n self.save_reduce(getattr,(parent,lastname))\n elif self.proto >=3:\n write(GLOBAL+bytes(module_name,\"utf-8\")+b'\\n'+\n bytes(name,\"utf-8\")+b'\\n')\n else :\n if self.fix_imports:\n r_name_mapping=_compat_pickle.REVERSE_NAME_MAPPING\n r_import_mapping=_compat_pickle.REVERSE_IMPORT_MAPPING\n if (module_name,name)in r_name_mapping:\n module_name,name=r_name_mapping[(module_name,name)]\n elif module_name in r_import_mapping:\n module_name=r_import_mapping[module_name]\n try :\n write(GLOBAL+bytes(module_name,\"ascii\")+b'\\n'+\n bytes(name,\"ascii\")+b'\\n')\n except UnicodeEncodeError:\n raise PicklingError(\n \"can't pickle global identifier '%s.%s' using \"\n \"pickle protocol %i\"%(module,name,self.proto))from None\n \n self.memoize(obj)\n \n def save_type(self,obj):\n if obj is type(None ):\n return self.save_reduce(type,(None ,),obj=obj)\n elif obj is type(NotImplemented):\n return self.save_reduce(type,(NotImplemented,),obj=obj)\n elif obj is type(...):\n return self.save_reduce(type,(...,),obj=obj)\n return self.save_global(obj)\n \n dispatch[FunctionType]=save_global\n dispatch[type]=save_type\n \n \n \n \nclass _Unpickler:\n\n def __init__(self,file,*,fix_imports=True ,\n encoding=\"ASCII\",errors=\"strict\"):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n self._file_readline=file.readline\n self._file_read=file.read\n self.memo={}\n self.encoding=encoding\n self.errors=errors\n self.proto=0\n self.fix_imports=fix_imports\n \n def load(self):\n ''\n\n\n \n \n \n if not hasattr(self,\"_file_read\"):\n raise UnpicklingError(\"Unpickler.__init__() was not called by \"\n \"%s.__init__()\"%(self.__class__.__name__,))\n self._unframer=_Unframer(self._file_read,self._file_readline)\n self.read=self._unframer.read\n self.readline=self._unframer.readline\n self.metastack=[]\n self.stack=[]\n self.append=self.stack.append\n self.proto=0\n read=self.read\n dispatch=self.dispatch\n try :\n while True :\n key=read(1)\n if not key:\n raise EOFError\n assert isinstance(key,bytes_types)\n dispatch[key[0]](self)\n except _Stop as stopinst:\n return stopinst.value\n \n \n def pop_mark(self):\n items=self.stack\n self.stack=self.metastack.pop()\n self.append=self.stack.append\n return items\n \n def persistent_load(self,pid):\n raise UnpicklingError(\"unsupported persistent id 
encountered\")\n \n dispatch={}\n \n def load_proto(self):\n proto=self.read(1)[0]\n if not 0 <=proto <=HIGHEST_PROTOCOL:\n raise ValueError(\"unsupported pickle protocol: %d\"%proto)\n self.proto=proto\n dispatch[PROTO[0]]=load_proto\n \n def load_frame(self):\n frame_size,=unpack('sys.maxsize:\n raise ValueError(\"frame size > sys.maxsize: %d\"%frame_size)\n self._unframer.load_frame(frame_size)\n dispatch[FRAME[0]]=load_frame\n \n def load_persid(self):\n try :\n pid=self.readline()[:-1].decode(\"ascii\")\n except UnicodeDecodeError:\n raise UnpicklingError(\n \"persistent IDs in protocol 0 must be ASCII strings\")\n self.append(self.persistent_load(pid))\n dispatch[PERSID[0]]=load_persid\n \n def load_binpersid(self):\n pid=self.stack.pop()\n self.append(self.persistent_load(pid))\n dispatch[BINPERSID[0]]=load_binpersid\n \n def load_none(self):\n self.append(None )\n dispatch[NONE[0]]=load_none\n \n def load_false(self):\n self.append(False )\n dispatch[NEWFALSE[0]]=load_false\n \n def load_true(self):\n self.append(True )\n dispatch[NEWTRUE[0]]=load_true\n \n def load_int(self):\n data=self.readline()\n if data ==FALSE[1:]:\n val=False\n elif data ==TRUE[1:]:\n val=True\n else :\n val=int(data,0)\n self.append(val)\n dispatch[INT[0]]=load_int\n \n def load_binint(self):\n self.append(unpack('d',self.read(8))[0])\n dispatch[BINFLOAT[0]]=load_binfloat\n \n def _decode_string(self,value):\n \n \n \n if self.encoding ==\"bytes\":\n return value\n else :\n return value.decode(self.encoding,self.errors)\n \n def load_string(self):\n data=self.readline()[:-1]\n \n if len(data)>=2 and data[0]==data[-1]and data[0]in b'\"\\'':\n data=data[1:-1]\n else :\n raise UnpicklingError(\"the STRING opcode argument must be quoted\")\n self.append(self._decode_string(codecs.escape_decode(data)[0]))\n dispatch[STRING[0]]=load_string\n \n def load_binstring(self):\n \n len,=unpack('maxsize:\n raise UnpicklingError(\"BINBYTES exceeds system's maximum size \"\n \"of %d bytes\"%maxsize)\n self.append(self.read(len))\n dispatch[BINBYTES[0]]=load_binbytes\n \n def load_unicode(self):\n self.append(str(self.readline()[:-1],'raw-unicode-escape'))\n dispatch[UNICODE[0]]=load_unicode\n \n def load_binunicode(self):\n len,=unpack('maxsize:\n raise UnpicklingError(\"BINUNICODE exceeds system's maximum size \"\n \"of %d bytes\"%maxsize)\n self.append(str(self.read(len),'utf-8','surrogatepass'))\n dispatch[BINUNICODE[0]]=load_binunicode\n \n def load_binunicode8(self):\n len,=unpack('maxsize:\n raise UnpicklingError(\"BINUNICODE8 exceeds system's maximum size \"\n \"of %d bytes\"%maxsize)\n self.append(str(self.read(len),'utf-8','surrogatepass'))\n dispatch[BINUNICODE8[0]]=load_binunicode8\n \n def load_binbytes8(self):\n len,=unpack('maxsize:\n raise UnpicklingError(\"BINBYTES8 exceeds system's maximum size \"\n \"of %d bytes\"%maxsize)\n self.append(self.read(len))\n dispatch[BINBYTES8[0]]=load_binbytes8\n \n def load_short_binstring(self):\n len=self.read(1)[0]\n data=self.read(len)\n self.append(self._decode_string(data))\n dispatch[SHORT_BINSTRING[0]]=load_short_binstring\n \n def load_short_binbytes(self):\n len=self.read(1)[0]\n self.append(self.read(len))\n dispatch[SHORT_BINBYTES[0]]=load_short_binbytes\n \n def load_short_binunicode(self):\n len=self.read(1)[0]\n self.append(str(self.read(len),'utf-8','surrogatepass'))\n dispatch[SHORT_BINUNICODE[0]]=load_short_binunicode\n \n def load_tuple(self):\n items=self.pop_mark()\n self.append(tuple(items))\n dispatch[TUPLE[0]]=load_tuple\n \n def 
load_empty_tuple(self):\n self.append(())\n dispatch[EMPTY_TUPLE[0]]=load_empty_tuple\n \n def load_tuple1(self):\n self.stack[-1]=(self.stack[-1],)\n dispatch[TUPLE1[0]]=load_tuple1\n \n def load_tuple2(self):\n self.stack[-2:]=[(self.stack[-2],self.stack[-1])]\n dispatch[TUPLE2[0]]=load_tuple2\n \n def load_tuple3(self):\n self.stack[-3:]=[(self.stack[-3],self.stack[-2],self.stack[-1])]\n dispatch[TUPLE3[0]]=load_tuple3\n \n def load_empty_list(self):\n self.append([])\n dispatch[EMPTY_LIST[0]]=load_empty_list\n \n def load_empty_dictionary(self):\n self.append({})\n dispatch[EMPTY_DICT[0]]=load_empty_dictionary\n \n def load_empty_set(self):\n self.append(set())\n dispatch[EMPTY_SET[0]]=load_empty_set\n \n def load_frozenset(self):\n items=self.pop_mark()\n self.append(frozenset(items))\n dispatch[FROZENSET[0]]=load_frozenset\n \n def load_list(self):\n items=self.pop_mark()\n self.append(items)\n dispatch[LIST[0]]=load_list\n \n def load_dict(self):\n items=self.pop_mark()\n d={items[i]:items[i+1]\n for i in range(0,len(items),2)}\n self.append(d)\n dispatch[DICT[0]]=load_dict\n \n \n \n \n \n \n def _instantiate(self,klass,args):\n if (args or not isinstance(klass,type)or\n hasattr(klass,\"__getinitargs__\")):\n try :\n value=klass(*args)\n except TypeError as err:\n raise TypeError(\"in constructor for %s: %s\"%\n (klass.__name__,str(err)),sys.exc_info()[2])\n else :\n value=klass.__new__(klass)\n self.append(value)\n \n def load_inst(self):\n module=self.readline()[:-1].decode(\"ascii\")\n name=self.readline()[:-1].decode(\"ascii\")\n klass=self.find_class(module,name)\n self._instantiate(klass,self.pop_mark())\n dispatch[INST[0]]=load_inst\n \n def load_obj(self):\n \n args=self.pop_mark()\n cls=args.pop(0)\n self._instantiate(cls,args)\n dispatch[OBJ[0]]=load_obj\n \n def load_newobj(self):\n args=self.stack.pop()\n cls=self.stack.pop()\n obj=cls.__new__(cls,*args)\n self.append(obj)\n dispatch[NEWOBJ[0]]=load_newobj\n \n def load_newobj_ex(self):\n kwargs=self.stack.pop()\n args=self.stack.pop()\n cls=self.stack.pop()\n obj=cls.__new__(cls,*args,**kwargs)\n self.append(obj)\n dispatch[NEWOBJ_EX[0]]=load_newobj_ex\n \n def load_global(self):\n module=self.readline()[:-1].decode(\"utf-8\")\n name=self.readline()[:-1].decode(\"utf-8\")\n klass=self.find_class(module,name)\n self.append(klass)\n dispatch[GLOBAL[0]]=load_global\n \n def load_stack_global(self):\n name=self.stack.pop()\n module=self.stack.pop()\n if type(name)is not str or type(module)is not str:\n raise UnpicklingError(\"STACK_GLOBAL requires str\")\n self.append(self.find_class(module,name))\n dispatch[STACK_GLOBAL[0]]=load_stack_global\n \n def load_ext1(self):\n code=self.read(1)[0]\n self.get_extension(code)\n dispatch[EXT1[0]]=load_ext1\n \n def load_ext2(self):\n code,=unpack('=4:\n return _getattribute(sys.modules[module],name)[0]\n else :\n return getattr(sys.modules[module],name)\n \n def load_reduce(self):\n stack=self.stack\n args=stack.pop()\n func=stack[-1]\n stack[-1]=func(*args)\n dispatch[REDUCE[0]]=load_reduce\n \n def load_pop(self):\n if self.stack:\n del self.stack[-1]\n else :\n self.pop_mark()\n dispatch[POP[0]]=load_pop\n \n def load_pop_mark(self):\n self.pop_mark()\n dispatch[POP_MARK[0]]=load_pop_mark\n \n def load_dup(self):\n self.append(self.stack[-1])\n dispatch[DUP[0]]=load_dup\n \n def load_get(self):\n i=int(self.readline()[:-1])\n self.append(self.memo[i])\n dispatch[GET[0]]=load_get\n \n def load_binget(self):\n i=self.read(1)[0]\n self.append(self.memo[i])\n 
dispatch[BINGET[0]]=load_binget\n \n def load_long_binget(self):\n i,=unpack('maxsize:\n raise ValueError(\"negative LONG_BINPUT argument\")\n self.memo[i]=self.stack[-1]\n dispatch[LONG_BINPUT[0]]=load_long_binput\n \n def load_memoize(self):\n memo=self.memo\n memo[len(memo)]=self.stack[-1]\n dispatch[MEMOIZE[0]]=load_memoize\n \n def load_append(self):\n stack=self.stack\n value=stack.pop()\n list=stack[-1]\n list.append(value)\n dispatch[APPEND[0]]=load_append\n \n def load_appends(self):\n items=self.pop_mark()\n list_obj=self.stack[-1]\n try :\n extend=list_obj.extend\n except AttributeError:\n pass\n else :\n extend(items)\n return\n \n \n \n append=list_obj.append\n for item in items:\n append(item)\n dispatch[APPENDS[0]]=load_appends\n \n def load_setitem(self):\n stack=self.stack\n value=stack.pop()\n key=stack.pop()\n dict=stack[-1]\n dict[key]=value\n dispatch[SETITEM[0]]=load_setitem\n \n def load_setitems(self):\n items=self.pop_mark()\n dict=self.stack[-1]\n for i in range(0,len(items),2):\n dict[items[i]]=items[i+1]\n dispatch[SETITEMS[0]]=load_setitems\n \n def load_additems(self):\n items=self.pop_mark()\n set_obj=self.stack[-1]\n if isinstance(set_obj,set):\n set_obj.update(items)\n else :\n add=set_obj.add\n for item in items:\n add(item)\n dispatch[ADDITEMS[0]]=load_additems\n \n def load_build(self):\n stack=self.stack\n state=stack.pop()\n inst=stack[-1]\n setstate=getattr(inst,\"__setstate__\",None )\n if setstate is not None :\n setstate(state)\n return\n slotstate=None\n if isinstance(state,tuple)and len(state)==2:\n state,slotstate=state\n if state:\n inst_dict=inst.__dict__\n intern=sys.intern\n for k,v in state.items():\n if type(k)is str:\n inst_dict[intern(k)]=v\n else :\n inst_dict[k]=v\n if slotstate:\n for k,v in slotstate.items():\n setattr(inst,k,v)\n dispatch[BUILD[0]]=load_build\n \n def load_mark(self):\n self.metastack.append(self.stack)\n self.stack=[]\n self.append=self.stack.append\n dispatch[MARK[0]]=load_mark\n \n def load_stop(self):\n value=self.stack.pop()\n raise _Stop(value)\n dispatch[STOP[0]]=load_stop\n \n \n \n \ndef _dump(obj,file,protocol=None ,*,fix_imports=True ):\n _Pickler(file,protocol,fix_imports=fix_imports).dump(obj)\n \ndef _dumps(obj,protocol=None ,*,fix_imports=True ):\n f=io.BytesIO()\n _Pickler(f,protocol,fix_imports=fix_imports).dump(obj)\n res=f.getvalue()\n assert isinstance(res,bytes_types)\n return res\n \ndef _load(file,*,fix_imports=True ,encoding=\"ASCII\",errors=\"strict\"):\n return _Unpickler(file,fix_imports=fix_imports,\n encoding=encoding,errors=errors).load()\n \ndef _loads(s,*,fix_imports=True ,encoding=\"ASCII\",errors=\"strict\"):\n if isinstance(s,str):\n raise TypeError(\"Can't load pickle from unicode string\")\n file=io.BytesIO(s)\n return _Unpickler(file,fix_imports=fix_imports,\n encoding=encoding,errors=errors).load()\n \n \ntry :\n from _pickle import (\n PickleError,\n PicklingError,\n UnpicklingError,\n Pickler,\n Unpickler,\n dump,\n dumps,\n load,\n loads\n )\nexcept ImportError:\n Pickler,Unpickler=_Pickler,_Unpickler\n dump,dumps,load,loads=_dump,_dumps,_load,_loads\n \n \ndef _test():\n import doctest\n return doctest.testmod()\n \nif __name__ ==\"__main__\":\n import argparse\n parser=argparse.ArgumentParser(\n description='display contents of the pickle files')\n parser.add_argument(\n 'pickle_file',type=argparse.FileType('br'),\n nargs='*',help='the pickle file')\n parser.add_argument(\n '-t','--test',action='store_true',\n help='run self-test suite')\n parser.add_argument(\n 
'-v',action='store_true',\n help='run verbosely; only affects self-test run')\n args=parser.parse_args()\n if args.test:\n _test()\n else :\n if not args.pickle_file:\n parser.print_help()\n else :\n import pprint\n for f in args.pickle_file:\n obj=load(f)\n pprint.pprint(obj)\n", ["_compat_pickle", "_pickle", "argparse", "codecs", "copyreg", "doctest", "functools", "io", "itertools", "org.python.core", "pprint", "re", "struct", "sys", "types"]], "platform": [".py", "''\n\n\n\nfrom browser import self as window\n\ndef architecture(*args,**kw):\n return \"\",window.navigator.platform\n \ndef machine(*args,**kw):\n return ''\n \ndef node(*args,**kw):\n return ''\n \ndef platform(*args,**kw):\n return window.navigator.platform\n \ndef processor(*args,**kw):\n return ''\n \ndef python_build():\n return ('.'.join(map(str,__BRYTHON__.implementation[:-1])),\n __BRYTHON__.compiled_date)\n \ndef python_compiler():\n return ''\n \ndef python_branch():\n return ''\n \ndef python_implementation():\n return 'Brython'\n \ndef python_revision():\n return ''\n \ndef python_version():\n return '.'.join(map(str,__BRYTHON__.version_info[:3]))\n \ndef python_version_tuple():\n return __BRYTHON__.version_info[:3]\n \ndef release():\n return ''\n \ndef system():\n return window.navigator.platform\n \ndef system_alias(*args,**kw):\n return window.navigator.platform\n \ndef uname():\n from collections import namedtuple\n klass=namedtuple('uname_result',\n 'system node release version machine processor')\n return klass(window.navigator.platform,'','','','','')\n", ["browser", "collections"]], "posixpath": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\ncurdir='.'\npardir='..'\nextsep='.'\nsep='/'\npathsep=':'\ndefpath=':/bin:/usr/bin'\naltsep=None\ndevnull='/dev/null'\n\nimport os\nimport sys\nimport stat\nimport genericpath\nfrom genericpath import *\n\n__all__=[\"normcase\",\"isabs\",\"join\",\"splitdrive\",\"split\",\"splitext\",\n\"basename\",\"dirname\",\"commonprefix\",\"getsize\",\"getmtime\",\n\"getatime\",\"getctime\",\"islink\",\"exists\",\"lexists\",\"isdir\",\"isfile\",\n\"ismount\",\"expanduser\",\"expandvars\",\"normpath\",\"abspath\",\n\"samefile\",\"sameopenfile\",\"samestat\",\n\"curdir\",\"pardir\",\"sep\",\"pathsep\",\"defpath\",\"altsep\",\"extsep\",\n\"devnull\",\"realpath\",\"supports_unicode_filenames\",\"relpath\",\n\"commonpath\"]\n\n\ndef _get_sep(path):\n if isinstance(path,bytes):\n return b'/'\n else :\n return '/'\n \n \n \n \n \n \ndef normcase(s):\n ''\n s=os.fspath(s)\n if not isinstance(s,(bytes,str)):\n raise TypeError(\"normcase() argument must be str or bytes, \"\n \"not '{}'\".format(s.__class__.__name__))\n return s\n \n \n \n \n \ndef isabs(s):\n ''\n s=os.fspath(s)\n sep=_get_sep(s)\n return s.startswith(sep)\n \n \n \n \n \n \ndef join(a,*p):\n ''\n\n\n \n a=os.fspath(a)\n sep=_get_sep(a)\n path=a\n try :\n if not p:\n path[:0]+sep\n for b in map(os.fspath,p):\n if b.startswith(sep):\n path=b\n elif not path or path.endswith(sep):\n path +=b\n else :\n path +=sep+b\n except (TypeError,AttributeError,BytesWarning):\n genericpath._check_arg_types('join',a,*p)\n raise\n return path\n \n \n \n \n \n \n \ndef split(p):\n ''\n \n p=os.fspath(p)\n sep=_get_sep(p)\n i=p.rfind(sep)+1\n head,tail=p[:i],p[i:]\n if head and head !=sep *len(head):\n head=head.rstrip(sep)\n return head,tail\n \n \n \n \n \n \n \ndef splitext(p):\n p=os.fspath(p)\n if isinstance(p,bytes):\n sep=b'/'\n extsep=b'.'\n else :\n sep='/'\n extsep='.'\n return genericpath._splitext(p,sep,None 
,extsep)\nsplitext.__doc__=genericpath._splitext.__doc__\n\n\n\n\ndef splitdrive(p):\n ''\n \n p=os.fspath(p)\n return p[:0],p\n \n \n \n \ndef basename(p):\n ''\n p=os.fspath(p)\n sep=_get_sep(p)\n i=p.rfind(sep)+1\n return p[i:]\n \n \n \n \ndef dirname(p):\n ''\n p=os.fspath(p)\n sep=_get_sep(p)\n i=p.rfind(sep)+1\n head=p[:i]\n if head and head !=sep *len(head):\n head=head.rstrip(sep)\n return head\n \n \n \n \n \ndef islink(path):\n ''\n try :\n st=os.lstat(path)\n except (OSError,AttributeError):\n return False\n return stat.S_ISLNK(st.st_mode)\n \n \n \ndef lexists(path):\n ''\n try :\n os.lstat(path)\n except OSError:\n return False\n return True\n \n \n \n \n \ndef ismount(path):\n ''\n try :\n s1=os.lstat(path)\n except OSError:\n \n return False\n else :\n \n if stat.S_ISLNK(s1.st_mode):\n return False\n \n if isinstance(path,bytes):\n parent=join(path,b'..')\n else :\n parent=join(path,'..')\n parent=realpath(parent)\n try :\n s2=os.lstat(parent)\n except OSError:\n return False\n \n dev1=s1.st_dev\n dev2=s2.st_dev\n if dev1 !=dev2:\n return True\n ino1=s1.st_ino\n ino2=s2.st_ino\n if ino1 ==ino2:\n return True\n return False\n \n \n \n \n \n \n \n \n \n \n \ndef expanduser(path):\n ''\n \n path=os.fspath(path)\n if isinstance(path,bytes):\n tilde=b'~'\n else :\n tilde='~'\n if not path.startswith(tilde):\n return path\n sep=_get_sep(path)\n i=path.find(sep,1)\n if i <0:\n i=len(path)\n if i ==1:\n if 'HOME'not in os.environ:\n import pwd\n userhome=pwd.getpwuid(os.getuid()).pw_dir\n else :\n userhome=os.environ['HOME']\n else :\n import pwd\n name=path[1:i]\n if isinstance(name,bytes):\n name=str(name,'ASCII')\n try :\n pwent=pwd.getpwnam(name)\n except KeyError:\n return path\n userhome=pwent.pw_dir\n if isinstance(path,bytes):\n userhome=os.fsencode(userhome)\n root=b'/'\n else :\n root='/'\n userhome=userhome.rstrip(root)\n return (userhome+path[i:])or root\n \n \n \n \n \n \n_varprog=None\n_varprogb=None\n\ndef expandvars(path):\n ''\n \n path=os.fspath(path)\n global _varprog,_varprogb\n if isinstance(path,bytes):\n if b'$'not in path:\n return path\n if not _varprogb:\n import re\n _varprogb=re.compile(br'\\$(\\w+|\\{[^}]*\\})',re.ASCII)\n search=_varprogb.search\n start=b'{'\n end=b'}'\n environ=getattr(os,'environb',None )\n else :\n if '$'not in path:\n return path\n if not _varprog:\n import re\n _varprog=re.compile(r'\\$(\\w+|\\{[^}]*\\})',re.ASCII)\n search=_varprog.search\n start='{'\n end='}'\n environ=os.environ\n i=0\n while True :\n m=search(path,i)\n if not m:\n break\n i,j=m.span(0)\n name=m.group(1)\n if name.startswith(start)and name.endswith(end):\n name=name[1:-1]\n try :\n if environ is None :\n value=os.fsencode(os.environ[os.fsdecode(name)])\n else :\n value=environ[name]\n except KeyError:\n i=j\n else :\n tail=path[j:]\n path=path[:i]+value\n i=len(path)\n path +=tail\n return path\n \n \n \n \n \n \ndef normpath(path):\n ''\n path=os.fspath(path)\n if isinstance(path,bytes):\n sep=b'/'\n empty=b''\n dot=b'.'\n dotdot=b'..'\n else :\n sep='/'\n empty=''\n dot='.'\n dotdot='..'\n if path ==empty:\n return dot\n initial_slashes=path.startswith(sep)\n \n \n if (initial_slashes and\n path.startswith(sep *2)and not path.startswith(sep *3)):\n initial_slashes=2\n comps=path.split(sep)\n new_comps=[]\n for comp in comps:\n if comp in (empty,dot):\n continue\n if (comp !=dotdot or (not initial_slashes and not new_comps)or\n (new_comps and new_comps[-1]==dotdot)):\n new_comps.append(comp)\n elif new_comps:\n new_comps.pop()\n comps=new_comps\n 
path=sep.join(comps)\n if initial_slashes:\n path=sep *initial_slashes+path\n return path or dot\n \n \ndef abspath(path):\n ''\n path=os.fspath(path)\n if not isabs(path):\n if isinstance(path,bytes):\n cwd=os.getcwdb()\n else :\n cwd=os.getcwd()\n path=join(cwd,path)\n return normpath(path)\n \n \n \n \n \ndef realpath(filename):\n ''\n \n filename=os.fspath(filename)\n path,ok=_joinrealpath(filename[:0],filename,{})\n return abspath(path)\n \n \n \ndef _joinrealpath(path,rest,seen):\n if isinstance(path,bytes):\n sep=b'/'\n curdir=b'.'\n pardir=b'..'\n else :\n sep='/'\n curdir='.'\n pardir='..'\n \n if isabs(rest):\n rest=rest[1:]\n path=sep\n \n while rest:\n name,_,rest=rest.partition(sep)\n if not name or name ==curdir:\n \n continue\n if name ==pardir:\n \n if path:\n path,name=split(path)\n if name ==pardir:\n path=join(path,pardir,pardir)\n else :\n path=pardir\n continue\n newpath=join(path,name)\n if not islink(newpath):\n path=newpath\n continue\n \n if newpath in seen:\n \n path=seen[newpath]\n if path is not None :\n \n continue\n \n \n return join(newpath,rest),False\n seen[newpath]=None\n path,ok=_joinrealpath(path,os.readlink(newpath),seen)\n if not ok:\n return join(path,rest),False\n seen[newpath]=path\n \n return path,True\n \n \nsupports_unicode_filenames=(sys.platform =='darwin')\n\ndef relpath(path,start=None ):\n ''\n \n if not path:\n raise ValueError(\"no path specified\")\n \n path=os.fspath(path)\n if isinstance(path,bytes):\n curdir=b'.'\n sep=b'/'\n pardir=b'..'\n else :\n curdir='.'\n sep='/'\n pardir='..'\n \n if start is None :\n start=curdir\n else :\n start=os.fspath(start)\n \n try :\n start_list=[x for x in abspath(start).split(sep)if x]\n path_list=[x for x in abspath(path).split(sep)if x]\n \n i=len(commonprefix([start_list,path_list]))\n \n rel_list=[pardir]*(len(start_list)-i)+path_list[i:]\n if not rel_list:\n return curdir\n return join(*rel_list)\n except (TypeError,AttributeError,BytesWarning,DeprecationWarning):\n genericpath._check_arg_types('relpath',path,start)\n raise\n \n \n \n \n \n \n \ndef commonpath(paths):\n ''\n \n if not paths:\n raise ValueError('commonpath() arg is an empty sequence')\n \n paths=tuple(map(os.fspath,paths))\n if isinstance(paths[0],bytes):\n sep=b'/'\n curdir=b'.'\n else :\n sep='/'\n curdir='.'\n \n try :\n split_paths=[path.split(sep)for path in paths]\n \n try :\n isabs,=set(p[:1]==sep for p in paths)\n except ValueError:\n raise ValueError(\"Can't mix absolute and relative paths\")from None\n \n split_paths=[[c for c in s if c and c !=curdir]for s in split_paths]\n s1=min(split_paths)\n s2=max(split_paths)\n common=s1\n for i,c in enumerate(s1):\n if c !=s2[i]:\n common=s1[:i]\n break\n \n prefix=sep if isabs else sep[:0]\n return prefix+sep.join(common)\n except (TypeError,AttributeError):\n genericpath._check_arg_types('commonpath',*paths)\n raise\n", ["genericpath", "os", "pwd", "re", "stat", "sys"]], "pprint": [".py", "\n\n\n\n\n\n\n\n\n\n\"\"\"Support to pretty-print lists, tuples, & dictionaries recursively.\n\nVery simple, but useful, especially in debugging data structures.\n\nClasses\n-------\n\nPrettyPrinter()\n Handle pretty-printing operations onto a stream using a configured\n set of formatting parameters.\n\nFunctions\n---------\n\npformat()\n Format a Python object into a pretty-printed representation.\n\npprint()\n Pretty-print a Python object to a stream [default is sys.stdout].\n\nsaferepr()\n Generate a 'standard' repr()-like value, but protect against recursive\n data 
structures.\n\n\"\"\"\n\nimport collections as _collections\nimport re\nimport sys as _sys\nimport types as _types\nfrom io import StringIO as _StringIO\n\n__all__=[\"pprint\",\"pformat\",\"isreadable\",\"isrecursive\",\"saferepr\",\n\"PrettyPrinter\"]\n\n\ndef pprint(object,stream=None ,indent=1,width=80,depth=None ,*,\ncompact=False ):\n ''\n printer=PrettyPrinter(\n stream=stream,indent=indent,width=width,depth=depth,\n compact=compact)\n printer.pprint(object)\n \ndef pformat(object,indent=1,width=80,depth=None ,*,compact=False ):\n ''\n return PrettyPrinter(indent=indent,width=width,depth=depth,\n compact=compact).pformat(object)\n \ndef saferepr(object):\n ''\n return _safe_repr(object,{},None ,0)[0]\n \ndef isreadable(object):\n ''\n return _safe_repr(object,{},None ,0)[1]\n \ndef isrecursive(object):\n ''\n return _safe_repr(object,{},None ,0)[2]\n \nclass _safe_key:\n ''\n\n\n\n\n\n\n \n \n __slots__=['obj']\n \n def __init__(self,obj):\n self.obj=obj\n \n def __lt__(self,other):\n try :\n return self.obj = 0')\n if depth is not None and depth <=0:\n raise ValueError('depth must be > 0')\n if not width:\n raise ValueError('width must be != 0')\n self._depth=depth\n self._indent_per_level=indent\n self._width=width\n if stream is not None :\n self._stream=stream\n else :\n self._stream=_sys.stdout\n self._compact=bool(compact)\n \n def pprint(self,object):\n self._format(object,self._stream,0,0,{},0)\n self._stream.write(\"\\n\")\n \n def pformat(self,object):\n sio=_StringIO()\n self._format(object,sio,0,0,{},0)\n return sio.getvalue()\n \n def isrecursive(self,object):\n return self.format(object,{},0,0)[2]\n \n def isreadable(self,object):\n s,readable,recursive=self.format(object,{},0,0)\n return readable and not recursive\n \n def _format(self,object,stream,indent,allowance,context,level):\n objid=id(object)\n if objid in context:\n stream.write(_recursion(object))\n self._recursive=True\n self._readable=False\n return\n rep=self._repr(object,context,level)\n max_width=self._width -indent -allowance\n if len(rep)>max_width:\n p=self._dispatch.get(type(object).__repr__,None )\n if p is not None :\n context[objid]=1\n p(self,object,stream,indent,allowance,context,level+1)\n del context[objid]\n return\n elif isinstance(object,dict):\n context[objid]=1\n self._pprint_dict(object,stream,indent,allowance,\n context,level+1)\n del context[objid]\n return\n stream.write(rep)\n \n _dispatch={}\n \n def _pprint_dict(self,object,stream,indent,allowance,context,level):\n write=stream.write\n write('{')\n if self._indent_per_level >1:\n write((self._indent_per_level -1)*' ')\n length=len(object)\n if length:\n items=sorted(object.items(),key=_safe_tuple)\n self._format_dict_items(items,stream,indent,allowance+1,\n context,level)\n write('}')\n \n _dispatch[dict.__repr__]=_pprint_dict\n \n def _pprint_ordered_dict(self,object,stream,indent,allowance,context,level):\n if not len(object):\n stream.write(repr(object))\n return\n cls=object.__class__\n stream.write(cls.__name__+'(')\n self._format(list(object.items()),stream,\n indent+len(cls.__name__)+1,allowance+1,\n context,level)\n stream.write(')')\n \n _dispatch[_collections.OrderedDict.__repr__]=_pprint_ordered_dict\n \n def _pprint_list(self,object,stream,indent,allowance,context,level):\n stream.write('[')\n self._format_items(object,stream,indent,allowance+1,\n context,level)\n stream.write(']')\n \n _dispatch[list.__repr__]=_pprint_list\n \n def _pprint_tuple(self,object,stream,indent,allowance,context,level):\n stream.write('(')\n 
endchar=',)'if len(object)==1 else ')'\n self._format_items(object,stream,indent,allowance+len(endchar),\n context,level)\n stream.write(endchar)\n \n _dispatch[tuple.__repr__]=_pprint_tuple\n \n def _pprint_set(self,object,stream,indent,allowance,context,level):\n if not len(object):\n stream.write(repr(object))\n return\n typ=object.__class__\n if typ is set:\n stream.write('{')\n endchar='}'\n else :\n stream.write(typ.__name__+'({')\n endchar='})'\n indent +=len(typ.__name__)+1\n object=sorted(object,key=_safe_key)\n self._format_items(object,stream,indent,allowance+len(endchar),\n context,level)\n stream.write(endchar)\n \n _dispatch[set.__repr__]=_pprint_set\n _dispatch[frozenset.__repr__]=_pprint_set\n \n def _pprint_str(self,object,stream,indent,allowance,context,level):\n write=stream.write\n if not len(object):\n write(repr(object))\n return\n chunks=[]\n lines=object.splitlines(True )\n if level ==1:\n indent +=1\n allowance +=1\n max_width1=max_width=self._width -indent\n for i,line in enumerate(lines):\n rep=repr(line)\n if i ==len(lines)-1:\n max_width1 -=allowance\n if len(rep)<=max_width1:\n chunks.append(rep)\n else :\n \n parts=re.findall(r'\\S*\\s*',line)\n assert parts\n assert not parts[-1]\n parts.pop()\n max_width2=max_width\n current=''\n for j,part in enumerate(parts):\n candidate=current+part\n if j ==len(parts)-1 and i ==len(lines)-1:\n max_width2 -=allowance\n if len(repr(candidate))>max_width2:\n if current:\n chunks.append(repr(current))\n current=part\n else :\n current=candidate\n if current:\n chunks.append(repr(current))\n if len(chunks)==1:\n write(rep)\n return\n if level ==1:\n write('(')\n for i,rep in enumerate(chunks):\n if i >0:\n write('\\n'+' '*indent)\n write(rep)\n if level ==1:\n write(')')\n \n _dispatch[str.__repr__]=_pprint_str\n \n def _pprint_bytes(self,object,stream,indent,allowance,context,level):\n write=stream.write\n if len(object)<=4:\n write(repr(object))\n return\n parens=level ==1\n if parens:\n indent +=1\n allowance +=1\n write('(')\n delim=''\n for rep in _wrap_bytes_repr(object,self._width -indent,allowance):\n write(delim)\n write(rep)\n if not delim:\n delim='\\n'+' '*indent\n if parens:\n write(')')\n \n _dispatch[bytes.__repr__]=_pprint_bytes\n \n def _pprint_bytearray(self,object,stream,indent,allowance,context,level):\n write=stream.write\n write('bytearray(')\n self._pprint_bytes(bytes(object),stream,indent+10,\n allowance+1,context,level+1)\n write(')')\n \n _dispatch[bytearray.__repr__]=_pprint_bytearray\n \n def _pprint_mappingproxy(self,object,stream,indent,allowance,context,level):\n stream.write('mappingproxy(')\n self._format(object.copy(),stream,indent+13,allowance+1,\n context,level)\n stream.write(')')\n \n _dispatch[_types.MappingProxyType.__repr__]=_pprint_mappingproxy\n \n def _format_dict_items(self,items,stream,indent,allowance,context,\n level):\n write=stream.write\n indent +=self._indent_per_level\n delimnl=',\\n'+' '*indent\n last_index=len(items)-1\n for i,(key,ent)in enumerate(items):\n last=i ==last_index\n rep=self._repr(key,context,level)\n write(rep)\n write(': ')\n self._format(ent,stream,indent+len(rep)+2,\n allowance if last else 1,\n context,level)\n if not last:\n write(delimnl)\n \n def _format_items(self,items,stream,indent,allowance,context,level):\n write=stream.write\n indent +=self._indent_per_level\n if self._indent_per_level >1:\n write((self._indent_per_level -1)*' ')\n delimnl=',\\n'+' '*indent\n delim=''\n width=max_width=self._width -indent+1\n it=iter(items)\n try :\n 
next_ent=next(it)\n except StopIteration:\n return\n last=False\n while not last:\n ent=next_ent\n try :\n next_ent=next(it)\n except StopIteration:\n last=True\n max_width -=allowance\n width -=allowance\n if self._compact:\n rep=self._repr(ent,context,level)\n w=len(rep)+2\n if width =w:\n width -=w\n write(delim)\n delim=', '\n write(rep)\n continue\n write(delim)\n delim=delimnl\n self._format(ent,stream,indent,\n allowance if last else 1,\n context,level)\n \n def _repr(self,object,context,level):\n repr,readable,recursive=self.format(object,context.copy(),\n self._depth,level)\n if not readable:\n self._readable=False\n if recursive:\n self._recursive=True\n return repr\n \n def format(self,object,context,maxlevels,level):\n ''\n\n\n \n return _safe_repr(object,context,maxlevels,level)\n \n def _pprint_default_dict(self,object,stream,indent,allowance,context,level):\n if not len(object):\n stream.write(repr(object))\n return\n rdf=self._repr(object.default_factory,context,level)\n cls=object.__class__\n indent +=len(cls.__name__)+1\n stream.write('%s(%s,\\n%s'%(cls.__name__,rdf,' '*indent))\n self._pprint_dict(object,stream,indent,allowance+1,context,level)\n stream.write(')')\n \n _dispatch[_collections.defaultdict.__repr__]=_pprint_default_dict\n \n def _pprint_counter(self,object,stream,indent,allowance,context,level):\n if not len(object):\n stream.write(repr(object))\n return\n cls=object.__class__\n stream.write(cls.__name__+'({')\n if self._indent_per_level >1:\n stream.write((self._indent_per_level -1)*' ')\n items=object.most_common()\n self._format_dict_items(items,stream,\n indent+len(cls.__name__)+1,allowance+2,\n context,level)\n stream.write('})')\n \n _dispatch[_collections.Counter.__repr__]=_pprint_counter\n \n def _pprint_chain_map(self,object,stream,indent,allowance,context,level):\n if not len(object.maps):\n stream.write(repr(object))\n return\n cls=object.__class__\n stream.write(cls.__name__+'(')\n indent +=len(cls.__name__)+1\n for i,m in enumerate(object.maps):\n if i ==len(object.maps)-1:\n self._format(m,stream,indent,allowance+1,context,level)\n stream.write(')')\n else :\n self._format(m,stream,indent,1,context,level)\n stream.write(',\\n'+' '*indent)\n \n _dispatch[_collections.ChainMap.__repr__]=_pprint_chain_map\n \n def _pprint_deque(self,object,stream,indent,allowance,context,level):\n if not len(object):\n stream.write(repr(object))\n return\n cls=object.__class__\n stream.write(cls.__name__+'(')\n indent +=len(cls.__name__)+1\n stream.write('[')\n if object.maxlen is None :\n self._format_items(object,stream,indent,allowance+2,\n context,level)\n stream.write('])')\n else :\n self._format_items(object,stream,indent,2,\n context,level)\n rml=self._repr(object.maxlen,context,level)\n stream.write('],\\n%smaxlen=%s)'%(' '*indent,rml))\n \n _dispatch[_collections.deque.__repr__]=_pprint_deque\n \n def _pprint_user_dict(self,object,stream,indent,allowance,context,level):\n self._format(object.data,stream,indent,allowance,context,level -1)\n \n _dispatch[_collections.UserDict.__repr__]=_pprint_user_dict\n \n def _pprint_user_list(self,object,stream,indent,allowance,context,level):\n self._format(object.data,stream,indent,allowance,context,level -1)\n \n _dispatch[_collections.UserList.__repr__]=_pprint_user_list\n \n def _pprint_user_string(self,object,stream,indent,allowance,context,level):\n self._format(object.data,stream,indent,allowance,context,level -1)\n \n _dispatch[_collections.UserString.__repr__]=_pprint_user_string\n \n \n \ndef 
_safe_repr(object,context,maxlevels,level):\n typ=type(object)\n if typ in _builtin_scalars:\n return repr(object),True ,False\n \n r=getattr(typ,\"__repr__\",None )\n if issubclass(typ,dict)and r is dict.__repr__:\n if not object:\n return \"{}\",True ,False\n objid=id(object)\n if maxlevels and level >=maxlevels:\n return \"{...}\",False ,objid in context\n if objid in context:\n return _recursion(object),False ,True\n context[objid]=1\n readable=True\n recursive=False\n components=[]\n append=components.append\n level +=1\n saferepr=_safe_repr\n items=sorted(object.items(),key=_safe_tuple)\n for k,v in items:\n krepr,kreadable,krecur=saferepr(k,context,maxlevels,level)\n vrepr,vreadable,vrecur=saferepr(v,context,maxlevels,level)\n append(\"%s: %s\"%(krepr,vrepr))\n readable=readable and kreadable and vreadable\n if krecur or vrecur:\n recursive=True\n del context[objid]\n return \"{%s}\"%\", \".join(components),readable,recursive\n \n if (issubclass(typ,list)and r is list.__repr__)or\\\n (issubclass(typ,tuple)and r is tuple.__repr__):\n if issubclass(typ,list):\n if not object:\n return \"[]\",True ,False\n format=\"[%s]\"\n elif len(object)==1:\n format=\"(%s,)\"\n else :\n if not object:\n return \"()\",True ,False\n format=\"(%s)\"\n objid=id(object)\n if maxlevels and level >=maxlevels:\n return format %\"...\",False ,objid in context\n if objid in context:\n return _recursion(object),False ,True\n context[objid]=1\n readable=True\n recursive=False\n components=[]\n append=components.append\n level +=1\n for o in object:\n orepr,oreadable,orecur=_safe_repr(o,context,maxlevels,level)\n append(orepr)\n if not oreadable:\n readable=False\n if orecur:\n recursive=True\n del context[objid]\n return format %\", \".join(components),readable,recursive\n \n rep=repr(object)\n return rep,(rep and not rep.startswith('<')),False\n \n_builtin_scalars=frozenset({str,bytes,bytearray,int,float,complex,\nbool,type(None )})\n\ndef _recursion(object):\n return (\"\"\n %(type(object).__name__,id(object)))\n \n \ndef _perfcheck(object=None ):\n import time\n if object is None :\n object=[(\"string\",(1,2),[3,4],{5:6,7:8})]*100000\n p=PrettyPrinter()\n t1=time.time()\n _safe_repr(object,{},None ,0)\n t2=time.time()\n p.pformat(object)\n t3=time.time()\n print(\"_safe_repr:\",t2 -t1)\n print(\"pformat:\",t3 -t2)\n \ndef _wrap_bytes_repr(object,width,allowance):\n current=b''\n last=len(object)//4 *4\n for i in range(0,len(object),4):\n part=object[i:i+4]\n candidate=current+part\n if i ==last:\n width -=allowance\n if len(repr(candidate))>width:\n if current:\n yield repr(current)\n current=part\n else :\n current=candidate\n if current:\n yield repr(current)\n \nif __name__ ==\"__main__\":\n _perfcheck()\n", ["collections", "io", "re", "sys", "time", "types"]], "profile": [".py", "#! 
/usr/bin/env python3\n\n\n\n\n\n\n\n\"\"\"Class for profiling Python code.\"\"\"\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nimport sys\nimport time\nimport marshal\n\n__all__=[\"run\",\"runctx\",\"Profile\"]\n\n\n\n\n\n\n\n\n\nclass _Utils:\n ''\n\n\n \n \n def __init__(self,profiler):\n self.profiler=profiler\n \n def run(self,statement,filename,sort):\n prof=self.profiler()\n try :\n prof.run(statement)\n except SystemExit:\n pass\n finally :\n self._show(prof,filename,sort)\n \n def runctx(self,statement,globals,locals,filename,sort):\n prof=self.profiler()\n try :\n prof.runctx(statement,globals,locals)\n except SystemExit:\n pass\n finally :\n self._show(prof,filename,sort)\n \n def _show(self,prof,filename,sort):\n if filename is not None :\n prof.dump_stats(filename)\n else :\n prof.print_stats(sort)\n \n \n \n \n \n \n \ndef run(statement,filename=None ,sort=-1):\n ''\n\n\n\n\n\n\n\n\n \n return _Utils(Profile).run(statement,filename,sort)\n \ndef runctx(statement,globals,locals,filename=None ,sort=-1):\n ''\n\n\n\n \n return _Utils(Profile).runctx(statement,globals,locals,filename,sort)\n \n \nclass Profile:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n bias=0\n \n def __init__(self,timer=None ,bias=None ):\n self.timings={}\n self.cur=None\n self.cmd=\"\"\n self.c_func_name=\"\"\n \n if bias is None :\n bias=self.bias\n self.bias=bias\n \n if not timer:\n self.timer=self.get_time=time.process_time\n self.dispatcher=self.trace_dispatch_i\n else :\n self.timer=timer\n t=self.timer()\n try :\n length=len(t)\n except TypeError:\n self.get_time=timer\n self.dispatcher=self.trace_dispatch_i\n else :\n if length ==2:\n self.dispatcher=self.trace_dispatch\n else :\n self.dispatcher=self.trace_dispatch_l\n \n \n \n \n \n def get_time_timer(timer=timer,sum=sum):\n return sum(timer())\n self.get_time=get_time_timer\n self.t=self.get_time()\n self.simulate_call('profiler')\n \n \n \n def trace_dispatch(self,frame,event,arg):\n timer=self.timer\n t=timer()\n t=t[0]+t[1]-self.t -self.bias\n \n if event ==\"c_call\":\n self.c_func_name=arg.__name__\n \n if self.dispatch[event](self,frame,t):\n t=timer()\n self.t=t[0]+t[1]\n else :\n r=timer()\n self.t=r[0]+r[1]-t\n \n \n \n \n def trace_dispatch_i(self,frame,event,arg):\n timer=self.timer\n t=timer()-self.t -self.bias\n \n if event ==\"c_call\":\n self.c_func_name=arg.__name__\n \n if self.dispatch[event](self,frame,t):\n self.t=timer()\n else :\n self.t=timer()-t\n \n \n \n \n def trace_dispatch_mac(self,frame,event,arg):\n timer=self.timer\n t=timer()/60.0 -self.t -self.bias\n \n if event ==\"c_call\":\n self.c_func_name=arg.__name__\n \n if self.dispatch[event](self,frame,t):\n self.t=timer()/60.0\n else :\n self.t=timer()/60.0 -t\n \n \n \n def trace_dispatch_l(self,frame,event,arg):\n get_time=self.get_time\n t=get_time()-self.t -self.bias\n \n if event ==\"c_call\":\n self.c_func_name=arg.__name__\n \n if self.dispatch[event](self,frame,t):\n self.t=get_time()\n else :\n self.t=get_time()-t\n \n \n \n \n \n \n \n \n def trace_dispatch_exception(self,frame,t):\n rpt,rit,ret,rfn,rframe,rcur=self.cur\n if (rframe is not frame)and rcur:\n return self.trace_dispatch_return(rframe,t)\n self.cur=rpt,rit+t,ret,rfn,rframe,rcur\n return 1\n \n \n def trace_dispatch_call(self,frame,t):\n if self.cur and frame.f_back is not self.cur[-2]:\n rpt,rit,ret,rfn,rframe,rcur=self.cur\n if not isinstance(rframe,Profile.fake_frame):\n assert rframe.f_back is frame.f_back,(\"Bad call\",rfn,\n rframe,rframe.f_back,\n 
frame,frame.f_back)\n self.trace_dispatch_return(rframe,0)\n assert (self.cur is None or\\\n frame.f_back is self.cur[-2]),(\"Bad call\",\n self.cur[-3])\n fcode=frame.f_code\n fn=(fcode.co_filename,fcode.co_firstlineno,fcode.co_name)\n self.cur=(t,0,0,fn,frame,self.cur)\n timings=self.timings\n if fn in timings:\n cc,ns,tt,ct,callers=timings[fn]\n timings[fn]=cc,ns+1,tt,ct,callers\n else :\n timings[fn]=0,0,0,0,{}\n return 1\n \n def trace_dispatch_c_call(self,frame,t):\n fn=(\"\",0,self.c_func_name)\n self.cur=(t,0,0,fn,frame,self.cur)\n timings=self.timings\n if fn in timings:\n cc,ns,tt,ct,callers=timings[fn]\n timings[fn]=cc,ns+1,tt,ct,callers\n else :\n timings[fn]=0,0,0,0,{}\n return 1\n \n def trace_dispatch_return(self,frame,t):\n if frame is not self.cur[-2]:\n assert frame is self.cur[-2].f_back,(\"Bad return\",self.cur[-3])\n self.trace_dispatch_return(self.cur[-2],0)\n \n \n \n \n rpt,rit,ret,rfn,frame,rcur=self.cur\n rit=rit+t\n frame_total=rit+ret\n \n ppt,pit,pet,pfn,pframe,pcur=rcur\n self.cur=ppt,pit+rpt,pet+frame_total,pfn,pframe,pcur\n \n timings=self.timings\n cc,ns,tt,ct,callers=timings[rfn]\n if not ns:\n \n \n \n \n ct=ct+frame_total\n cc=cc+1\n \n if pfn in callers:\n callers[pfn]=callers[pfn]+1\n \n \n \n else :\n callers[pfn]=1\n \n timings[rfn]=cc,ns -1,tt+rit,ct,callers\n \n return 1\n \n \n dispatch={\n \"call\":trace_dispatch_call,\n \"exception\":trace_dispatch_exception,\n \"return\":trace_dispatch_return,\n \"c_call\":trace_dispatch_c_call,\n \"c_exception\":trace_dispatch_return,\n \"c_return\":trace_dispatch_return,\n }\n \n \n \n \n \n \n \n \n def set_cmd(self,cmd):\n if self.cur[-1]:return\n self.cmd=cmd\n self.simulate_call(cmd)\n \n class fake_code:\n def __init__(self,filename,line,name):\n self.co_filename=filename\n self.co_line=line\n self.co_name=name\n self.co_firstlineno=0\n \n def __repr__(self):\n return repr((self.co_filename,self.co_line,self.co_name))\n \n class fake_frame:\n def __init__(self,code,prior):\n self.f_code=code\n self.f_back=prior\n \n def simulate_call(self,name):\n code=self.fake_code('profile',0,name)\n if self.cur:\n pframe=self.cur[-2]\n else :\n pframe=None\n frame=self.fake_frame(code,pframe)\n self.dispatch['call'](self,frame,0)\n \n \n \n \n def simulate_cmd_complete(self):\n get_time=self.get_time\n t=get_time()-self.t\n while self.cur[-1]:\n \n \n self.dispatch['return'](self,self.cur[-2],t)\n t=0\n self.t=get_time()-t\n \n \n def print_stats(self,sort=-1):\n import pstats\n pstats.Stats(self).strip_dirs().sort_stats(sort).\\\n print_stats()\n \n def dump_stats(self,file):\n with open(file,'wb')as f:\n self.create_stats()\n marshal.dump(self.stats,f)\n \n def create_stats(self):\n self.simulate_cmd_complete()\n self.snapshot_stats()\n \n def snapshot_stats(self):\n self.stats={}\n for func,(cc,ns,tt,ct,callers)in self.timings.items():\n callers=callers.copy()\n nc=0\n for callcnt in callers.values():\n nc +=callcnt\n self.stats[func]=cc,nc,tt,ct,callers\n \n \n \n \n \n def run(self,cmd):\n import __main__\n dict=__main__.__dict__\n return self.runctx(cmd,dict,dict)\n \n def runctx(self,cmd,globals,locals):\n self.set_cmd(cmd)\n sys.setprofile(self.dispatcher)\n try :\n exec(cmd,globals,locals)\n finally :\n sys.setprofile(None )\n return self\n \n \n def runcall(self,func,*args,**kw):\n self.set_cmd(repr(func))\n sys.setprofile(self.dispatcher)\n try :\n return func(*args,**kw)\n finally :\n sys.setprofile(None )\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n 
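Editorial aside, not part of the bundled file: a short sketch of driving the Profile class defined above, assuming it matches CPython's pure-Python profile module; the work() function is invented for illustration.

    import profile

    def work(n=50_000):
        return sum(i * i for i in range(n))

    p = profile.Profile()        # defaults to time.process_time via trace_dispatch_i
    p.runcall(work)              # trace_dispatch_call/_return accumulate per-function timings
    p.print_stats(sort="cumulative")

profile.run("work()") and the main() command-line entry point further down wrap the same machinery without constructing Profile directly.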
\n \n \n \n \n \n \n \n \n def calibrate(self,m,verbose=0):\n if self.__class__ is not Profile:\n raise TypeError(\"Subclasses must override .calibrate().\")\n \n saved_bias=self.bias\n self.bias=0\n try :\n return self._calibrate_inner(m,verbose)\n finally :\n self.bias=saved_bias\n \n def _calibrate_inner(self,m,verbose):\n get_time=self.get_time\n \n \n \n \n \n \n \n def f1(n):\n for i in range(n):\n x=1\n \n def f(m,f1=f1):\n for i in range(m):\n f1(100)\n \n f(m)\n \n \n t0=get_time()\n f(m)\n t1=get_time()\n elapsed_noprofile=t1 -t0\n if verbose:\n print(\"elapsed time without profiling =\",elapsed_noprofile)\n \n \n \n \n p=Profile()\n t0=get_time()\n p.runctx('f(m)',globals(),locals())\n t1=get_time()\n elapsed_profile=t1 -t0\n if verbose:\n print(\"elapsed time with profiling =\",elapsed_profile)\n \n \n total_calls=0.0\n reported_time=0.0\n for (filename,line,funcname),(cc,ns,tt,ct,callers)in\\\n p.timings.items():\n if funcname in (\"f\",\"f1\"):\n total_calls +=cc\n reported_time +=tt\n \n if verbose:\n print(\"'CPU seconds' profiler reported =\",reported_time)\n print(\"total # calls =\",total_calls)\n if total_calls !=m+1:\n raise ValueError(\"internal error: total calls = %d\"%total_calls)\n \n \n \n \n \n mean=(reported_time -elapsed_noprofile)/2.0 /total_calls\n if verbose:\n print(\"mean stopwatch overhead per profile event =\",mean)\n return mean\n \n \n \ndef main():\n import os\n from optparse import OptionParser\n \n usage=\"profile.py [-o output_file_path] [-s sort] scriptfile [arg] ...\"\n parser=OptionParser(usage=usage)\n parser.allow_interspersed_args=False\n parser.add_option('-o','--outfile',dest=\"outfile\",\n help=\"Save stats to \",default=None )\n parser.add_option('-s','--sort',dest=\"sort\",\n help=\"Sort order when printing to stdout, based on pstats.Stats class\",\n default=-1)\n \n if not sys.argv[1:]:\n parser.print_usage()\n sys.exit(2)\n \n (options,args)=parser.parse_args()\n sys.argv[:]=args\n \n if len(args)>0:\n progname=args[0]\n sys.path.insert(0,os.path.dirname(progname))\n with open(progname,'rb')as fp:\n code=compile(fp.read(),progname,'exec')\n globs={\n '__file__':progname,\n '__name__':'__main__',\n '__package__':None ,\n '__cached__':None ,\n }\n runctx(code,globs,None ,options.outfile,options.sort)\n else :\n parser.print_usage()\n return parser\n \n \nif __name__ =='__main__':\n main()\n", ["__main__", "marshal", "optparse", "os", "pstats", "sys", "time"]], "pwd": [".py", "\ndef getpwuid():\n pass\n", []], "pydoc": [".py", "#!/usr/bin/env python3\n''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n__all__=['help']\n__author__=\"Ka-Ping Yee \"\n__date__=\"26 February 2001\"\n\n__credits__=\"\"\"Guido van Rossum, for an excellent programming language.\nTommy Burnette, the original creator of manpy.\nPaul Prescod, for all his work on onlinehelp.\nRichard Chamberlain, for the first implementation of textdoc.\n\"\"\"\n\n\n\n\n\n\n\n\nimport builtins\nimport importlib._bootstrap\nimport importlib._bootstrap_external\nimport importlib.machinery\nimport importlib.util\nimport inspect\nimport io\nimport os\nimport pkgutil\nimport platform\nimport re\nimport sys\nimport time\nimport tokenize\nimport urllib.parse\nimport warnings\nfrom collections import deque\nfrom reprlib import Repr\nfrom traceback import format_exception_only\n\n\n\n\ndef pathdirs():\n ''\n dirs=[]\n normdirs=[]\n for dir in sys.path:\n dir=os.path.abspath(dir or '.')\n normdir=os.path.normcase(dir)\n if normdir not in normdirs and 
os.path.isdir(dir):\n dirs.append(dir)\n normdirs.append(normdir)\n return dirs\n \ndef getdoc(object):\n ''\n result=inspect.getdoc(object)or inspect.getcomments(object)\n return result and re.sub('^ *\\n','',result.rstrip())or ''\n \ndef splitdoc(doc):\n ''\n lines=doc.strip().split('\\n')\n if len(lines)==1:\n return lines[0],''\n elif len(lines)>=2 and not lines[1].rstrip():\n return lines[0],'\\n'.join(lines[2:])\n return '','\\n'.join(lines)\n \ndef classname(object,modname):\n ''\n name=object.__name__\n if object.__module__ !=modname:\n name=object.__module__+'.'+name\n return name\n \ndef isdata(object):\n ''\n return not (inspect.ismodule(object)or inspect.isclass(object)or\n inspect.isroutine(object)or inspect.isframe(object)or\n inspect.istraceback(object)or inspect.iscode(object))\n \ndef replace(text,*pairs):\n ''\n while pairs:\n text=pairs[1].join(text.split(pairs[0]))\n pairs=pairs[2:]\n return text\n \ndef cram(text,maxlen):\n ''\n if len(text)>maxlen:\n pre=max(0,(maxlen -3)//2)\n post=max(0,maxlen -3 -pre)\n return text[:pre]+'...'+text[len(text)-post:]\n return text\n \n_re_stripid=re.compile(r' at 0x[0-9a-f]{6,16}(>+)$',re.IGNORECASE)\ndef stripid(text):\n ''\n \n return _re_stripid.sub(r'\\1',text)\n \ndef _is_some_method(obj):\n return (inspect.isfunction(obj)or\n inspect.ismethod(obj)or\n inspect.isbuiltin(obj)or\n inspect.ismethoddescriptor(obj))\n \ndef _is_bound_method(fn):\n ''\n\n\n \n if inspect.ismethod(fn):\n return True\n if inspect.isbuiltin(fn):\n self=getattr(fn,'__self__',None )\n return not (inspect.ismodule(self)or (self is None ))\n return False\n \n \ndef allmethods(cl):\n methods={}\n for key,value in inspect.getmembers(cl,_is_some_method):\n methods[key]=1\n for base in cl.__bases__:\n methods.update(allmethods(base))\n for key in methods.keys():\n methods[key]=getattr(cl,key)\n return methods\n \ndef _split_list(s,predicate):\n ''\n\n\n\n\n \n \n yes=[]\n no=[]\n for x in s:\n if predicate(x):\n yes.append(x)\n else :\n no.append(x)\n return yes,no\n \ndef visiblename(name,all=None ,obj=None ):\n ''\n \n \n if name in {'__author__','__builtins__','__cached__','__credits__',\n '__date__','__doc__','__file__','__spec__',\n '__loader__','__module__','__name__','__package__',\n '__path__','__qualname__','__slots__','__version__'}:\n return 0\n \n if name.startswith('__')and name.endswith('__'):return 1\n \n if name.startswith('_')and hasattr(obj,'_fields'):\n return True\n if all is not None :\n \n return name in all\n else :\n return not name.startswith('_')\n \ndef classify_class_attrs(object):\n ''\n results=[]\n for (name,kind,cls,value)in inspect.classify_class_attrs(object):\n if inspect.isdatadescriptor(value):\n kind='data descriptor'\n results.append((name,kind,cls,value))\n return results\n \ndef sort_attributes(attrs,object):\n ''\n \n \n fields=getattr(object,'_fields',[])\n try :\n field_order={name:i -len(fields)for (i,name)in enumerate(fields)}\n except TypeError:\n field_order={}\n keyfunc=lambda attr:(field_order.get(attr[0],0),attr[0])\n attrs.sort(key=keyfunc)\n \n \n \ndef ispackage(path):\n ''\n if os.path.isdir(path):\n for ext in ('.py','.pyc'):\n if os.path.isfile(os.path.join(path,'__init__'+ext)):\n return True\n return False\n \ndef source_synopsis(file):\n line=file.readline()\n while line[:1]=='#'or not line.strip():\n line=file.readline()\n if not line:break\n line=line.strip()\n if line[:4]=='r\"\"\"':line=line[1:]\n if line[:3]=='\"\"\"':\n line=line[3:]\n if line[-1:]=='\\\\':line=line[:-1]\n while not 
line.strip():\n line=file.readline()\n if not line:break\n result=line.split('\"\"\"')[0].strip()\n else :result=None\n return result\n \ndef synopsis(filename,cache={}):\n ''\n mtime=os.stat(filename).st_mtime\n lastupdate,result=cache.get(filename,(None ,None ))\n if lastupdate is None or lastupdate ','>')\n \n def repr(self,object):\n return Repr.repr(self,object)\n \n def repr1(self,x,level):\n if hasattr(type(x),'__name__'):\n methodname='repr_'+'_'.join(type(x).__name__.split())\n if hasattr(self,methodname):\n return getattr(self,methodname)(x,level)\n return self.escape(cram(stripid(repr(x)),self.maxother))\n \n def repr_string(self,x,level):\n test=cram(x,self.maxstring)\n testrepr=repr(test)\n if '\\\\'in test and '\\\\'not in replace(testrepr,r'\\\\',''):\n \n \n return 'r'+testrepr[0]+self.escape(test)+testrepr[0]\n return re.sub(r'((\\\\[\\\\abfnrtv\\'\"]|\\\\[0-9]..|\\\\x..|\\\\u....)+)',\n r'\\1',\n self.escape(testrepr))\n \n repr_str=repr_string\n \n def repr_instance(self,x,level):\n try :\n return self.escape(cram(stripid(repr(x)),self.maxstring))\n except :\n return self.escape('<%s instance>'%x.__class__.__name__)\n \n repr_unicode=repr_string\n \nclass HTMLDoc(Doc):\n ''\n \n \n \n _repr_instance=HTMLRepr()\n repr=_repr_instance.repr\n escape=_repr_instance.escape\n \n def page(self,title,contents):\n ''\n return '''\\\n\nPython: %s\n\n\n%s\n'''%(title,contents)\n \n def heading(self,title,fgcol,bgcol,extras=''):\n ''\n return '''\n\n\n
 
\n 
%s
%s
\n '''%(bgcol,fgcol,title,fgcol,extras or ' ')\n \n def section(self,title,fgcol,bgcol,contents,width=6,\n prelude='',marginalia=None ,gap=' '):\n ''\n if marginalia is None :\n marginalia=''+' '*width+''\n result='''

\n\n\n\n '''%(bgcol,fgcol,title)\n if prelude:\n result=result+'''\n\n\n'''%(bgcol,marginalia,prelude,gap)\n else :\n result=result+'''\n'''%(bgcol,marginalia,gap)\n \n return result+'\\n
 
\n%s
%s%s
%s
%s%s%s
'%contents\n \n def bigsection(self,title,*args):\n ''\n title='%s'%title\n return self.section(title,*args)\n \n def preformat(self,text):\n ''\n text=self.escape(text.expandtabs())\n return replace(text,'\\n\\n','\\n \\n','\\n\\n','\\n \\n',\n ' ',' ','\\n','
\\n')\n \n def multicolumn(self,list,format,cols=4):\n ''\n result=''\n rows=(len(list)+cols -1)//cols\n for col in range(cols):\n result=result+''%(100 //cols)\n for i in range(rows *col,rows *col+rows):\n if i \\n'\n result=result+''\n return '%s
'%result\n \n def grey(self,text):return '%s'%text\n \n def namelink(self,name,*dicts):\n ''\n for dict in dicts:\n if name in dict:\n return '
%s'%(dict[name],name)\n return name\n \n def classlink(self,object,modname):\n ''\n name,module=object.__name__,sys.modules.get(object.__module__)\n if hasattr(module,name)and getattr(module,name)is object:\n return '%s'%(\n module.__name__,name,classname(object,modname))\n return classname(object,modname)\n \n def modulelink(self,object):\n ''\n return '%s'%(object.__name__,object.__name__)\n \n def modpkglink(self,modpkginfo):\n ''\n name,path,ispackage,shadowed=modpkginfo\n if shadowed:\n return self.grey(name)\n if path:\n url='%s.%s.html'%(path,name)\n else :\n url='%s.html'%name\n if ispackage:\n text='%s (package)'%name\n else :\n text=name\n return '%s'%(url,text)\n \n def filelink(self,url,path):\n ''\n return '%s'%(url,path)\n \n def markup(self,text,escape=None ,funcs={},classes={},methods={}):\n ''\n \n escape=escape or self.escape\n results=[]\n here=0\n pattern=re.compile(r'\\b((http|ftp)://\\S+[\\w/]|'\n r'RFC[- ]?(\\d+)|'\n r'PEP[- ]?(\\d+)|'\n r'(self\\.)?(\\w+))')\n while True :\n match=pattern.search(text,here)\n if not match:break\n start,end=match.span()\n results.append(escape(text[here:start]))\n \n all,scheme,rfc,pep,selfdot,name=match.groups()\n if scheme:\n url=escape(all).replace('\"','"')\n results.append('%s'%(url,url))\n elif rfc:\n url='http://www.rfc-editor.org/rfc/rfc%d.txt'%int(rfc)\n results.append('%s'%(url,escape(all)))\n elif pep:\n url='http://www.python.org/dev/peps/pep-%04d/'%int(pep)\n results.append('%s'%(url,escape(all)))\n elif selfdot:\n \n \n if text[end:end+1]=='(':\n results.append('self.'+self.namelink(name,methods))\n else :\n results.append('self.%s'%name)\n elif text[end:end+1]=='(':\n results.append(self.namelink(name,methods,funcs,classes))\n else :\n results.append(self.namelink(name,classes))\n here=end\n results.append(escape(text[here:]))\n return ''.join(results)\n \n \n \n def formattree(self,tree,modname,parent=None ):\n ''\n result=''\n for entry in tree:\n if type(entry)is type(()):\n c,bases=entry\n result=result+'

'\n result=result+self.classlink(c,modname)\n if bases and bases !=(parent,):\n parents=[]\n for base in bases:\n parents.append(self.classlink(base,modname))\n result=result+'('+', '.join(parents)+')'\n result=result+'\\n
'\n elif type(entry)is type([]):\n result=result+'
\\n%s
\\n'%self.formattree(\n entry,modname,c)\n return '
\\n%s
\\n'%result\n \n def docmodule(self,object,name=None ,mod=None ,*ignored):\n ''\n name=object.__name__\n try :\n all=object.__all__\n except AttributeError:\n all=None\n parts=name.split('.')\n links=[]\n for i in range(len(parts)-1):\n links.append(\n '%s'%\n ('.'.join(parts[:i+1]),parts[i]))\n linkedname='.'.join(links+parts[-1:])\n head='%s'%linkedname\n try :\n path=inspect.getabsfile(object)\n url=urllib.parse.quote(path)\n filelink=self.filelink(url,path)\n except TypeError:\n filelink='(built-in)'\n info=[]\n if hasattr(object,'__version__'):\n version=str(object.__version__)\n if version[:11]=='$'+'Revision: 'and version[-1:]=='$':\n version=version[11:-1].strip()\n info.append('version %s'%self.escape(version))\n if hasattr(object,'__date__'):\n info.append(self.escape(str(object.__date__)))\n if info:\n head=head+' (%s)'%', '.join(info)\n docloc=self.getdocloc(object)\n if docloc is not None :\n docloc='
Module Reference'%locals()\n else :\n docloc=''\n result=self.heading(\n head,'#ffffff','#7799ee',\n 'index
'+filelink+docloc)\n \n modules=inspect.getmembers(object,inspect.ismodule)\n \n classes,cdict=[],{}\n for key,value in inspect.getmembers(object,inspect.isclass):\n \n if (all is not None or\n (inspect.getmodule(value)or object)is object):\n if visiblename(key,all,object):\n classes.append((key,value))\n cdict[key]=cdict[value]='#'+key\n for key,value in classes:\n for base in value.__bases__:\n key,modname=base.__name__,base.__module__\n module=sys.modules.get(modname)\n if modname !=name and module and hasattr(module,key):\n if getattr(module,key)is base:\n if not key in cdict:\n cdict[key]=cdict[base]=modname+'.html#'+key\n funcs,fdict=[],{}\n for key,value in inspect.getmembers(object,inspect.isroutine):\n \n if (all is not None or\n inspect.isbuiltin(value)or inspect.getmodule(value)is object):\n if visiblename(key,all,object):\n funcs.append((key,value))\n fdict[key]='#-'+key\n if inspect.isfunction(value):fdict[value]=fdict[key]\n data=[]\n for key,value in inspect.getmembers(object,isdata):\n if visiblename(key,all,object):\n data.append((key,value))\n \n doc=self.markup(getdoc(object),self.preformat,fdict,cdict)\n doc=doc and '%s'%doc\n result=result+'

%s

\\n'%doc\n \n if hasattr(object,'__path__'):\n modpkgs=[]\n for importer,modname,ispkg in pkgutil.iter_modules(object.__path__):\n modpkgs.append((modname,name,ispkg,0))\n modpkgs.sort()\n contents=self.multicolumn(modpkgs,self.modpkglink)\n result=result+self.bigsection(\n 'Package Contents','#ffffff','#aa55cc',contents)\n elif modules:\n contents=self.multicolumn(\n modules,lambda t:self.modulelink(t[1]))\n result=result+self.bigsection(\n 'Modules','#ffffff','#aa55cc',contents)\n \n if classes:\n classlist=[value for (key,value)in classes]\n contents=[\n self.formattree(inspect.getclasstree(classlist,1),name)]\n for key,value in classes:\n contents.append(self.document(value,key,name,fdict,cdict))\n result=result+self.bigsection(\n 'Classes','#ffffff','#ee77aa',' '.join(contents))\n if funcs:\n contents=[]\n for key,value in funcs:\n contents.append(self.document(value,key,name,fdict,cdict))\n result=result+self.bigsection(\n 'Functions','#ffffff','#eeaa77',' '.join(contents))\n if data:\n contents=[]\n for key,value in data:\n contents.append(self.document(value,key))\n result=result+self.bigsection(\n 'Data','#ffffff','#55aa55','
\\n'.join(contents))\n if hasattr(object,'__author__'):\n contents=self.markup(str(object.__author__),self.preformat)\n result=result+self.bigsection(\n 'Author','#ffffff','#7799ee',contents)\n if hasattr(object,'__credits__'):\n contents=self.markup(str(object.__credits__),self.preformat)\n result=result+self.bigsection(\n 'Credits','#ffffff','#7799ee',contents)\n \n return result\n \n def docclass(self,object,name=None ,mod=None ,funcs={},classes={},\n *ignored):\n ''\n realname=object.__name__\n name=name or realname\n bases=object.__bases__\n \n contents=[]\n push=contents.append\n \n \n class HorizontalRule:\n def __init__(self):\n self.needone=0\n def maybe(self):\n if self.needone:\n push('
\\n')\n self.needone=1\n hr=HorizontalRule()\n \n \n mro=deque(inspect.getmro(object))\n if len(mro)>2:\n hr.maybe()\n push('
Method resolution order:
\\n')\n for base in mro:\n push('
%s
\\n'%self.classlink(base,\n object.__module__))\n push('
\\n')\n \n def spill(msg,attrs,predicate):\n ok,attrs=_split_list(attrs,predicate)\n if ok:\n hr.maybe()\n push(msg)\n for name,kind,homecls,value in ok:\n try :\n value=getattr(object,name)\n except Exception:\n \n \n push(self._docdescriptor(name,value,mod))\n else :\n push(self.document(value,name,mod,\n funcs,classes,mdict,object))\n push('\\n')\n return attrs\n \n def spilldescriptors(msg,attrs,predicate):\n ok,attrs=_split_list(attrs,predicate)\n if ok:\n hr.maybe()\n push(msg)\n for name,kind,homecls,value in ok:\n push(self._docdescriptor(name,value,mod))\n return attrs\n \n def spilldata(msg,attrs,predicate):\n ok,attrs=_split_list(attrs,predicate)\n if ok:\n hr.maybe()\n push(msg)\n for name,kind,homecls,value in ok:\n base=self.docother(getattr(object,name),name,mod)\n if callable(value)or inspect.isdatadescriptor(value):\n doc=getattr(value,\"__doc__\",None )\n else :\n doc=None\n if doc is None :\n push('
%s
\\n'%base)\n else :\n doc=self.markup(getdoc(value),self.preformat,\n funcs,classes,mdict)\n doc='
%s'%doc\n push('
%s%s
\\n'%(base,doc))\n push('\\n')\n return attrs\n \n attrs=[(name,kind,cls,value)\n for name,kind,cls,value in classify_class_attrs(object)\n if visiblename(name,obj=object)]\n \n mdict={}\n for key,kind,homecls,value in attrs:\n mdict[key]=anchor='#'+name+'-'+key\n try :\n value=getattr(object,name)\n except Exception:\n \n \n pass\n try :\n \n \n mdict[value]=anchor\n except TypeError:\n pass\n \n while attrs:\n if mro:\n thisclass=mro.popleft()\n else :\n thisclass=attrs[0][2]\n attrs,inherited=_split_list(attrs,lambda t:t[2]is thisclass)\n \n if thisclass is builtins.object:\n attrs=inherited\n continue\n elif thisclass is object:\n tag='defined here'\n else :\n tag='inherited from %s'%self.classlink(thisclass,\n object.__module__)\n tag +=':
\\n'\n \n sort_attributes(attrs,object)\n \n \n attrs=spill('Methods %s'%tag,attrs,\n lambda t:t[1]=='method')\n attrs=spill('Class methods %s'%tag,attrs,\n lambda t:t[1]=='class method')\n attrs=spill('Static methods %s'%tag,attrs,\n lambda t:t[1]=='static method')\n attrs=spilldescriptors('Data descriptors %s'%tag,attrs,\n lambda t:t[1]=='data descriptor')\n attrs=spilldata('Data and other attributes %s'%tag,attrs,\n lambda t:t[1]=='data')\n assert attrs ==[]\n attrs=inherited\n \n contents=''.join(contents)\n \n if name ==realname:\n title='class %s'%(\n name,realname)\n else :\n title='%s = class %s'%(\n name,name,realname)\n if bases:\n parents=[]\n for base in bases:\n parents.append(self.classlink(base,object.__module__))\n title=title+'(%s)'%', '.join(parents)\n \n decl=''\n try :\n signature=inspect.signature(object)\n except (ValueError,TypeError):\n signature=None\n if signature:\n argspec=str(signature)\n if argspec and argspec !='()':\n decl=name+self.escape(argspec)+'\\n\\n'\n \n doc=getdoc(object)\n if decl:\n doc=decl+(doc or '')\n doc=self.markup(doc,self.preformat,funcs,classes,mdict)\n doc=doc and '%s
 
'%doc\n \n return self.section(title,'#000000','#ffc8d8',contents,3,doc)\n \n def formatvalue(self,object):\n ''\n return self.grey('='+self.repr(object))\n \n def docroutine(self,object,name=None ,mod=None ,\n funcs={},classes={},methods={},cl=None ):\n ''\n realname=object.__name__\n name=name or realname\n anchor=(cl and cl.__name__ or '')+'-'+name\n note=''\n skipdocs=0\n if _is_bound_method(object):\n imclass=object.__self__.__class__\n if cl:\n if imclass is not cl:\n note=' from '+self.classlink(imclass,mod)\n else :\n if object.__self__ is not None :\n note=' method of %s instance'%self.classlink(\n object.__self__.__class__,mod)\n else :\n note=' unbound %s method'%self.classlink(imclass,mod)\n \n if name ==realname:\n title='%s'%(anchor,realname)\n else :\n if (cl and realname in cl.__dict__ and\n cl.__dict__[realname]is object):\n reallink='%s'%(\n cl.__name__+'-'+realname,realname)\n skipdocs=1\n else :\n reallink=realname\n title='%s = %s'%(\n anchor,name,reallink)\n argspec=None\n if inspect.isroutine(object):\n try :\n signature=inspect.signature(object)\n except (ValueError,TypeError):\n signature=None\n if signature:\n argspec=str(signature)\n if realname =='':\n title='%s lambda '%name\n \n \n \n argspec=argspec[1:-1]\n if not argspec:\n argspec='(...)'\n \n decl=title+self.escape(argspec)+(note and self.grey(\n '%s'%note))\n \n if skipdocs:\n return '
%s
\\n'%decl\n else :\n doc=self.markup(\n getdoc(object),self.preformat,funcs,classes,methods)\n doc=doc and '
%s
'%doc\n return '
%s
%s
\\n'%(decl,doc)\n \n def _docdescriptor(self,name,value,mod):\n results=[]\n push=results.append\n \n if name:\n push('
%s
\\n'%name)\n if value.__doc__ is not None :\n doc=self.markup(getdoc(value),self.preformat)\n push('
%s
\\n'%doc)\n push('
\\n')\n \n return ''.join(results)\n \n def docproperty(self,object,name=None ,mod=None ,cl=None ):\n ''\n return self._docdescriptor(name,object,mod)\n \n def docother(self,object,name=None ,mod=None ,*ignored):\n ''\n lhs=name and '%s = '%name or ''\n return lhs+self.repr(object)\n \n def docdata(self,object,name=None ,mod=None ,cl=None ):\n ''\n return self._docdescriptor(name,object,mod)\n \n def index(self,dir,shadowed=None ):\n ''\n modpkgs=[]\n if shadowed is None :shadowed={}\n for importer,name,ispkg in pkgutil.iter_modules([dir]):\n if any((0xD800 <=ord(ch)<=0xDFFF)for ch in name):\n \n continue\n modpkgs.append((name,'',ispkg,name in shadowed))\n shadowed[name]=1\n \n modpkgs.sort()\n contents=self.multicolumn(modpkgs,self.modpkglink)\n return self.bigsection(dir,'#ffffff','#ee77aa',contents)\n \n \n \nclass TextRepr(Repr):\n ''\n def __init__(self):\n Repr.__init__(self)\n self.maxlist=self.maxtuple=20\n self.maxdict=10\n self.maxstring=self.maxother=100\n \n def repr1(self,x,level):\n if hasattr(type(x),'__name__'):\n methodname='repr_'+'_'.join(type(x).__name__.split())\n if hasattr(self,methodname):\n return getattr(self,methodname)(x,level)\n return cram(stripid(repr(x)),self.maxother)\n \n def repr_string(self,x,level):\n test=cram(x,self.maxstring)\n testrepr=repr(test)\n if '\\\\'in test and '\\\\'not in replace(testrepr,r'\\\\',''):\n \n \n return 'r'+testrepr[0]+test+testrepr[0]\n return testrepr\n \n repr_str=repr_string\n \n def repr_instance(self,x,level):\n try :\n return cram(stripid(repr(x)),self.maxstring)\n except :\n return '<%s instance>'%x.__class__.__name__\n \nclass TextDoc(Doc):\n ''\n \n \n \n _repr_instance=TextRepr()\n repr=_repr_instance.repr\n \n def bold(self,text):\n ''\n return ''.join(ch+'\\b'+ch for ch in text)\n \n def indent(self,text,prefix=' '):\n ''\n if not text:return ''\n lines=[prefix+line for line in text.split('\\n')]\n if lines:lines[-1]=lines[-1].rstrip()\n return '\\n'.join(lines)\n \n def section(self,title,contents):\n ''\n clean_contents=self.indent(contents).rstrip()\n return self.bold(title)+'\\n'+clean_contents+'\\n\\n'\n \n \n \n def formattree(self,tree,modname,parent=None ,prefix=''):\n ''\n result=''\n for entry in tree:\n if type(entry)is type(()):\n c,bases=entry\n result=result+prefix+classname(c,modname)\n if bases and bases !=(parent,):\n parents=(classname(c,modname)for c in bases)\n result=result+'(%s)'%', '.join(parents)\n result=result+'\\n'\n elif type(entry)is type([]):\n result=result+self.formattree(\n entry,modname,c,prefix+' ')\n return result\n \n def docmodule(self,object,name=None ,mod=None ):\n ''\n name=object.__name__\n synop,desc=splitdoc(getdoc(object))\n result=self.section('NAME',name+(synop and ' - '+synop))\n all=getattr(object,'__all__',None )\n docloc=self.getdocloc(object)\n if docloc is not None :\n result=result+self.section('MODULE REFERENCE',docloc+\"\"\"\n\nThe following documentation is automatically generated from the Python\nsource files. It may be incomplete, incorrect or include features that\nare considered implementation detail and may vary between Python\nimplementations. 
When in doubt, consult the module reference at the\nlocation listed above.\n\"\"\")\n \n if desc:\n result=result+self.section('DESCRIPTION',desc)\n \n classes=[]\n for key,value in inspect.getmembers(object,inspect.isclass):\n \n if (all is not None\n or (inspect.getmodule(value)or object)is object):\n if visiblename(key,all,object):\n classes.append((key,value))\n funcs=[]\n for key,value in inspect.getmembers(object,inspect.isroutine):\n \n if (all is not None or\n inspect.isbuiltin(value)or inspect.getmodule(value)is object):\n if visiblename(key,all,object):\n funcs.append((key,value))\n data=[]\n for key,value in inspect.getmembers(object,isdata):\n if visiblename(key,all,object):\n data.append((key,value))\n \n modpkgs=[]\n modpkgs_names=set()\n if hasattr(object,'__path__'):\n for importer,modname,ispkg in pkgutil.iter_modules(object.__path__):\n modpkgs_names.add(modname)\n if ispkg:\n modpkgs.append(modname+' (package)')\n else :\n modpkgs.append(modname)\n \n modpkgs.sort()\n result=result+self.section(\n 'PACKAGE CONTENTS','\\n'.join(modpkgs))\n \n \n submodules=[]\n for key,value in inspect.getmembers(object,inspect.ismodule):\n if value.__name__.startswith(name+'.')and key not in modpkgs_names:\n submodules.append(key)\n if submodules:\n submodules.sort()\n result=result+self.section(\n 'SUBMODULES','\\n'.join(submodules))\n \n if classes:\n classlist=[value for key,value in classes]\n contents=[self.formattree(\n inspect.getclasstree(classlist,1),name)]\n for key,value in classes:\n contents.append(self.document(value,key,name))\n result=result+self.section('CLASSES','\\n'.join(contents))\n \n if funcs:\n contents=[]\n for key,value in funcs:\n contents.append(self.document(value,key,name))\n result=result+self.section('FUNCTIONS','\\n'.join(contents))\n \n if data:\n contents=[]\n for key,value in data:\n contents.append(self.docother(value,key,name,maxlen=70))\n result=result+self.section('DATA','\\n'.join(contents))\n \n if hasattr(object,'__version__'):\n version=str(object.__version__)\n if version[:11]=='$'+'Revision: 'and version[-1:]=='$':\n version=version[11:-1].strip()\n result=result+self.section('VERSION',version)\n if hasattr(object,'__date__'):\n result=result+self.section('DATE',str(object.__date__))\n if hasattr(object,'__author__'):\n result=result+self.section('AUTHOR',str(object.__author__))\n if hasattr(object,'__credits__'):\n result=result+self.section('CREDITS',str(object.__credits__))\n try :\n file=inspect.getabsfile(object)\n except TypeError:\n file='(built-in)'\n result=result+self.section('FILE',file)\n return result\n \n def docclass(self,object,name=None ,mod=None ,*ignored):\n ''\n realname=object.__name__\n name=name or realname\n bases=object.__bases__\n \n def makename(c,m=object.__module__):\n return classname(c,m)\n \n if name ==realname:\n title='class '+self.bold(realname)\n else :\n title=self.bold(name)+' = class '+realname\n if bases:\n parents=map(makename,bases)\n title=title+'(%s)'%', '.join(parents)\n \n contents=[]\n push=contents.append\n \n try :\n signature=inspect.signature(object)\n except (ValueError,TypeError):\n signature=None\n if signature:\n argspec=str(signature)\n if argspec and argspec !='()':\n push(name+argspec+'\\n')\n \n doc=getdoc(object)\n if doc:\n push(doc+'\\n')\n \n \n mro=deque(inspect.getmro(object))\n if len(mro)>2:\n push(\"Method resolution order:\")\n for base in mro:\n push(' '+makename(base))\n push('')\n \n \n class HorizontalRule:\n def __init__(self):\n self.needone=0\n def maybe(self):\n if 
self.needone:\n push('-'*70)\n self.needone=1\n hr=HorizontalRule()\n \n def spill(msg,attrs,predicate):\n ok,attrs=_split_list(attrs,predicate)\n if ok:\n hr.maybe()\n push(msg)\n for name,kind,homecls,value in ok:\n try :\n value=getattr(object,name)\n except Exception:\n \n \n push(self._docdescriptor(name,value,mod))\n else :\n push(self.document(value,\n name,mod,object))\n return attrs\n \n def spilldescriptors(msg,attrs,predicate):\n ok,attrs=_split_list(attrs,predicate)\n if ok:\n hr.maybe()\n push(msg)\n for name,kind,homecls,value in ok:\n push(self._docdescriptor(name,value,mod))\n return attrs\n \n def spilldata(msg,attrs,predicate):\n ok,attrs=_split_list(attrs,predicate)\n if ok:\n hr.maybe()\n push(msg)\n for name,kind,homecls,value in ok:\n if callable(value)or inspect.isdatadescriptor(value):\n doc=getdoc(value)\n else :\n doc=None\n try :\n obj=getattr(object,name)\n except AttributeError:\n obj=homecls.__dict__[name]\n push(self.docother(obj,name,mod,maxlen=70,doc=doc)+\n '\\n')\n return attrs\n \n attrs=[(name,kind,cls,value)\n for name,kind,cls,value in classify_class_attrs(object)\n if visiblename(name,obj=object)]\n \n while attrs:\n if mro:\n thisclass=mro.popleft()\n else :\n thisclass=attrs[0][2]\n attrs,inherited=_split_list(attrs,lambda t:t[2]is thisclass)\n \n if thisclass is builtins.object:\n attrs=inherited\n continue\n elif thisclass is object:\n tag=\"defined here\"\n else :\n tag=\"inherited from %s\"%classname(thisclass,\n object.__module__)\n \n sort_attributes(attrs,object)\n \n \n attrs=spill(\"Methods %s:\\n\"%tag,attrs,\n lambda t:t[1]=='method')\n attrs=spill(\"Class methods %s:\\n\"%tag,attrs,\n lambda t:t[1]=='class method')\n attrs=spill(\"Static methods %s:\\n\"%tag,attrs,\n lambda t:t[1]=='static method')\n attrs=spilldescriptors(\"Data descriptors %s:\\n\"%tag,attrs,\n lambda t:t[1]=='data descriptor')\n attrs=spilldata(\"Data and other attributes %s:\\n\"%tag,attrs,\n lambda t:t[1]=='data')\n \n assert attrs ==[]\n attrs=inherited\n \n contents='\\n'.join(contents)\n if not contents:\n return title+'\\n'\n return title+'\\n'+self.indent(contents.rstrip(),' | ')+'\\n'\n \n def formatvalue(self,object):\n ''\n return '='+self.repr(object)\n \n def docroutine(self,object,name=None ,mod=None ,cl=None ):\n ''\n realname=object.__name__\n name=name or realname\n note=''\n skipdocs=0\n if _is_bound_method(object):\n imclass=object.__self__.__class__\n if cl:\n if imclass is not cl:\n note=' from '+classname(imclass,mod)\n else :\n if object.__self__ is not None :\n note=' method of %s instance'%classname(\n object.__self__.__class__,mod)\n else :\n note=' unbound %s method'%classname(imclass,mod)\n \n if name ==realname:\n title=self.bold(realname)\n else :\n if (cl and realname in cl.__dict__ and\n cl.__dict__[realname]is object):\n skipdocs=1\n title=self.bold(name)+' = '+realname\n argspec=None\n \n if inspect.isroutine(object):\n try :\n signature=inspect.signature(object)\n except (ValueError,TypeError):\n signature=None\n if signature:\n argspec=str(signature)\n if realname =='':\n title=self.bold(name)+' lambda '\n \n \n \n argspec=argspec[1:-1]\n if not argspec:\n argspec='(...)'\n decl=title+argspec+note\n \n if skipdocs:\n return decl+'\\n'\n else :\n doc=getdoc(object)or ''\n return decl+'\\n'+(doc and self.indent(doc).rstrip()+'\\n')\n \n def _docdescriptor(self,name,value,mod):\n results=[]\n push=results.append\n \n if name:\n push(self.bold(name))\n push('\\n')\n doc=getdoc(value)or ''\n if doc:\n push(self.indent(doc))\n 
push('\\n')\n return ''.join(results)\n \n def docproperty(self,object,name=None ,mod=None ,cl=None ):\n ''\n return self._docdescriptor(name,object,mod)\n \n def docdata(self,object,name=None ,mod=None ,cl=None ):\n ''\n return self._docdescriptor(name,object,mod)\n \n def docother(self,object,name=None ,mod=None ,parent=None ,maxlen=None ,doc=None ):\n ''\n repr=self.repr(object)\n if maxlen:\n line=(name and name+' = 'or '')+repr\n chop=maxlen -len(line)\n if chop <0:repr=repr[:chop]+'...'\n line=(name and self.bold(name)+' = 'or '')+repr\n if doc is not None :\n line +='\\n'+self.indent(str(doc))\n return line\n \nclass _PlainTextDoc(TextDoc):\n ''\n def bold(self,text):\n return text\n \n \n \ndef pager(text):\n ''\n global pager\n pager=getpager()\n pager(text)\n \ndef getpager():\n ''\n if not hasattr(sys.stdin,\"isatty\"):\n return plainpager\n if not hasattr(sys.stdout,\"isatty\"):\n return plainpager\n if not sys.stdin.isatty()or not sys.stdout.isatty():\n return plainpager\n use_pager=os.environ.get('MANPAGER')or os.environ.get('PAGER')\n if use_pager:\n if sys.platform =='win32':\n return lambda text:tempfilepager(plain(text),use_pager)\n elif os.environ.get('TERM')in ('dumb','emacs'):\n return lambda text:pipepager(plain(text),use_pager)\n else :\n return lambda text:pipepager(text,use_pager)\n if os.environ.get('TERM')in ('dumb','emacs'):\n return plainpager\n if sys.platform =='win32':\n return lambda text:tempfilepager(plain(text),'more <')\n if hasattr(os,'system')and os.system('(less) 2>/dev/null')==0:\n return lambda text:pipepager(text,'less')\n \n import tempfile\n (fd,filename)=tempfile.mkstemp()\n os.close(fd)\n try :\n if hasattr(os,'system')and os.system('more \"%s\"'%filename)==0:\n return lambda text:pipepager(text,'more')\n else :\n return ttypager\n finally :\n os.unlink(filename)\n \ndef plain(text):\n ''\n return re.sub('.\\b','',text)\n \ndef pipepager(text,cmd):\n ''\n import subprocess\n proc=subprocess.Popen(cmd,shell=True ,stdin=subprocess.PIPE)\n try :\n with io.TextIOWrapper(proc.stdin,errors='backslashreplace')as pipe:\n try :\n pipe.write(text)\n except KeyboardInterrupt:\n \n \n pass\n except OSError:\n pass\n while True :\n try :\n proc.wait()\n break\n except KeyboardInterrupt:\n \n \n pass\n \ndef tempfilepager(text,cmd):\n ''\n import tempfile\n filename=tempfile.mktemp()\n with open(filename,'w',errors='backslashreplace')as file:\n file.write(text)\n try :\n os.system(cmd+' \"'+filename+'\"')\n finally :\n os.unlink(filename)\n \ndef _escape_stdout(text):\n\n encoding=getattr(sys.stdout,'encoding',None )or 'utf-8'\n return text.encode(encoding,'backslashreplace').decode(encoding)\n \ndef ttypager(text):\n ''\n lines=plain(_escape_stdout(text)).split('\\n')\n try :\n import tty\n fd=sys.stdin.fileno()\n old=tty.tcgetattr(fd)\n tty.setcbreak(fd)\n getchar=lambda :sys.stdin.read(1)\n except (ImportError,AttributeError,io.UnsupportedOperation):\n tty=None\n getchar=lambda :sys.stdin.readline()[:-1][:1]\n \n try :\n try :\n h=int(os.environ.get('LINES',0))\n except ValueError:\n h=0\n if h <=1:\n h=25\n r=inc=h -1\n sys.stdout.write('\\n'.join(lines[:inc])+'\\n')\n while lines[r:]:\n sys.stdout.write('-- more --')\n sys.stdout.flush()\n c=getchar()\n \n if c in ('q','Q'):\n sys.stdout.write('\\r \\r')\n break\n elif c in ('\\r','\\n'):\n sys.stdout.write('\\r \\r'+lines[r]+'\\n')\n r=r+1\n continue\n if c in ('b','B','\\x1b'):\n r=r -inc -inc\n if r <0:r=0\n sys.stdout.write('\\n'+'\\n'.join(lines[r:r+inc])+'\\n')\n r=r+inc\n \n finally :\n if 
tty:\n tty.tcsetattr(fd,tty.TCSAFLUSH,old)\n \ndef plainpager(text):\n ''\n sys.stdout.write(plain(_escape_stdout(text)))\n \ndef describe(thing):\n ''\n if inspect.ismodule(thing):\n if thing.__name__ in sys.builtin_module_names:\n return 'built-in module '+thing.__name__\n if hasattr(thing,'__path__'):\n return 'package '+thing.__name__\n else :\n return 'module '+thing.__name__\n if inspect.isbuiltin(thing):\n return 'built-in function '+thing.__name__\n if inspect.isgetsetdescriptor(thing):\n return 'getset descriptor %s.%s.%s'%(\n thing.__objclass__.__module__,thing.__objclass__.__name__,\n thing.__name__)\n if inspect.ismemberdescriptor(thing):\n return 'member descriptor %s.%s.%s'%(\n thing.__objclass__.__module__,thing.__objclass__.__name__,\n thing.__name__)\n if inspect.isclass(thing):\n return 'class '+thing.__name__\n if inspect.isfunction(thing):\n return 'function '+thing.__name__\n if inspect.ismethod(thing):\n return 'method '+thing.__name__\n return type(thing).__name__\n \ndef locate(path,forceload=0):\n ''\n parts=[part for part in path.split('.')if part]\n module,n=None ,0\n while n >','&',\n '|','^','~','<','>','<=','>=','==','!=','<>'),\n 'COMPARISON':('<','>','<=','>=','==','!=','<>'),\n 'UNARY':('-','~'),\n 'AUGMENTEDASSIGNMENT':('+=','-=','*=','/=','%=','&=','|=',\n '^=','<<=','>>=','**=','//='),\n 'BITWISE':('<<','>>','&','|','^','~'),\n 'COMPLEX':('j','J')\n }\n symbols={\n '%':'OPERATORS FORMATTING',\n '**':'POWER',\n ',':'TUPLES LISTS FUNCTIONS',\n '.':'ATTRIBUTES FLOAT MODULES OBJECTS',\n '...':'ELLIPSIS',\n ':':'SLICINGS DICTIONARYLITERALS',\n '@':'def class',\n '\\\\':'STRINGS',\n '_':'PRIVATENAMES',\n '__':'PRIVATENAMES SPECIALMETHODS',\n '`':'BACKQUOTES',\n '(':'TUPLES FUNCTIONS CALLS',\n ')':'TUPLES FUNCTIONS CALLS',\n '[':'LISTS SUBSCRIPTS SLICINGS',\n ']':'LISTS SUBSCRIPTS SLICINGS'\n }\n for topic,symbols_ in _symbols_inverse.items():\n for symbol in symbols_:\n topics=symbols.get(symbol,topic)\n if topic not in topics:\n topics=topics+' '+topic\n symbols[symbol]=topics\n \n topics={\n 'TYPES':('types','STRINGS UNICODE NUMBERS SEQUENCES MAPPINGS '\n 'FUNCTIONS CLASSES MODULES FILES inspect'),\n 'STRINGS':('strings','str UNICODE SEQUENCES STRINGMETHODS '\n 'FORMATTING TYPES'),\n 'STRINGMETHODS':('string-methods','STRINGS FORMATTING'),\n 'FORMATTING':('formatstrings','OPERATORS'),\n 'UNICODE':('strings','encodings unicode SEQUENCES STRINGMETHODS '\n 'FORMATTING TYPES'),\n 'NUMBERS':('numbers','INTEGER FLOAT COMPLEX TYPES'),\n 'INTEGER':('integers','int range'),\n 'FLOAT':('floating','float math'),\n 'COMPLEX':('imaginary','complex cmath'),\n 'SEQUENCES':('typesseq','STRINGMETHODS FORMATTING range LISTS'),\n 'MAPPINGS':'DICTIONARIES',\n 'FUNCTIONS':('typesfunctions','def TYPES'),\n 'METHODS':('typesmethods','class def CLASSES TYPES'),\n 'CODEOBJECTS':('bltin-code-objects','compile FUNCTIONS TYPES'),\n 'TYPEOBJECTS':('bltin-type-objects','types TYPES'),\n 'FRAMEOBJECTS':'TYPES',\n 'TRACEBACKS':'TYPES',\n 'NONE':('bltin-null-object',''),\n 'ELLIPSIS':('bltin-ellipsis-object','SLICINGS'),\n 'SPECIALATTRIBUTES':('specialattrs',''),\n 'CLASSES':('types','class SPECIALMETHODS PRIVATENAMES'),\n 'MODULES':('typesmodules','import'),\n 'PACKAGES':'import',\n 'EXPRESSIONS':('operator-summary','lambda or and not in is BOOLEAN '\n 'COMPARISON BITWISE SHIFTING BINARY FORMATTING POWER '\n 'UNARY ATTRIBUTES SUBSCRIPTS SLICINGS CALLS TUPLES '\n 'LISTS DICTIONARIES'),\n 'OPERATORS':'EXPRESSIONS',\n 'PRECEDENCE':'EXPRESSIONS',\n 'OBJECTS':('objects','TYPES'),\n 
'SPECIALMETHODS':('specialnames','BASICMETHODS ATTRIBUTEMETHODS '\n 'CALLABLEMETHODS SEQUENCEMETHODS MAPPINGMETHODS '\n 'NUMBERMETHODS CLASSES'),\n 'BASICMETHODS':('customization','hash repr str SPECIALMETHODS'),\n 'ATTRIBUTEMETHODS':('attribute-access','ATTRIBUTES SPECIALMETHODS'),\n 'CALLABLEMETHODS':('callable-types','CALLS SPECIALMETHODS'),\n 'SEQUENCEMETHODS':('sequence-types','SEQUENCES SEQUENCEMETHODS '\n 'SPECIALMETHODS'),\n 'MAPPINGMETHODS':('sequence-types','MAPPINGS SPECIALMETHODS'),\n 'NUMBERMETHODS':('numeric-types','NUMBERS AUGMENTEDASSIGNMENT '\n 'SPECIALMETHODS'),\n 'EXECUTION':('execmodel','NAMESPACES DYNAMICFEATURES EXCEPTIONS'),\n 'NAMESPACES':('naming','global nonlocal ASSIGNMENT DELETION DYNAMICFEATURES'),\n 'DYNAMICFEATURES':('dynamic-features',''),\n 'SCOPING':'NAMESPACES',\n 'FRAMES':'NAMESPACES',\n 'EXCEPTIONS':('exceptions','try except finally raise'),\n 'CONVERSIONS':('conversions',''),\n 'IDENTIFIERS':('identifiers','keywords SPECIALIDENTIFIERS'),\n 'SPECIALIDENTIFIERS':('id-classes',''),\n 'PRIVATENAMES':('atom-identifiers',''),\n 'LITERALS':('atom-literals','STRINGS NUMBERS TUPLELITERALS '\n 'LISTLITERALS DICTIONARYLITERALS'),\n 'TUPLES':'SEQUENCES',\n 'TUPLELITERALS':('exprlists','TUPLES LITERALS'),\n 'LISTS':('typesseq-mutable','LISTLITERALS'),\n 'LISTLITERALS':('lists','LISTS LITERALS'),\n 'DICTIONARIES':('typesmapping','DICTIONARYLITERALS'),\n 'DICTIONARYLITERALS':('dict','DICTIONARIES LITERALS'),\n 'ATTRIBUTES':('attribute-references','getattr hasattr setattr ATTRIBUTEMETHODS'),\n 'SUBSCRIPTS':('subscriptions','SEQUENCEMETHODS'),\n 'SLICINGS':('slicings','SEQUENCEMETHODS'),\n 'CALLS':('calls','EXPRESSIONS'),\n 'POWER':('power','EXPRESSIONS'),\n 'UNARY':('unary','EXPRESSIONS'),\n 'BINARY':('binary','EXPRESSIONS'),\n 'SHIFTING':('shifting','EXPRESSIONS'),\n 'BITWISE':('bitwise','EXPRESSIONS'),\n 'COMPARISON':('comparisons','EXPRESSIONS BASICMETHODS'),\n 'BOOLEAN':('booleans','EXPRESSIONS TRUTHVALUE'),\n 'ASSERTION':'assert',\n 'ASSIGNMENT':('assignment','AUGMENTEDASSIGNMENT'),\n 'AUGMENTEDASSIGNMENT':('augassign','NUMBERMETHODS'),\n 'DELETION':'del',\n 'RETURNING':'return',\n 'IMPORTING':'import',\n 'CONDITIONAL':'if',\n 'LOOPING':('compound','for while break continue'),\n 'TRUTHVALUE':('truth','if while and or not BASICMETHODS'),\n 'DEBUGGING':('debugger','pdb'),\n 'CONTEXTMANAGERS':('context-managers','with'),\n }\n \n def __init__(self,input=None ,output=None ):\n self._input=input\n self._output=output\n \n @property\n def input(self):\n return self._input or sys.stdin\n \n @property\n def output(self):\n return self._output or sys.stdout\n \n def __repr__(self):\n if inspect.stack()[1][3]=='?':\n self()\n return ''\n return '<%s.%s instance>'%(self.__class__.__module__,\n self.__class__.__qualname__)\n \n _GoInteractive=object()\n def __call__(self,request=_GoInteractive):\n if request is not self._GoInteractive:\n self.help(request)\n else :\n self.intro()\n self.interact()\n self.output.write('''\nYou are now leaving help and returning to the Python interpreter.\nIf you want to ask for help on a particular object directly from the\ninterpreter, you can type \"help(object)\". 
Executing \"help('string')\"\nhas the same effect as typing a particular string at the help> prompt.\n''')\n \n def interact(self):\n self.output.write('\\n')\n while True :\n try :\n request=self.getline('help> ')\n if not request:break\n except (KeyboardInterrupt,EOFError):\n break\n request=request.strip()\n \n \n \n if (len(request)>2 and request[0]==request[-1]in (\"'\",'\"')\n and request[0]not in request[1:-1]):\n request=request[1:-1]\n if request.lower()in ('q','quit'):break\n if request =='help':\n self.intro()\n else :\n self.help(request)\n \n def getline(self,prompt):\n ''\n if self.input is sys.stdin:\n return input(prompt)\n else :\n self.output.write(prompt)\n self.output.flush()\n return self.input.readline()\n \n def help(self,request):\n if type(request)is type(''):\n request=request.strip()\n if request =='keywords':self.listkeywords()\n elif request =='symbols':self.listsymbols()\n elif request =='topics':self.listtopics()\n elif request =='modules':self.listmodules()\n elif request[:8]=='modules ':\n self.listmodules(request.split()[1])\n elif request in self.symbols:self.showsymbol(request)\n elif request in ['True','False','None']:\n \n doc(eval(request),'Help on %s:')\n elif request in self.keywords:self.showtopic(request)\n elif request in self.topics:self.showtopic(request)\n elif request:doc(request,'Help on %s:',output=self._output)\n else :doc(str,'Help on %s:',output=self._output)\n elif isinstance(request,Helper):self()\n else :doc(request,'Help on %s:',output=self._output)\n self.output.write('\\n')\n \n def intro(self):\n self.output.write('''\nWelcome to Python {0}'s help utility!\n\nIf this is your first time using Python, you should definitely check out\nthe tutorial on the Internet at https://docs.python.org/{0}/tutorial/.\n\nEnter the name of any module, keyword, or topic to get help on writing\nPython programs and using Python modules. To quit this help utility and\nreturn to the interpreter, just type \"quit\".\n\nTo get a list of available modules, keywords, symbols, or topics, type\n\"modules\", \"keywords\", \"symbols\", or \"topics\". 
Each module also comes\nwith a one-line summary of what it does; to list the modules whose name\nor summary contain a given string such as \"spam\", type \"modules spam\".\n'''.format('%d.%d'%sys.version_info[:2]))\n \n def list(self,items,columns=4,width=80):\n items=list(sorted(items))\n colw=width //columns\n rows=(len(items)+columns -1)//columns\n for row in range(rows):\n for col in range(columns):\n i=col *rows+row\n if i =0:\n callback(None ,modname,desc)\n \n for importer,modname,ispkg in pkgutil.walk_packages(onerror=onerror):\n if self.quit:\n break\n \n if key is None :\n callback(None ,modname,'')\n else :\n try :\n spec=pkgutil._get_spec(importer,modname)\n except SyntaxError:\n \n continue\n loader=spec.loader\n if hasattr(loader,'get_source'):\n try :\n source=loader.get_source(modname)\n except Exception:\n if onerror:\n onerror(modname)\n continue\n desc=source_synopsis(io.StringIO(source))or ''\n if hasattr(loader,'get_filename'):\n path=loader.get_filename(modname)\n else :\n path=None\n else :\n try :\n module=importlib._bootstrap._load(spec)\n except ImportError:\n if onerror:\n onerror(modname)\n continue\n desc=module.__doc__.splitlines()[0]if module.__doc__ else ''\n path=getattr(module,'__file__',None )\n name=modname+' - '+desc\n if name.lower().find(key)>=0:\n callback(path,modname,desc)\n \n if completer:\n completer()\n \ndef apropos(key):\n ''\n def callback(path,modname,desc):\n if modname[-9:]=='.__init__':\n modname=modname[:-9]+' (package)'\n print(modname,desc and '- '+desc)\n def onerror(modname):\n pass\n with warnings.catch_warnings():\n warnings.filterwarnings('ignore')\n ModuleScanner().run(callback,key,onerror=onerror)\n \n \n \ndef _start_server(urlhandler,hostname,port):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n import http.server\n import email.message\n import select\n import threading\n \n class DocHandler(http.server.BaseHTTPRequestHandler):\n \n def do_GET(self):\n ''\n\n\n\n \n if self.path.endswith('.css'):\n content_type='text/css'\n else :\n content_type='text/html'\n self.send_response(200)\n self.send_header('Content-Type','%s; charset=UTF-8'%content_type)\n self.end_headers()\n self.wfile.write(self.urlhandler(\n self.path,content_type).encode('utf-8'))\n \n def log_message(self,*args):\n \n pass\n \n class DocServer(http.server.HTTPServer):\n \n def __init__(self,host,port,callback):\n self.host=host\n self.address=(self.host,port)\n self.callback=callback\n self.base.__init__(self,self.address,self.handler)\n self.quit=False\n \n def serve_until_quit(self):\n while not self.quit:\n rd,wr,ex=select.select([self.socket.fileno()],[],[],1)\n if rd:\n self.handle_request()\n self.server_close()\n \n def server_activate(self):\n self.base.server_activate(self)\n if self.callback:\n self.callback(self)\n \n class ServerThread(threading.Thread):\n \n def __init__(self,urlhandler,host,port):\n self.urlhandler=urlhandler\n self.host=host\n self.port=int(port)\n threading.Thread.__init__(self)\n self.serving=False\n self.error=None\n \n def run(self):\n ''\n try :\n DocServer.base=http.server.HTTPServer\n DocServer.handler=DocHandler\n DocHandler.MessageClass=email.message.Message\n DocHandler.urlhandler=staticmethod(self.urlhandler)\n docsvr=DocServer(self.host,self.port,self.ready)\n self.docserver=docsvr\n docsvr.serve_until_quit()\n except Exception as e:\n self.error=e\n \n def ready(self,server):\n self.serving=True\n self.host=server.host\n 
self.port=server.server_port\n self.url='http://%s:%d/'%(self.host,self.port)\n \n def stop(self):\n ''\n self.docserver.quit=True\n self.join()\n \n \n self.docserver=None\n self.serving=False\n self.url=None\n \n thread=ServerThread(urlhandler,hostname,port)\n thread.start()\n \n \n while not thread.error and not thread.serving:\n time.sleep(.01)\n return thread\n \n \ndef _url_handler(url,content_type=\"text/html\"):\n ''\n\n\n\n\n\n\n \n class _HTMLDoc(HTMLDoc):\n \n def page(self,title,contents):\n ''\n css_path=\"pydoc_data/_pydoc.css\"\n css_link=(\n ''%\n css_path)\n return '''\\\n\nPydoc: %s\n\n%s%s
%s
\n'''%(title,css_link,html_navbar(),contents)\n \n def filelink(self,url,path):\n return '%s'%(url,path)\n \n \n html=_HTMLDoc()\n \n def html_navbar():\n version=html.escape(\"%s [%s, %s]\"%(platform.python_version(),\n platform.python_build()[0],\n platform.python_compiler()))\n return \"\"\"\n
\n Python %s
%s\n
\n
\n \n
\n
\n \n \n
 \n
\n \n \n
\n
\n
\n \"\"\"%(version,html.escape(platform.platform(terse=True )))\n \n def html_index():\n ''\n \n def bltinlink(name):\n return '%s'%(name,name)\n \n heading=html.heading(\n 'Index of Modules',\n '#ffffff','#7799ee')\n names=[name for name in sys.builtin_module_names\n if name !='__main__']\n contents=html.multicolumn(names,bltinlink)\n contents=[heading,'

'+html.bigsection(\n 'Built-in Modules','#ffffff','#ee77aa',contents)]\n \n seen={}\n for dir in sys.path:\n contents.append(html.index(dir,seen))\n \n contents.append(\n '

pydoc by Ka-Ping Yee'\n '<ping@lfw.org>')\n return 'Index of Modules',''.join(contents)\n \n def html_search(key):\n ''\n \n search_result=[]\n \n def callback(path,modname,desc):\n if modname[-9:]=='.__init__':\n modname=modname[:-9]+' (package)'\n search_result.append((modname,desc and '- '+desc))\n \n with warnings.catch_warnings():\n warnings.filterwarnings('ignore')\n def onerror(modname):\n pass\n ModuleScanner().run(callback,key,onerror=onerror)\n \n \n def bltinlink(name):\n return '%s'%(name,name)\n \n results=[]\n heading=html.heading(\n 'Search Results',\n '#ffffff','#7799ee')\n for name,desc in search_result:\n results.append(bltinlink(name)+desc)\n contents=heading+html.bigsection(\n 'key = %s'%key,'#ffffff','#ee77aa','
'.join(results))\n return 'Search Results',contents\n \n def html_getfile(path):\n ''\n path=urllib.parse.unquote(path)\n with tokenize.open(path)as fp:\n lines=html.escape(fp.read())\n body='

%s
'%lines\n heading=html.heading(\n 'File Listing',\n '#ffffff','#7799ee')\n contents=heading+html.bigsection(\n 'File: %s'%path,'#ffffff','#ee77aa',body)\n return 'getfile %s'%path,contents\n \n def html_topics():\n ''\n \n def bltinlink(name):\n return '%s'%(name,name)\n \n heading=html.heading(\n 'INDEX',\n '#ffffff','#7799ee')\n names=sorted(Helper.topics.keys())\n \n contents=html.multicolumn(names,bltinlink)\n contents=heading+html.bigsection(\n 'Topics','#ffffff','#ee77aa',contents)\n return 'Topics',contents\n \n def html_keywords():\n ''\n heading=html.heading(\n 'INDEX',\n '#ffffff','#7799ee')\n names=sorted(Helper.keywords.keys())\n \n def bltinlink(name):\n return '%s'%(name,name)\n \n contents=html.multicolumn(names,bltinlink)\n contents=heading+html.bigsection(\n 'Keywords','#ffffff','#ee77aa',contents)\n return 'Keywords',contents\n \n def html_topicpage(topic):\n ''\n buf=io.StringIO()\n htmlhelp=Helper(buf,buf)\n contents,xrefs=htmlhelp._gettopic(topic)\n if topic in htmlhelp.keywords:\n title='KEYWORD'\n else :\n title='TOPIC'\n heading=html.heading(\n '%s'%title,\n '#ffffff','#7799ee')\n contents='
<pre>%s</pre>
'%html.markup(contents)\n contents=html.bigsection(topic,'#ffffff','#ee77aa',contents)\n if xrefs:\n xrefs=sorted(xrefs.split())\n \n def bltinlink(name):\n return '%s'%(name,name)\n \n xrefs=html.multicolumn(xrefs,bltinlink)\n xrefs=html.section('Related help topics: ',\n '#ffffff','#ee77aa',xrefs)\n return ('%s %s'%(title,topic),\n ''.join((heading,contents,xrefs)))\n \n def html_getobj(url):\n obj=locate(url,forceload=1)\n if obj is None and url !='None':\n raise ValueError('could not find object')\n title=describe(obj)\n content=html.document(obj,url)\n return title,content\n \n def html_error(url,exc):\n heading=html.heading(\n 'Error',\n '#ffffff','#7799ee')\n contents='
'.join(html.escape(line)for line in\n format_exception_only(type(exc),exc))\n contents=heading+html.bigsection(url,'#ffffff','#bb0000',\n contents)\n return \"Error - %s\"%url,contents\n \n def get_html_page(url):\n ''\n complete_url=url\n if url.endswith('.html'):\n url=url[:-5]\n try :\n if url in (\"\",\"index\"):\n title,content=html_index()\n elif url ==\"topics\":\n title,content=html_topics()\n elif url ==\"keywords\":\n title,content=html_keywords()\n elif '='in url:\n op,_,url=url.partition('=')\n if op ==\"search?key\":\n title,content=html_search(url)\n elif op ==\"getfile?key\":\n title,content=html_getfile(url)\n elif op ==\"topic?key\":\n \n try :\n title,content=html_topicpage(url)\n except ValueError:\n title,content=html_getobj(url)\n elif op ==\"get?key\":\n \n if url in (\"\",\"index\"):\n title,content=html_index()\n else :\n try :\n title,content=html_getobj(url)\n except ValueError:\n title,content=html_topicpage(url)\n else :\n raise ValueError('bad pydoc url')\n else :\n title,content=html_getobj(url)\n except Exception as exc:\n \n title,content=html_error(complete_url,exc)\n return html.page(title,content)\n \n if url.startswith('/'):\n url=url[1:]\n if content_type =='text/css':\n path_here=os.path.dirname(os.path.realpath(__file__))\n css_path=os.path.join(path_here,url)\n with open(css_path)as fp:\n return ''.join(fp.readlines())\n elif content_type =='text/html':\n return get_html_page(url)\n \n raise TypeError('unknown content type %r for url %s'%(content_type,url))\n \n \ndef browse(port=0,*,open_browser=True ,hostname='localhost'):\n ''\n\n\n\n \n import webbrowser\n serverthread=_start_server(_url_handler,hostname,port)\n if serverthread.error:\n print(serverthread.error)\n return\n if serverthread.serving:\n server_help_msg='Server commands: [b]rowser, [q]uit'\n if open_browser:\n webbrowser.open(serverthread.url)\n try :\n print('Server ready at',serverthread.url)\n print(server_help_msg)\n while serverthread.serving:\n cmd=input('server> ')\n cmd=cmd.lower()\n if cmd =='q':\n break\n elif cmd =='b':\n webbrowser.open(serverthread.url)\n else :\n print(server_help_msg)\n except (KeyboardInterrupt,EOFError):\n print()\n finally :\n if serverthread.serving:\n serverthread.stop()\n print('Server stopped')\n \n \n \n \ndef ispath(x):\n return isinstance(x,str)and x.find(os.sep)>=0\n \ndef _get_revised_path(given_path,argv0):\n ''\n\n\n\n\n \n \n \n \n \n \n \n if ''in given_path or os.curdir in given_path or os.getcwd()in given_path:\n return None\n \n \n \n stdlib_dir=os.path.dirname(__file__)\n script_dir=os.path.dirname(argv0)\n revised_path=given_path.copy()\n if script_dir in given_path and not os.path.samefile(script_dir,stdlib_dir):\n revised_path.remove(script_dir)\n revised_path.insert(0,os.getcwd())\n return revised_path\n \n \n \ndef _adjust_cli_sys_path():\n ''\n\n\n \n revised_path=_get_revised_path(sys.path,sys.argv[0])\n if revised_path is not None :\n sys.path[:]=revised_path\n \n \ndef cli():\n ''\n import getopt\n class BadUsage(Exception):pass\n \n _adjust_cli_sys_path()\n \n try :\n opts,args=getopt.getopt(sys.argv[1:],'bk:n:p:w')\n writing=False\n start_server=False\n open_browser=False\n port=0\n hostname='localhost'\n for opt,val in opts:\n if opt =='-b':\n start_server=True\n open_browser=True\n if opt =='-k':\n apropos(val)\n return\n if opt =='-p':\n start_server=True\n port=val\n if opt =='-w':\n writing=True\n if opt =='-n':\n start_server=True\n hostname=val\n \n if start_server:\n 
browse(port,hostname=hostname,open_browser=open_browser)\n return\n \n if not args:raise BadUsage\n for arg in args:\n if ispath(arg)and not os.path.exists(arg):\n print('file %r does not exist'%arg)\n break\n try :\n if ispath(arg)and os.path.isfile(arg):\n arg=importfile(arg)\n if writing:\n if ispath(arg)and os.path.isdir(arg):\n writedocs(arg)\n else :\n writedoc(arg)\n else :\n help.help(arg)\n except ErrorDuringImport as value:\n print(value)\n \n except (getopt.error,BadUsage):\n cmd=os.path.splitext(os.path.basename(sys.argv[0]))[0]\n print(\"\"\"pydoc - the Python documentation tool\n\n{cmd} ...\n Show text documentation on something. may be the name of a\n Python keyword, topic, function, module, or package, or a dotted\n reference to a class or function within a module or module in a\n package. If contains a '{sep}', it is used as the path to a\n Python source file to document. If name is 'keywords', 'topics',\n or 'modules', a listing of these things is displayed.\n\n{cmd} -k \n Search for a keyword in the synopsis lines of all available modules.\n\n{cmd} -n \n Start an HTTP server with the given hostname (default: localhost).\n\n{cmd} -p \n Start an HTTP server on the given port on the local machine. Port\n number 0 can be used to get an arbitrary unused port.\n\n{cmd} -b\n Start an HTTP server on an arbitrary unused port and open a Web browser\n to interactively browse documentation. This option can be used in\n combination with -n and/or -p.\n\n{cmd} -w ...\n Write out the HTML documentation for a module to a file in the current\n directory. If contains a '{sep}', it is treated as a filename; if\n it names a directory, documentation is written for all the contents.\n\"\"\".format(cmd=cmd,sep=os.sep))\n \nif __name__ =='__main__':\n cli()\n", ["builtins", "collections", "email.message", "getopt", "http.server", "importlib._bootstrap", "importlib._bootstrap_external", "importlib.machinery", "importlib.util", "inspect", "io", "os", "pkgutil", "platform", "pydoc_data.topics", "re", "reprlib", "select", "subprocess", "sys", "tempfile", "textwrap", "threading", "time", "tokenize", "traceback", "tty", "urllib.parse", "warnings", "webbrowser"]], "py_compile": [".py", "''\n\n\n\n\nimport enum\nimport importlib._bootstrap_external\nimport importlib.machinery\nimport importlib.util\nimport os\nimport os.path\nimport sys\nimport traceback\n\n__all__=[\"compile\",\"main\",\"PyCompileError\",\"PycInvalidationMode\"]\n\n\nclass PyCompileError(Exception):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def __init__(self,exc_type,exc_value,file,msg=''):\n exc_type_name=exc_type.__name__\n if exc_type is SyntaxError:\n tbtext=''.join(traceback.format_exception_only(\n exc_type,exc_value))\n errmsg=tbtext.replace('File \"\"','File \"%s\"'%file)\n else :\n errmsg=\"Sorry: %s: %s\"%(exc_type_name,exc_value)\n \n Exception.__init__(self,msg or errmsg,exc_type_name,exc_value,file)\n \n self.exc_type_name=exc_type_name\n self.exc_value=exc_value\n self.file=file\n self.msg=msg or errmsg\n \n def __str__(self):\n return self.msg\n \n \nclass PycInvalidationMode(enum.Enum):\n TIMESTAMP=1\n CHECKED_HASH=2\n UNCHECKED_HASH=3\n \n \ndef compile(file,cfile=None ,dfile=None ,doraise=False ,optimize=-1,\ninvalidation_mode=PycInvalidationMode.TIMESTAMP):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if os.environ.get('SOURCE_DATE_EPOCH'):\n invalidation_mode=PycInvalidationMode.CHECKED_HASH\n if cfile is None :\n if optimize >=0:\n optimization=optimize 
if optimize >=1 else ''\n cfile=importlib.util.cache_from_source(file,\n optimization=optimization)\n else :\n cfile=importlib.util.cache_from_source(file)\n if os.path.islink(cfile):\n msg=('{} is a symlink and will be changed into a regular file if '\n 'import writes a byte-compiled file to it')\n raise FileExistsError(msg.format(cfile))\n elif os.path.exists(cfile)and not os.path.isfile(cfile):\n msg=('{} is a non-regular file and will be changed into a regular '\n 'one if import writes a byte-compiled file to it')\n raise FileExistsError(msg.format(cfile))\n loader=importlib.machinery.SourceFileLoader('',file)\n source_bytes=loader.get_data(file)\n try :\n code=loader.source_to_code(source_bytes,dfile or file,\n _optimize=optimize)\n except Exception as err:\n py_exc=PyCompileError(err.__class__,err,dfile or file)\n if doraise:\n raise py_exc\n else :\n sys.stderr.write(py_exc.msg+'\\n')\n return\n try :\n dirname=os.path.dirname(cfile)\n if dirname:\n os.makedirs(dirname)\n except FileExistsError:\n pass\n if invalidation_mode ==PycInvalidationMode.TIMESTAMP:\n source_stats=loader.path_stats(file)\n bytecode=importlib._bootstrap_external._code_to_timestamp_pyc(\n code,source_stats['mtime'],source_stats['size'])\n else :\n source_hash=importlib.util.source_hash(source_bytes)\n bytecode=importlib._bootstrap_external._code_to_hash_pyc(\n code,\n source_hash,\n (invalidation_mode ==PycInvalidationMode.CHECKED_HASH),\n )\n mode=importlib._bootstrap_external._calc_mode(file)\n importlib._bootstrap_external._write_atomic(cfile,bytecode,mode)\n return cfile\n \n \ndef main(args=None ):\n ''\n\n\n\n\n\n\n\n\n \n if args is None :\n args=sys.argv[1:]\n rv=0\n if args ==['-']:\n while True :\n filename=sys.stdin.readline()\n if not filename:\n break\n filename=filename.rstrip('\\n')\n try :\n compile(filename,doraise=True )\n except PyCompileError as error:\n rv=1\n sys.stderr.write(\"%s\\n\"%error.msg)\n except OSError as error:\n rv=1\n sys.stderr.write(\"%s\\n\"%error)\n else :\n for filename in args:\n try :\n compile(filename,doraise=True )\n except PyCompileError as error:\n \n rv=1\n sys.stderr.write(\"%s\\n\"%error.msg)\n return rv\n \nif __name__ ==\"__main__\":\n sys.exit(main())\n", ["enum", "importlib._bootstrap_external", "importlib.machinery", "importlib.util", "os", "os.path", "sys", "traceback"]], "queue": [".py", "''\n\nimport threading\nfrom collections import deque\nfrom heapq import heappush,heappop\nfrom time import monotonic as time\ntry :\n from _queue import SimpleQueue\nexcept ImportError:\n SimpleQueue=None\n \n__all__=['Empty','Full','Queue','PriorityQueue','LifoQueue','SimpleQueue']\n\n\ntry :\n from _queue import Empty\nexcept AttributeError:\n class Empty(Exception):\n ''\n pass\n \nclass Full(Exception):\n ''\n pass\n \n \nclass Queue:\n ''\n\n\n \n \n def __init__(self,maxsize=0):\n self.maxsize=maxsize\n self._init(maxsize)\n \n \n \n \n \n self.mutex=threading.Lock()\n \n \n \n self.not_empty=threading.Condition(self.mutex)\n \n \n \n self.not_full=threading.Condition(self.mutex)\n \n \n \n self.all_tasks_done=threading.Condition(self.mutex)\n self.unfinished_tasks=0\n \n def task_done(self):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n with self.all_tasks_done:\n unfinished=self.unfinished_tasks -1\n if unfinished <=0:\n if unfinished <0:\n raise ValueError('task_done() called too many times')\n self.all_tasks_done.notify_all()\n self.unfinished_tasks=unfinished\n \n def join(self):\n ''\n\n\n\n\n\n\n \n with self.all_tasks_done:\n while self.unfinished_tasks:\n 
self.all_tasks_done.wait()\n \n def qsize(self):\n ''\n with self.mutex:\n return self._qsize()\n \n def empty(self):\n ''\n\n\n\n\n\n\n\n\n \n with self.mutex:\n return not self._qsize()\n \n def full(self):\n ''\n\n\n\n\n\n \n with self.mutex:\n return 0 0:\n if not block:\n if self._qsize()>=self.maxsize:\n raise Full\n elif timeout is None :\n while self._qsize()>=self.maxsize:\n self.not_full.wait()\n elif timeout <0:\n raise ValueError(\"'timeout' must be a non-negative number\")\n else :\n endtime=time()+timeout\n while self._qsize()>=self.maxsize:\n remaining=endtime -time()\n if remaining <=0.0:\n raise Full\n self.not_full.wait(remaining)\n self._put(item)\n self.unfinished_tasks +=1\n self.not_empty.notify()\n \n def get(self,block=True ,timeout=None ):\n ''\n\n\n\n\n\n\n\n\n \n with self.not_empty:\n if not block:\n if not self._qsize():\n raise Empty\n elif timeout is None :\n while not self._qsize():\n self.not_empty.wait()\n elif timeout <0:\n raise ValueError(\"'timeout' must be a non-negative number\")\n else :\n endtime=time()+timeout\n while not self._qsize():\n remaining=endtime -time()\n if remaining <=0.0:\n raise Empty\n self.not_empty.wait(remaining)\n item=self._get()\n self.not_full.notify()\n return item\n \n def put_nowait(self,item):\n ''\n\n\n\n \n return self.put(item,block=False )\n \n def get_nowait(self):\n ''\n\n\n\n \n return self.get(block=False )\n \n \n \n \n \n \n def _init(self,maxsize):\n self.queue=deque()\n \n def _qsize(self):\n return len(self.queue)\n \n \n def _put(self,item):\n self.queue.append(item)\n \n \n def _get(self):\n return self.queue.popleft()\n \n \nclass PriorityQueue(Queue):\n ''\n\n\n \n \n def _init(self,maxsize):\n self.queue=[]\n \n def _qsize(self):\n return len(self.queue)\n \n def _put(self,item):\n heappush(self.queue,item)\n \n def _get(self):\n return heappop(self.queue)\n \n \nclass LifoQueue(Queue):\n ''\n \n def _init(self,maxsize):\n self.queue=[]\n \n def _qsize(self):\n return len(self.queue)\n \n def _put(self,item):\n self.queue.append(item)\n \n def _get(self):\n return self.queue.pop()\n \n \nclass _PySimpleQueue:\n ''\n\n\n \n \n \n \n \n \n def __init__(self):\n self._queue=deque()\n self._count=threading.Semaphore(0)\n \n def put(self,item,block=True ,timeout=None ):\n ''\n\n\n\n \n self._queue.append(item)\n self._count.release()\n \n def get(self,block=True ,timeout=None ):\n ''\n\n\n\n\n\n\n\n\n \n if timeout is not None and timeout <0:\n raise ValueError(\"'timeout' must be a non-negative number\")\n if not self._count.acquire(block,timeout):\n raise Empty\n return self._queue.popleft()\n \n def put_nowait(self,item):\n ''\n\n\n\n \n return self.put(item,block=False )\n \n def get_nowait(self):\n ''\n\n\n\n \n return self.get(block=False )\n \n def empty(self):\n ''\n return len(self._queue)==0\n \n def qsize(self):\n ''\n return len(self._queue)\n \n \nif SimpleQueue is None :\n SimpleQueue=_PySimpleQueue\n", ["_queue", "collections", "heapq", "threading", "time"]], "quopri": [".py", "#! 
/usr/bin/env python3\n\n\"\"\"Conversions to/from quoted-printable transport encoding as per RFC 1521.\"\"\"\n\n\n\n__all__=[\"encode\",\"decode\",\"encodestring\",\"decodestring\"]\n\nESCAPE=b'='\nMAXLINESIZE=76\nHEX=b'0123456789ABCDEF'\nEMPTYSTRING=b''\n\ntry :\n from binascii import a2b_qp,b2a_qp\nexcept ImportError:\n a2b_qp=None\n b2a_qp=None\n \n \ndef needsquoting(c,quotetabs,header):\n ''\n\n\n\n\n \n assert isinstance(c,bytes)\n if c in b' \\t':\n return quotetabs\n \n if c ==b'_':\n return header\n return c ==ESCAPE or not (b' '<=c <=b'~')\n \ndef quote(c):\n ''\n assert isinstance(c,bytes)and len(c)==1\n c=ord(c)\n return ESCAPE+bytes((HEX[c //16],HEX[c %16]))\n \n \n \ndef encode(input,output,quotetabs,header=False ):\n ''\n\n\n\n\n\n \n \n if b2a_qp is not None :\n data=input.read()\n odata=b2a_qp(data,quotetabs=quotetabs,header=header)\n output.write(odata)\n return\n \n def write(s,output=output,lineEnd=b'\\n'):\n \n \n if s and s[-1:]in b' \\t':\n output.write(s[:-1]+quote(s[-1:])+lineEnd)\n elif s ==b'.':\n output.write(quote(s)+lineEnd)\n else :\n output.write(s+lineEnd)\n \n prevline=None\n while 1:\n line=input.readline()\n if not line:\n break\n outline=[]\n \n stripped=b''\n if line[-1:]==b'\\n':\n line=line[:-1]\n stripped=b'\\n'\n \n for c in line:\n c=bytes((c,))\n if needsquoting(c,quotetabs,header):\n c=quote(c)\n if header and c ==b' ':\n outline.append(b'_')\n else :\n outline.append(c)\n \n if prevline is not None :\n write(prevline)\n \n \n thisline=EMPTYSTRING.join(outline)\n while len(thisline)>MAXLINESIZE:\n \n \n write(thisline[:MAXLINESIZE -1],lineEnd=b'=\\n')\n thisline=thisline[MAXLINESIZE -1:]\n \n prevline=thisline\n \n if prevline is not None :\n write(prevline,lineEnd=stripped)\n \ndef encodestring(s,quotetabs=False ,header=False ):\n if b2a_qp is not None :\n return b2a_qp(s,quotetabs=quotetabs,header=header)\n from io import BytesIO\n infp=BytesIO(s)\n outfp=BytesIO()\n encode(infp,outfp,quotetabs,header)\n return outfp.getvalue()\n \n \n \ndef decode(input,output,header=False ):\n ''\n\n \n \n if a2b_qp is not None :\n data=input.read()\n odata=a2b_qp(data,header=header)\n output.write(odata)\n return\n \n new=b''\n while 1:\n line=input.readline()\n if not line:break\n i,n=0,len(line)\n if n >0 and line[n -1:n]==b'\\n':\n partial=0 ;n=n -1\n \n while n >0 and line[n -1:n]in b\" \\t\\r\":\n n=n -1\n else :\n partial=1\n while i ...) The substring matched by the group is accessible by name.\n (?P=name) Matches the text matched earlier by the group named name.\n (?#...) A comment; ignored.\n (?=...) Matches if ... matches next, but doesn't consume the string.\n (?!...) Matches if ... doesn't match next.\n (?<=...) Matches if preceded by ... 
(must be fixed length).\n (?=_MAXCACHE:\n \n try :\n del _cache[next(iter(_cache))]\n except (StopIteration,RuntimeError,KeyError):\n pass\n _cache[type(pattern),pattern,flags]=p\n return p\n \n@functools.lru_cache(_MAXCACHE)\ndef _compile_repl(repl,pattern):\n\n return sre_parse.parse_template(repl,pattern)\n \ndef _expand(pattern,match,template):\n\n template=sre_parse.parse_template(template,pattern)\n return sre_parse.expand_template(template,match)\n \ndef _subx(pattern,template):\n\n template=_compile_repl(template,pattern)\n if not template[0]and len(template[1])==1:\n \n return template[1][0]\n def filter(match,template=template):\n return sre_parse.expand_template(template,match)\n return filter\n \n \n \nimport copyreg\n\ndef _pickle(p):\n return _compile,(p.pattern,p.flags)\n \ncopyreg.pickle(Pattern,_pickle,_compile)\n\n\n\n\nclass Scanner:\n def __init__(self,lexicon,flags=0):\n from sre_constants import BRANCH,SUBPATTERN\n if isinstance(flags,RegexFlag):\n flags=flags.value\n self.lexicon=lexicon\n \n p=[]\n s=sre_parse.Pattern()\n s.flags=flags\n for phrase,action in lexicon:\n gid=s.opengroup()\n p.append(sre_parse.SubPattern(s,[\n (SUBPATTERN,(gid,0,0,sre_parse.parse(phrase,flags))),\n ]))\n s.closegroup(gid,p[-1])\n p=sre_parse.SubPattern(s,[(BRANCH,(None ,p))])\n self.scanner=sre_compile.compile(p)\n def scan(self,string):\n result=[]\n append=result.append\n match=self.scanner.scanner(string).match\n i=0\n while True :\n m=match()\n if not m:\n break\n j=m.end()\n if i ==j:\n break\n action=self.lexicon[m.lastindex -1][1]\n if callable(action):\n self.match=m\n action=action(self,m.group())\n if action is not None :\n append(action)\n i=j\n return result,string[i:]\n", ["_locale", "copyreg", "enum", "functools", "sre_compile", "sre_constants", "sre_parse"]], "reprlib": [".py", "''\n\n__all__=[\"Repr\",\"repr\",\"recursive_repr\"]\n\nimport builtins\nfrom itertools import islice\nfrom _thread import get_ident\n\ndef recursive_repr(fillvalue='...'):\n ''\n \n def decorating_function(user_function):\n repr_running=set()\n \n def wrapper(self):\n key=id(self),get_ident()\n if key in repr_running:\n return fillvalue\n repr_running.add(key)\n try :\n result=user_function(self)\n finally :\n repr_running.discard(key)\n return result\n \n \n wrapper.__module__=getattr(user_function,'__module__')\n wrapper.__doc__=getattr(user_function,'__doc__')\n wrapper.__name__=getattr(user_function,'__name__')\n wrapper.__qualname__=getattr(user_function,'__qualname__')\n wrapper.__annotations__=getattr(user_function,'__annotations__',{})\n return wrapper\n \n return decorating_function\n \nclass Repr:\n\n def __init__(self):\n self.maxlevel=6\n self.maxtuple=6\n self.maxlist=6\n self.maxarray=5\n self.maxdict=4\n self.maxset=6\n self.maxfrozenset=6\n self.maxdeque=6\n self.maxstring=30\n self.maxlong=40\n self.maxother=30\n \n def repr(self,x):\n return self.repr1(x,self.maxlevel)\n \n def repr1(self,x,level):\n typename=type(x).__name__\n if ' 'in typename:\n parts=typename.split()\n typename='_'.join(parts)\n if hasattr(self,'repr_'+typename):\n return getattr(self,'repr_'+typename)(x,level)\n else :\n return self.repr_instance(x,level)\n \n def _repr_iterable(self,x,level,left,right,maxiter,trail=''):\n n=len(x)\n if level <=0 and n:\n s='...'\n else :\n newlevel=level -1\n repr1=self.repr1\n pieces=[repr1(elem,newlevel)for elem in islice(x,maxiter)]\n if n >maxiter:pieces.append('...')\n s=', '.join(pieces)\n if n ==1 and trail:right=trail+right\n return '%s%s%s'%(left,s,right)\n \n 
def repr_tuple(self,x,level):\n return self._repr_iterable(x,level,'(',')',self.maxtuple,',')\n \n def repr_list(self,x,level):\n return self._repr_iterable(x,level,'[',']',self.maxlist)\n \n def repr_array(self,x,level):\n if not x:\n return \"array('%s')\"%x.typecode\n header=\"array('%s', [\"%x.typecode\n return self._repr_iterable(x,level,header,'])',self.maxarray)\n \n def repr_set(self,x,level):\n if not x:\n return 'set()'\n x=_possibly_sorted(x)\n return self._repr_iterable(x,level,'{','}',self.maxset)\n \n def repr_frozenset(self,x,level):\n if not x:\n return 'frozenset()'\n x=_possibly_sorted(x)\n return self._repr_iterable(x,level,'frozenset({','})',\n self.maxfrozenset)\n \n def repr_deque(self,x,level):\n return self._repr_iterable(x,level,'deque([','])',self.maxdeque)\n \n def repr_dict(self,x,level):\n n=len(x)\n if n ==0:return '{}'\n if level <=0:return '{...}'\n newlevel=level -1\n repr1=self.repr1\n pieces=[]\n for key in islice(_possibly_sorted(x),self.maxdict):\n keyrepr=repr1(key,newlevel)\n valrepr=repr1(x[key],newlevel)\n pieces.append('%s: %s'%(keyrepr,valrepr))\n if n >self.maxdict:pieces.append('...')\n s=', '.join(pieces)\n return '{%s}'%(s,)\n \n def repr_str(self,x,level):\n s=builtins.repr(x[:self.maxstring])\n if len(s)>self.maxstring:\n i=max(0,(self.maxstring -3)//2)\n j=max(0,self.maxstring -3 -i)\n s=builtins.repr(x[:i]+x[len(x)-j:])\n s=s[:i]+'...'+s[len(s)-j:]\n return s\n \n def repr_int(self,x,level):\n s=builtins.repr(x)\n if len(s)>self.maxlong:\n i=max(0,(self.maxlong -3)//2)\n j=max(0,self.maxlong -3 -i)\n s=s[:i]+'...'+s[len(s)-j:]\n return s\n \n def repr_instance(self,x,level):\n try :\n s=builtins.repr(x)\n \n \n except Exception:\n return '<%s instance at %#x>'%(x.__class__.__name__,id(x))\n if len(s)>self.maxother:\n i=max(0,(self.maxother -3)//2)\n j=max(0,self.maxother -3 -i)\n s=s[:i]+'...'+s[len(s)-j:]\n return s\n \n \ndef _possibly_sorted(x):\n\n\n\n try :\n return sorted(x)\n except Exception:\n return list(x)\n \naRepr=Repr()\nrepr=aRepr.repr\n", ["_thread", "builtins", "itertools"]], "select": [".py", "''\n\n\n\n\n\n\n\n\n\nimport errno\nimport os\n\nclass error(Exception):pass\n\nALL=None\n\n_exception_map={\n\n\n\n\n\n\n}\n\ndef _map_exception(exc,circumstance=ALL):\n try :\n mapped_exception=_exception_map[(exc.__class__,circumstance)]\n mapped_exception.java_exception=exc\n return mapped_exception\n except KeyError:\n return error(-1,'Unmapped java exception: <%s:%s>'%(exc.toString(),circumstance))\n \nPOLLIN=1\nPOLLOUT=2\n\n\n\n\n\nPOLLPRI=4\nPOLLERR=8\nPOLLHUP=16\nPOLLNVAL=32\n\ndef _getselectable(selectable_object):\n try :\n channel=selectable_object.getchannel()\n except :\n try :\n channel=selectable_object.fileno().getChannel()\n except :\n raise TypeError(\"Object '%s' is not watchable\"%selectable_object,\n errno.ENOTSOCK)\n \n if channel and not isinstance(channel,java.nio.channels.SelectableChannel):\n raise TypeError(\"Object '%s' is not watchable\"%selectable_object,\n errno.ENOTSOCK)\n return channel\n \nclass poll:\n\n def __init__(self):\n self.selector=java.nio.channels.Selector.open()\n self.chanmap={}\n self.unconnected_sockets=[]\n \n def _register_channel(self,socket_object,channel,mask):\n jmask=0\n if mask&POLLIN:\n \n if channel.validOps()&OP_ACCEPT:\n jmask=OP_ACCEPT\n else :\n jmask=OP_READ\n if mask&POLLOUT:\n if channel.validOps()&OP_WRITE:\n jmask |=OP_WRITE\n if channel.validOps()&OP_CONNECT:\n jmask |=OP_CONNECT\n selectionkey=channel.register(self.selector,jmask)\n 
self.chanmap[channel]=(socket_object,selectionkey)\n \n def _check_unconnected_sockets(self):\n temp_list=[]\n for socket_object,mask in self.unconnected_sockets:\n channel=_getselectable(socket_object)\n if channel is not None :\n self._register_channel(socket_object,channel,mask)\n else :\n temp_list.append((socket_object,mask))\n self.unconnected_sockets=temp_list\n \n def register(self,socket_object,mask=POLLIN |POLLOUT |POLLPRI):\n try :\n channel=_getselectable(socket_object)\n if channel is None :\n \n \n self.unconnected_sockets.append((socket_object,mask))\n return\n self._register_channel(socket_object,channel,mask)\n except BaseException:\n \n raise _map_exception(jlx)\n \n def unregister(self,socket_object):\n try :\n channel=_getselectable(socket_object)\n self.chanmap[channel][1].cancel()\n del self.chanmap[channel]\n except BaseException:\n \n raise _map_exception(jlx)\n \n def _dopoll(self,timeout):\n if timeout is None or timeout <0:\n self.selector.select()\n else :\n try :\n timeout=int(timeout)\n if not timeout:\n self.selector.selectNow()\n else :\n \n self.selector.select(timeout)\n except ValueError as vx:\n raise error(\"poll timeout must be a number of milliseconds or None\",errno.EINVAL)\n \n return self.selector.selectedKeys()\n \n def poll(self,timeout=None ):\n try :\n self._check_unconnected_sockets()\n selectedkeys=self._dopoll(timeout)\n results=[]\n for k in selectedkeys.iterator():\n jmask=k.readyOps()\n pymask=0\n if jmask&OP_READ:pymask |=POLLIN\n if jmask&OP_WRITE:pymask |=POLLOUT\n if jmask&OP_ACCEPT:pymask |=POLLIN\n if jmask&OP_CONNECT:pymask |=POLLOUT\n \n results.append((self.chanmap[k.channel()][0],pymask))\n return results\n except BaseException:\n \n raise _map_exception(jlx)\n \n def _deregister_all(self):\n try :\n for k in self.selector.keys():\n k.cancel()\n \n self.selector.selectNow()\n except BaseException:\n \n raise _map_exception(jlx)\n \n def close(self):\n try :\n self._deregister_all()\n self.selector.close()\n except BaseException:\n \n raise _map_exception(jlx)\n \ndef _calcselecttimeoutvalue(value):\n if value is None :\n return None\n try :\n floatvalue=float(value)\n except Exception as x:\n raise TypeError(\"Select timeout value must be a number or None\")\n if value <0:\n raise error(\"Select timeout value cannot be negative\",errno.EINVAL)\n if floatvalue <0.000001:\n return 0\n return int(floatvalue *1000)\n \n \n \n \nclass poll_object_cache:\n\n def __init__(self):\n self.is_windows=os.name =='nt'\n if self.is_windows:\n self.poll_object_queue=Queue.Queue()\n import atexit\n atexit.register(self.finalize)\n \n def get_poll_object(self):\n if not self.is_windows:\n return poll()\n try :\n return self.poll_object_queue.get(False )\n except Queue.Empty:\n return poll()\n \n def release_poll_object(self,pobj):\n if self.is_windows:\n pobj._deregister_all()\n self.poll_object_queue.put(pobj)\n else :\n pobj.close()\n \n def finalize(self):\n if self.is_windows:\n while True :\n try :\n p=self.poll_object_queue.get(False )\n p.close()\n except Queue.Empty:\n return\n \n_poll_object_cache=poll_object_cache()\n\ndef native_select(read_fd_list,write_fd_list,outofband_fd_list,timeout=None ):\n timeout=_calcselecttimeoutvalue(timeout)\n \n pobj=_poll_object_cache.get_poll_object()\n try :\n registered_for_read={}\n \n for fd in read_fd_list:\n pobj.register(fd,POLLIN)\n registered_for_read[fd]=1\n \n for fd in write_fd_list:\n if fd in registered_for_read:\n \n pobj.register(fd,POLLIN |POLLOUT)\n else :\n pobj.register(fd,POLLOUT)\n 
results=pobj.poll(timeout)\n \n read_ready_list,write_ready_list,oob_ready_list=[],[],[]\n for fd,mask in results:\n if mask&POLLIN:\n read_ready_list.append(fd)\n if mask&POLLOUT:\n write_ready_list.append(fd)\n return read_ready_list,write_ready_list,oob_ready_list\n finally :\n _poll_object_cache.release_poll_object(pobj)\n \nselect=native_select\n\ndef cpython_compatible_select(read_fd_list,write_fd_list,outofband_fd_list,timeout=None ):\n\n\n modified_channels=[]\n try :\n for socket_list in [read_fd_list,write_fd_list,outofband_fd_list]:\n for s in socket_list:\n channel=_getselectable(s)\n if channel.isBlocking():\n modified_channels.append(channel)\n channel.configureBlocking(0)\n return native_select(read_fd_list,write_fd_list,outofband_fd_list,timeout)\n finally :\n for channel in modified_channels:\n channel.configureBlocking(1)\n", ["atexit", "errno", "os"]], "selectors": [".py", "''\n\n\n\n\n\n\nfrom abc import ABCMeta,abstractmethod\nfrom collections import namedtuple\nfrom collections.abc import Mapping\nimport math\nimport select\nimport sys\n\n\n\nEVENT_READ=(1 <<0)\nEVENT_WRITE=(1 <<1)\n\n\ndef _fileobj_to_fd(fileobj):\n ''\n\n\n\n\n\n\n\n\n\n \n if isinstance(fileobj,int):\n fd=fileobj\n else :\n try :\n fd=int(fileobj.fileno())\n except (AttributeError,TypeError,ValueError):\n raise ValueError(\"Invalid file object: \"\n \"{!r}\".format(fileobj))from None\n if fd <0:\n raise ValueError(\"Invalid file descriptor: {}\".format(fd))\n return fd\n \n \nSelectorKey=namedtuple('SelectorKey',['fileobj','fd','events','data'])\n\nSelectorKey.__doc__=\"\"\"SelectorKey(fileobj, fd, events, data)\n\n Object used to associate a file object to its backing\n file descriptor, selected event mask, and attached data.\n\"\"\"\nif sys.version_info >=(3,5):\n SelectorKey.fileobj.__doc__='File object registered.'\n SelectorKey.fd.__doc__='Underlying file descriptor.'\n SelectorKey.events.__doc__='Events that must be waited for on this file object.'\n SelectorKey.data.__doc__=('''Optional opaque data associated to this file object.\n For example, this could be used to store a per-client session ID.''')\n \nclass _SelectorMapping(Mapping):\n ''\n \n def __init__(self,selector):\n self._selector=selector\n \n def __len__(self):\n return len(self._selector._fd_to_key)\n \n def __getitem__(self,fileobj):\n try :\n fd=self._selector._fileobj_lookup(fileobj)\n return self._selector._fd_to_key[fd]\n except KeyError:\n raise KeyError(\"{!r} is not registered\".format(fileobj))from None\n \n def __iter__(self):\n return iter(self._selector._fd_to_key)\n \n \nclass BaseSelector(metaclass=ABCMeta):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n \n @abstractmethod\n def register(self,fileobj,events,data=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n raise NotImplementedError\n \n @abstractmethod\n def unregister(self,fileobj):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n raise NotImplementedError\n \n def modify(self,fileobj,events,data=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n self.unregister(fileobj)\n return self.register(fileobj,events,data)\n \n @abstractmethod\n def select(self,timeout=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n raise NotImplementedError\n \n def close(self):\n ''\n\n\n \n pass\n \n def get_key(self,fileobj):\n ''\n\n\n\n \n mapping=self.get_map()\n if mapping is None :\n raise RuntimeError('Selector is closed')\n try :\n return mapping[fileobj]\n except KeyError:\n raise KeyError(\"{!r} is not registered\".format(fileobj))from None\n \n @abstractmethod\n def get_map(self):\n ''\n raise 
NotImplementedError\n \n def __enter__(self):\n return self\n \n def __exit__(self,*args):\n self.close()\n \n \nclass _BaseSelectorImpl(BaseSelector):\n ''\n \n def __init__(self):\n \n self._fd_to_key={}\n \n self._map=_SelectorMapping(self)\n \n def _fileobj_lookup(self,fileobj):\n ''\n\n\n\n\n\n\n \n try :\n return _fileobj_to_fd(fileobj)\n except ValueError:\n \n for key in self._fd_to_key.values():\n if key.fileobj is fileobj:\n return key.fd\n \n raise\n \n def register(self,fileobj,events,data=None ):\n if (not events)or (events&~(EVENT_READ |EVENT_WRITE)):\n raise ValueError(\"Invalid events: {!r}\".format(events))\n \n key=SelectorKey(fileobj,self._fileobj_lookup(fileobj),events,data)\n \n if key.fd in self._fd_to_key:\n raise KeyError(\"{!r} (FD {}) is already registered\"\n .format(fileobj,key.fd))\n \n self._fd_to_key[key.fd]=key\n return key\n \n def unregister(self,fileobj):\n try :\n key=self._fd_to_key.pop(self._fileobj_lookup(fileobj))\n except KeyError:\n raise KeyError(\"{!r} is not registered\".format(fileobj))from None\n return key\n \n def modify(self,fileobj,events,data=None ):\n try :\n key=self._fd_to_key[self._fileobj_lookup(fileobj)]\n except KeyError:\n raise KeyError(\"{!r} is not registered\".format(fileobj))from None\n if events !=key.events:\n self.unregister(fileobj)\n key=self.register(fileobj,events,data)\n elif data !=key.data:\n \n key=key._replace(data=data)\n self._fd_to_key[key.fd]=key\n return key\n \n def close(self):\n self._fd_to_key.clear()\n self._map=None\n \n def get_map(self):\n return self._map\n \n def _key_from_fd(self,fd):\n ''\n\n\n\n\n\n\n \n try :\n return self._fd_to_key[fd]\n except KeyError:\n return None\n \n \nclass SelectSelector(_BaseSelectorImpl):\n ''\n \n def __init__(self):\n super().__init__()\n self._readers=set()\n self._writers=set()\n \n def register(self,fileobj,events,data=None ):\n key=super().register(fileobj,events,data)\n if events&EVENT_READ:\n self._readers.add(key.fd)\n if events&EVENT_WRITE:\n self._writers.add(key.fd)\n return key\n \n def unregister(self,fileobj):\n key=super().unregister(fileobj)\n self._readers.discard(key.fd)\n self._writers.discard(key.fd)\n return key\n \n if sys.platform =='win32':\n def _select(self,r,w,_,timeout=None ):\n r,w,x=select.select(r,w,w,timeout)\n return r,w+x,[]\n else :\n _select=select.select\n \n def select(self,timeout=None ):\n timeout=None if timeout is None else max(timeout,0)\n ready=[]\n try :\n r,w,_=self._select(self._readers,self._writers,[],timeout)\n except InterruptedError:\n return ready\n r=set(r)\n w=set(w)\n for fd in r |w:\n events=0\n if fd in r:\n events |=EVENT_READ\n if fd in w:\n events |=EVENT_WRITE\n \n key=self._key_from_fd(fd)\n if key:\n ready.append((key,events&key.events))\n return ready\n \n \nclass _PollLikeSelector(_BaseSelectorImpl):\n ''\n _selector_cls=None\n _EVENT_READ=None\n _EVENT_WRITE=None\n \n def __init__(self):\n super().__init__()\n self._selector=self._selector_cls()\n \n def register(self,fileobj,events,data=None ):\n key=super().register(fileobj,events,data)\n poller_events=0\n if events&EVENT_READ:\n poller_events |=self._EVENT_READ\n if events&EVENT_WRITE:\n poller_events |=self._EVENT_WRITE\n try :\n self._selector.register(key.fd,poller_events)\n except :\n super().unregister(fileobj)\n raise\n return key\n \n def unregister(self,fileobj):\n key=super().unregister(fileobj)\n try :\n self._selector.unregister(key.fd)\n except OSError:\n \n \n pass\n return key\n \n def modify(self,fileobj,events,data=None ):\n try 
:\n key=self._fd_to_key[self._fileobj_lookup(fileobj)]\n except KeyError:\n raise KeyError(f\"{fileobj!r} is not registered\")from None\n \n changed=False\n if events !=key.events:\n selector_events=0\n if events&EVENT_READ:\n selector_events |=self._EVENT_READ\n if events&EVENT_WRITE:\n selector_events |=self._EVENT_WRITE\n try :\n self._selector.modify(key.fd,selector_events)\n except :\n super().unregister(fileobj)\n raise\n changed=True\n if data !=key.data:\n changed=True\n \n if changed:\n key=key._replace(events=events,data=data)\n self._fd_to_key[key.fd]=key\n return key\n \n def select(self,timeout=None ):\n \n \n if timeout is None :\n timeout=None\n elif timeout <=0:\n timeout=0\n else :\n \n \n timeout=math.ceil(timeout *1e3)\n ready=[]\n try :\n fd_event_list=self._selector.poll(timeout)\n except InterruptedError:\n return ready\n for fd,event in fd_event_list:\n events=0\n if event&~self._EVENT_READ:\n events |=EVENT_WRITE\n if event&~self._EVENT_WRITE:\n events |=EVENT_READ\n \n key=self._key_from_fd(fd)\n if key:\n ready.append((key,events&key.events))\n return ready\n \n \nif hasattr(select,'poll'):\n\n class PollSelector(_PollLikeSelector):\n ''\n _selector_cls=select.poll\n _EVENT_READ=select.POLLIN\n _EVENT_WRITE=select.POLLOUT\n \n \nif hasattr(select,'epoll'):\n\n class EpollSelector(_PollLikeSelector):\n ''\n _selector_cls=select.epoll\n _EVENT_READ=select.EPOLLIN\n _EVENT_WRITE=select.EPOLLOUT\n \n def fileno(self):\n return self._selector.fileno()\n \n def select(self,timeout=None ):\n if timeout is None :\n timeout=-1\n elif timeout <=0:\n timeout=0\n else :\n \n \n timeout=math.ceil(timeout *1e3)*1e-3\n \n \n \n \n max_ev=max(len(self._fd_to_key),1)\n \n ready=[]\n try :\n fd_event_list=self._selector.poll(timeout,max_ev)\n except InterruptedError:\n return ready\n for fd,event in fd_event_list:\n events=0\n if event&~select.EPOLLIN:\n events |=EVENT_WRITE\n if event&~select.EPOLLOUT:\n events |=EVENT_READ\n \n key=self._key_from_fd(fd)\n if key:\n ready.append((key,events&key.events))\n return ready\n \n def close(self):\n self._selector.close()\n super().close()\n \n \nif hasattr(select,'devpoll'):\n\n class DevpollSelector(_PollLikeSelector):\n ''\n _selector_cls=select.devpoll\n _EVENT_READ=select.POLLIN\n _EVENT_WRITE=select.POLLOUT\n \n def fileno(self):\n return self._selector.fileno()\n \n def close(self):\n self._selector.close()\n super().close()\n \n \nif hasattr(select,'kqueue'):\n\n class KqueueSelector(_BaseSelectorImpl):\n ''\n \n def __init__(self):\n super().__init__()\n self._selector=select.kqueue()\n \n def fileno(self):\n return self._selector.fileno()\n \n def register(self,fileobj,events,data=None ):\n key=super().register(fileobj,events,data)\n try :\n if events&EVENT_READ:\n kev=select.kevent(key.fd,select.KQ_FILTER_READ,\n select.KQ_EV_ADD)\n self._selector.control([kev],0,0)\n if events&EVENT_WRITE:\n kev=select.kevent(key.fd,select.KQ_FILTER_WRITE,\n select.KQ_EV_ADD)\n self._selector.control([kev],0,0)\n except :\n super().unregister(fileobj)\n raise\n return key\n \n def unregister(self,fileobj):\n key=super().unregister(fileobj)\n if key.events&EVENT_READ:\n kev=select.kevent(key.fd,select.KQ_FILTER_READ,\n select.KQ_EV_DELETE)\n try :\n self._selector.control([kev],0,0)\n except OSError:\n \n \n pass\n if key.events&EVENT_WRITE:\n kev=select.kevent(key.fd,select.KQ_FILTER_WRITE,\n select.KQ_EV_DELETE)\n try :\n self._selector.control([kev],0,0)\n except OSError:\n \n pass\n return key\n \n def select(self,timeout=None ):\n 
timeout=None if timeout is None else max(timeout,0)\n max_ev=len(self._fd_to_key)\n ready=[]\n try :\n kev_list=self._selector.control(None ,max_ev,timeout)\n except InterruptedError:\n return ready\n for kev in kev_list:\n fd=kev.ident\n flag=kev.filter\n events=0\n if flag ==select.KQ_FILTER_READ:\n events |=EVENT_READ\n if flag ==select.KQ_FILTER_WRITE:\n events |=EVENT_WRITE\n \n key=self._key_from_fd(fd)\n if key:\n ready.append((key,events&key.events))\n return ready\n \n def close(self):\n self._selector.close()\n super().close()\n \n \n \n \n \nif 'KqueueSelector'in globals():\n DefaultSelector=KqueueSelector\nelif 'EpollSelector'in globals():\n DefaultSelector=EpollSelector\nelif 'DevpollSelector'in globals():\n DefaultSelector=DevpollSelector\nelif 'PollSelector'in globals():\n DefaultSelector=PollSelector\nelse :\n DefaultSelector=SelectSelector\n", ["abc", "collections", "collections.abc", "math", "select", "sys"]], "shlex": [".py", "''\n\n\n\n\n\n\n\n\nimport os\nimport re\nimport sys\nfrom collections import deque\n\nfrom io import StringIO\n\n__all__=[\"shlex\",\"split\",\"quote\"]\n\nclass shlex:\n ''\n def __init__(self,instream=None ,infile=None ,posix=False ,\n punctuation_chars=False ):\n if isinstance(instream,str):\n instream=StringIO(instream)\n if instream is not None :\n self.instream=instream\n self.infile=infile\n else :\n self.instream=sys.stdin\n self.infile=None\n self.posix=posix\n if posix:\n self.eof=None\n else :\n self.eof=''\n self.commenters='#'\n self.wordchars=('abcdfeghijklmnopqrstuvwxyz'\n 'ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_')\n if self.posix:\n self.wordchars +=('\u00df\u00e0\u00e1\u00e2\u00e3\u00e4\u00e5\u00e6\u00e7\u00e8\u00e9\u00ea\u00eb\u00ec\u00ed\u00ee\u00ef\u00f0\u00f1\u00f2\u00f3\u00f4\u00f5\u00f6\u00f8\u00f9\u00fa\u00fb\u00fc\u00fd\u00fe\u00ff'\n '\u00c0\u00c1\u00c2\u00c3\u00c4\u00c5\u00c6\u00c7\u00c8\u00c9\u00ca\u00cb\u00cc\u00cd\u00ce\u00cf\u00d0\u00d1\u00d2\u00d3\u00d4\u00d5\u00d6\u00d8\u00d9\u00da\u00db\u00dc\u00dd\u00de')\n self.whitespace=' \\t\\r\\n'\n self.whitespace_split=False\n self.quotes='\\'\"'\n self.escape='\\\\'\n self.escapedquotes='\"'\n self.state=' '\n self.pushback=deque()\n self.lineno=1\n self.debug=0\n self.token=''\n self.filestack=deque()\n self.source=None\n if not punctuation_chars:\n punctuation_chars=''\n elif punctuation_chars is True :\n punctuation_chars='();<>|&'\n self.punctuation_chars=punctuation_chars\n if punctuation_chars:\n \n self._pushback_chars=deque()\n \n self.wordchars +='~-./*?='\n \n t=self.wordchars.maketrans(dict.fromkeys(punctuation_chars))\n self.wordchars=self.wordchars.translate(t)\n \n def push_token(self,tok):\n ''\n if self.debug >=1:\n print(\"shlex: pushing token \"+repr(tok))\n self.pushback.appendleft(tok)\n \n def push_source(self,newstream,newfile=None ):\n ''\n if isinstance(newstream,str):\n newstream=StringIO(newstream)\n self.filestack.appendleft((self.infile,self.instream,self.lineno))\n self.infile=newfile\n self.instream=newstream\n self.lineno=1\n if self.debug:\n if newfile is not None :\n print('shlex: pushing to file %s'%(self.infile,))\n else :\n print('shlex: pushing to stream %s'%(self.instream,))\n \n def pop_source(self):\n ''\n self.instream.close()\n (self.infile,self.instream,self.lineno)=self.filestack.popleft()\n if self.debug:\n print('shlex: popping to %s, line %d'\\\n %(self.instream,self.lineno))\n self.state=' '\n \n def get_token(self):\n ''\n if self.pushback:\n tok=self.pushback.popleft()\n if self.debug >=1:\n print(\"shlex: popping token 
\"+repr(tok))\n return tok\n \n raw=self.read_token()\n \n if self.source is not None :\n while raw ==self.source:\n spec=self.sourcehook(self.read_token())\n if spec:\n (newfile,newstream)=spec\n self.push_source(newstream,newfile)\n raw=self.get_token()\n \n while raw ==self.eof:\n if not self.filestack:\n return self.eof\n else :\n self.pop_source()\n raw=self.get_token()\n \n if self.debug >=1:\n if raw !=self.eof:\n print(\"shlex: token=\"+repr(raw))\n else :\n print(\"shlex: token=EOF\")\n return raw\n \n def read_token(self):\n quoted=False\n escapedstate=' '\n while True :\n if self.punctuation_chars and self._pushback_chars:\n nextchar=self._pushback_chars.pop()\n else :\n nextchar=self.instream.read(1)\n if nextchar =='\\n':\n self.lineno +=1\n if self.debug >=3:\n print(\"shlex: in state %r I see character: %r\"%(self.state,\n nextchar))\n if self.state is None :\n self.token=''\n break\n elif self.state ==' ':\n if not nextchar:\n self.state=None\n break\n elif nextchar in self.whitespace:\n if self.debug >=2:\n print(\"shlex: I see whitespace in whitespace state\")\n if self.token or (self.posix and quoted):\n break\n else :\n continue\n elif nextchar in self.commenters:\n self.instream.readline()\n self.lineno +=1\n elif self.posix and nextchar in self.escape:\n escapedstate='a'\n self.state=nextchar\n elif nextchar in self.wordchars:\n self.token=nextchar\n self.state='a'\n elif nextchar in self.punctuation_chars:\n self.token=nextchar\n self.state='c'\n elif nextchar in self.quotes:\n if not self.posix:\n self.token=nextchar\n self.state=nextchar\n elif self.whitespace_split:\n self.token=nextchar\n self.state='a'\n else :\n self.token=nextchar\n if self.token or (self.posix and quoted):\n break\n else :\n continue\n elif self.state in self.quotes:\n quoted=True\n if not nextchar:\n if self.debug >=2:\n print(\"shlex: I see EOF in quotes state\")\n \n raise ValueError(\"No closing quotation\")\n if nextchar ==self.state:\n if not self.posix:\n self.token +=nextchar\n self.state=' '\n break\n else :\n self.state='a'\n elif (self.posix and nextchar in self.escape and self.state\n in self.escapedquotes):\n escapedstate=self.state\n self.state=nextchar\n else :\n self.token +=nextchar\n elif self.state in self.escape:\n if not nextchar:\n if self.debug >=2:\n print(\"shlex: I see EOF in escape state\")\n \n raise ValueError(\"No escaped character\")\n \n \n if (escapedstate in self.quotes and\n nextchar !=self.state and nextchar !=escapedstate):\n self.token +=self.state\n self.token +=nextchar\n self.state=escapedstate\n elif self.state in ('a','c'):\n if not nextchar:\n self.state=None\n break\n elif nextchar in self.whitespace:\n if self.debug >=2:\n print(\"shlex: I see whitespace in word state\")\n self.state=' '\n if self.token or (self.posix and quoted):\n break\n else :\n continue\n elif nextchar in self.commenters:\n self.instream.readline()\n self.lineno +=1\n if self.posix:\n self.state=' '\n if self.token or (self.posix and quoted):\n break\n else :\n continue\n elif self.state =='c':\n if nextchar in self.punctuation_chars:\n self.token +=nextchar\n else :\n if nextchar not in self.whitespace:\n self._pushback_chars.append(nextchar)\n self.state=' '\n break\n elif self.posix and nextchar in self.quotes:\n self.state=nextchar\n elif self.posix and nextchar in self.escape:\n escapedstate='a'\n self.state=nextchar\n elif (nextchar in self.wordchars or nextchar in self.quotes\n or self.whitespace_split):\n self.token +=nextchar\n else :\n if self.punctuation_chars:\n 
self._pushback_chars.append(nextchar)\n else :\n self.pushback.appendleft(nextchar)\n if self.debug >=2:\n print(\"shlex: I see punctuation in word state\")\n self.state=' '\n if self.token or (self.posix and quoted):\n break\n else :\n continue\n result=self.token\n self.token=''\n if self.posix and not quoted and result =='':\n result=None\n if self.debug >1:\n if result:\n print(\"shlex: raw token=\"+repr(result))\n else :\n print(\"shlex: raw token=EOF\")\n return result\n \n def sourcehook(self,newfile):\n ''\n if newfile[0]=='\"':\n newfile=newfile[1:-1]\n \n if isinstance(self.infile,str)and not os.path.isabs(newfile):\n newfile=os.path.join(os.path.dirname(self.infile),newfile)\n return (newfile,open(newfile,\"r\"))\n \n def error_leader(self,infile=None ,lineno=None ):\n ''\n if infile is None :\n infile=self.infile\n if lineno is None :\n lineno=self.lineno\n return \"\\\"%s\\\", line %d: \"%(infile,lineno)\n \n def __iter__(self):\n return self\n \n def __next__(self):\n token=self.get_token()\n if token ==self.eof:\n raise StopIteration\n return token\n \ndef split(s,comments=False ,posix=True ):\n lex=shlex(s,posix=posix)\n lex.whitespace_split=True\n if not comments:\n lex.commenters=''\n return list(lex)\n \n \n_find_unsafe=re.compile(r'[^\\w@%+=:,./-]',re.ASCII).search\n\ndef quote(s):\n ''\n if not s:\n return \"''\"\n if _find_unsafe(s)is None :\n return s\n \n \n \n return \"'\"+s.replace(\"'\",\"'\\\"'\\\"'\")+\"'\"\n \n \ndef _print_tokens(lexer):\n while 1:\n tt=lexer.get_token()\n if not tt:\n break\n print(\"Token: \"+repr(tt))\n \nif __name__ =='__main__':\n if len(sys.argv)==1:\n _print_tokens(shlex())\n else :\n fn=sys.argv[1]\n with open(fn)as f:\n _print_tokens(shlex(f,fn))\n", ["collections", "io", "os", "re", "sys"]], "shutil": [".py", "''\n\n\n\n\n\nimport os\nimport sys\nimport stat\nimport fnmatch\nimport collections\nimport errno\n\ntry :\n import zlib\n del zlib\n _ZLIB_SUPPORTED=True\nexcept ImportError:\n _ZLIB_SUPPORTED=False\n \ntry :\n import bz2\n del bz2\n _BZ2_SUPPORTED=True\nexcept ImportError:\n _BZ2_SUPPORTED=False\n \ntry :\n import lzma\n del lzma\n _LZMA_SUPPORTED=True\nexcept ImportError:\n _LZMA_SUPPORTED=False\n \ntry :\n from pwd import getpwnam\nexcept ImportError:\n getpwnam=None\n \ntry :\n from grp import getgrnam\nexcept ImportError:\n getgrnam=None\n \n__all__=[\"copyfileobj\",\"copyfile\",\"copymode\",\"copystat\",\"copy\",\"copy2\",\n\"copytree\",\"move\",\"rmtree\",\"Error\",\"SpecialFileError\",\n\"ExecError\",\"make_archive\",\"get_archive_formats\",\n\"register_archive_format\",\"unregister_archive_format\",\n\"get_unpack_formats\",\"register_unpack_format\",\n\"unregister_unpack_format\",\"unpack_archive\",\n\"ignore_patterns\",\"chown\",\"which\",\"get_terminal_size\",\n\"SameFileError\"]\n\n\nclass Error(OSError):\n pass\n \nclass SameFileError(Error):\n ''\n \nclass SpecialFileError(OSError):\n ''\n \n \nclass ExecError(OSError):\n ''\n \nclass ReadError(OSError):\n ''\n \nclass RegistryError(Exception):\n ''\n \n \n \ndef copyfileobj(fsrc,fdst,length=16 *1024):\n ''\n while 1:\n buf=fsrc.read(length)\n if not buf:\n break\n fdst.write(buf)\n \ndef _samefile(src,dst):\n\n if hasattr(os.path,'samefile'):\n try :\n return os.path.samefile(src,dst)\n except OSError:\n return False\n \n \n return (os.path.normcase(os.path.abspath(src))==\n os.path.normcase(os.path.abspath(dst)))\n \ndef copyfile(src,dst,*,follow_symlinks=True ):\n ''\n\n\n\n\n \n if _samefile(src,dst):\n raise SameFileError(\"{!r} and {!r} are the same 
file\".format(src,dst))\n \n for fn in [src,dst]:\n try :\n st=os.stat(fn)\n except OSError:\n \n pass\n else :\n \n if stat.S_ISFIFO(st.st_mode):\n raise SpecialFileError(\"`%s` is a named pipe\"%fn)\n \n if not follow_symlinks and os.path.islink(src):\n os.symlink(os.readlink(src),dst)\n else :\n with open(src,'rb')as fsrc:\n with open(dst,'wb')as fdst:\n copyfileobj(fsrc,fdst)\n return dst\n \ndef copymode(src,dst,*,follow_symlinks=True ):\n ''\n\n\n\n\n\n \n if not follow_symlinks and os.path.islink(src)and os.path.islink(dst):\n if hasattr(os,'lchmod'):\n stat_func,chmod_func=os.lstat,os.lchmod\n else :\n return\n elif hasattr(os,'chmod'):\n stat_func,chmod_func=os.stat,os.chmod\n else :\n return\n \n st=stat_func(src)\n chmod_func(dst,stat.S_IMODE(st.st_mode))\n \nif hasattr(os,'listxattr'):\n def _copyxattr(src,dst,*,follow_symlinks=True ):\n ''\n\n\n\n\n\n \n \n try :\n names=os.listxattr(src,follow_symlinks=follow_symlinks)\n except OSError as e:\n if e.errno not in (errno.ENOTSUP,errno.ENODATA):\n raise\n return\n for name in names:\n try :\n value=os.getxattr(src,name,follow_symlinks=follow_symlinks)\n os.setxattr(dst,name,value,follow_symlinks=follow_symlinks)\n except OSError as e:\n if e.errno not in (errno.EPERM,errno.ENOTSUP,errno.ENODATA):\n raise\nelse :\n def _copyxattr(*args,**kwargs):\n pass\n \ndef copystat(src,dst,*,follow_symlinks=True ):\n ''\n\n\n\n\n \n def _nop(*args,ns=None ,follow_symlinks=None ):\n pass\n \n \n follow=follow_symlinks or not (os.path.islink(src)and os.path.islink(dst))\n if follow:\n \n def lookup(name):\n return getattr(os,name,_nop)\n else :\n \n \n def lookup(name):\n fn=getattr(os,name,_nop)\n if fn in os.supports_follow_symlinks:\n return fn\n return _nop\n \n st=lookup(\"stat\")(src,follow_symlinks=follow)\n mode=stat.S_IMODE(st.st_mode)\n lookup(\"utime\")(dst,ns=(st.st_atime_ns,st.st_mtime_ns),\n follow_symlinks=follow)\n try :\n lookup(\"chmod\")(dst,mode,follow_symlinks=follow)\n except NotImplementedError:\n \n \n \n \n \n \n \n \n \n \n pass\n if hasattr(st,'st_flags'):\n try :\n lookup(\"chflags\")(dst,st.st_flags,follow_symlinks=follow)\n except OSError as why:\n for err in 'EOPNOTSUPP','ENOTSUP':\n if hasattr(errno,err)and why.errno ==getattr(errno,err):\n break\n else :\n raise\n _copyxattr(src,dst,follow_symlinks=follow)\n \ndef copy(src,dst,*,follow_symlinks=True ):\n ''\n\n\n\n\n\n\n\n\n\n \n if os.path.isdir(dst):\n dst=os.path.join(dst,os.path.basename(src))\n copyfile(src,dst,follow_symlinks=follow_symlinks)\n copymode(src,dst,follow_symlinks=follow_symlinks)\n return dst\n \ndef copy2(src,dst,*,follow_symlinks=True ):\n ''\n\n\n\n\n\n\n\n \n if os.path.isdir(dst):\n dst=os.path.join(dst,os.path.basename(src))\n copyfile(src,dst,follow_symlinks=follow_symlinks)\n copystat(src,dst,follow_symlinks=follow_symlinks)\n return dst\n \ndef ignore_patterns(*patterns):\n ''\n\n\n \n def _ignore_patterns(path,names):\n ignored_names=[]\n for pattern in patterns:\n ignored_names.extend(fnmatch.filter(names,pattern))\n return set(ignored_names)\n return _ignore_patterns\n \ndef copytree(src,dst,symlinks=False ,ignore=None ,copy_function=copy2,\nignore_dangling_symlinks=False ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n names=os.listdir(src)\n if ignore is not None :\n ignored_names=ignore(src,names)\n else :\n ignored_names=set()\n \n os.makedirs(dst)\n errors=[]\n for name in names:\n if name in ignored_names:\n continue\n srcname=os.path.join(src,name)\n dstname=os.path.join(dst,name)\n try :\n if 
os.path.islink(srcname):\n linkto=os.readlink(srcname)\n if symlinks:\n \n \n \n os.symlink(linkto,dstname)\n copystat(srcname,dstname,follow_symlinks=not symlinks)\n else :\n \n if not os.path.exists(linkto)and ignore_dangling_symlinks:\n continue\n \n if os.path.isdir(srcname):\n copytree(srcname,dstname,symlinks,ignore,\n copy_function)\n else :\n copy_function(srcname,dstname)\n elif os.path.isdir(srcname):\n copytree(srcname,dstname,symlinks,ignore,copy_function)\n else :\n \n copy_function(srcname,dstname)\n \n \n except Error as err:\n errors.extend(err.args[0])\n except OSError as why:\n errors.append((srcname,dstname,str(why)))\n try :\n copystat(src,dst)\n except OSError as why:\n \n if getattr(why,'winerror',None )is None :\n errors.append((src,dst,str(why)))\n if errors:\n raise Error(errors)\n return dst\n \n \ndef _rmtree_unsafe(path,onerror):\n try :\n with os.scandir(path)as scandir_it:\n entries=list(scandir_it)\n except OSError:\n onerror(os.scandir,path,sys.exc_info())\n entries=[]\n for entry in entries:\n fullname=entry.path\n try :\n is_dir=entry.is_dir(follow_symlinks=False )\n except OSError:\n is_dir=False\n if is_dir:\n try :\n if entry.is_symlink():\n \n \n \n raise OSError(\"Cannot call rmtree on a symbolic link\")\n except OSError:\n onerror(os.path.islink,fullname,sys.exc_info())\n continue\n _rmtree_unsafe(fullname,onerror)\n else :\n try :\n os.unlink(fullname)\n except OSError:\n onerror(os.unlink,fullname,sys.exc_info())\n try :\n os.rmdir(path)\n except OSError:\n onerror(os.rmdir,path,sys.exc_info())\n \n \ndef _rmtree_safe_fd(topfd,path,onerror):\n try :\n with os.scandir(topfd)as scandir_it:\n entries=list(scandir_it)\n except OSError as err:\n err.filename=path\n onerror(os.scandir,path,sys.exc_info())\n return\n for entry in entries:\n fullname=os.path.join(path,entry.name)\n try :\n is_dir=entry.is_dir(follow_symlinks=False )\n if is_dir:\n orig_st=entry.stat(follow_symlinks=False )\n is_dir=stat.S_ISDIR(orig_st.st_mode)\n except OSError:\n is_dir=False\n if is_dir:\n try :\n dirfd=os.open(entry.name,os.O_RDONLY,dir_fd=topfd)\n except OSError:\n onerror(os.open,fullname,sys.exc_info())\n else :\n try :\n if os.path.samestat(orig_st,os.fstat(dirfd)):\n _rmtree_safe_fd(dirfd,fullname,onerror)\n try :\n os.rmdir(entry.name,dir_fd=topfd)\n except OSError:\n onerror(os.rmdir,fullname,sys.exc_info())\n else :\n try :\n \n \n \n raise OSError(\"Cannot call rmtree on a symbolic \"\n \"link\")\n except OSError:\n onerror(os.path.islink,fullname,sys.exc_info())\n finally :\n os.close(dirfd)\n else :\n try :\n os.unlink(entry.name,dir_fd=topfd)\n except OSError:\n onerror(os.unlink,fullname,sys.exc_info())\n \n_use_fd_functions=({os.open,os.stat,os.unlink,os.rmdir}<=\nos.supports_dir_fd and\nos.scandir in os.supports_fd and\nos.stat in os.supports_follow_symlinks)\n\ndef rmtree(path,ignore_errors=False ,onerror=None ):\n ''\n\n\n\n\n\n\n\n\n \n if ignore_errors:\n def onerror(*args):\n pass\n elif onerror is None :\n def onerror(*args):\n raise\n if _use_fd_functions:\n \n if isinstance(path,bytes):\n path=os.fsdecode(path)\n \n \n try :\n orig_st=os.lstat(path)\n except Exception:\n onerror(os.lstat,path,sys.exc_info())\n return\n try :\n fd=os.open(path,os.O_RDONLY)\n except Exception:\n onerror(os.lstat,path,sys.exc_info())\n return\n try :\n if os.path.samestat(orig_st,os.fstat(fd)):\n _rmtree_safe_fd(fd,path,onerror)\n try :\n os.rmdir(path)\n except OSError:\n onerror(os.rmdir,path,sys.exc_info())\n else :\n try :\n \n raise OSError(\"Cannot call rmtree 
on a symbolic link\")\n except OSError:\n onerror(os.path.islink,path,sys.exc_info())\n finally :\n os.close(fd)\n else :\n try :\n if os.path.islink(path):\n \n raise OSError(\"Cannot call rmtree on a symbolic link\")\n except OSError:\n onerror(os.path.islink,path,sys.exc_info())\n \n return\n return _rmtree_unsafe(path,onerror)\n \n \n \nrmtree.avoids_symlink_attacks=_use_fd_functions\n\ndef _basename(path):\n\n\n sep=os.path.sep+(os.path.altsep or '')\n return os.path.basename(path.rstrip(sep))\n \ndef move(src,dst,copy_function=copy2):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n real_dst=dst\n if os.path.isdir(dst):\n if _samefile(src,dst):\n \n \n os.rename(src,dst)\n return\n \n real_dst=os.path.join(dst,_basename(src))\n if os.path.exists(real_dst):\n raise Error(\"Destination path '%s' already exists\"%real_dst)\n try :\n os.rename(src,real_dst)\n except OSError:\n if os.path.islink(src):\n linkto=os.readlink(src)\n os.symlink(linkto,real_dst)\n os.unlink(src)\n elif os.path.isdir(src):\n if _destinsrc(src,dst):\n raise Error(\"Cannot move a directory '%s' into itself\"\n \" '%s'.\"%(src,dst))\n copytree(src,real_dst,copy_function=copy_function,\n symlinks=True )\n rmtree(src)\n else :\n copy_function(src,real_dst)\n os.unlink(src)\n return real_dst\n \ndef _destinsrc(src,dst):\n src=os.path.abspath(src)\n dst=os.path.abspath(dst)\n if not src.endswith(os.path.sep):\n src +=os.path.sep\n if not dst.endswith(os.path.sep):\n dst +=os.path.sep\n return dst.startswith(src)\n \ndef _get_gid(name):\n ''\n if getgrnam is None or name is None :\n return None\n try :\n result=getgrnam(name)\n except KeyError:\n result=None\n if result is not None :\n return result[2]\n return None\n \ndef _get_uid(name):\n ''\n if getpwnam is None or name is None :\n return None\n try :\n result=getpwnam(name)\n except KeyError:\n result=None\n if result is not None :\n return result[2]\n return None\n \ndef _make_tarball(base_name,base_dir,compress=\"gzip\",verbose=0,dry_run=0,\nowner=None ,group=None ,logger=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n if compress is None :\n tar_compression=''\n elif _ZLIB_SUPPORTED and compress =='gzip':\n tar_compression='gz'\n elif _BZ2_SUPPORTED and compress =='bzip2':\n tar_compression='bz2'\n elif _LZMA_SUPPORTED and compress =='xz':\n tar_compression='xz'\n else :\n raise ValueError(\"bad value for 'compress', or compression format not \"\n \"supported : {0}\".format(compress))\n \n import tarfile\n \n compress_ext='.'+tar_compression if compress else ''\n archive_name=base_name+'.tar'+compress_ext\n archive_dir=os.path.dirname(archive_name)\n \n if archive_dir and not os.path.exists(archive_dir):\n if logger is not None :\n logger.info(\"creating %s\",archive_dir)\n if not dry_run:\n os.makedirs(archive_dir)\n \n \n if logger is not None :\n logger.info('Creating tar archive')\n \n uid=_get_uid(owner)\n gid=_get_gid(group)\n \n def _set_uid_gid(tarinfo):\n if gid is not None :\n tarinfo.gid=gid\n tarinfo.gname=group\n if uid is not None :\n tarinfo.uid=uid\n tarinfo.uname=owner\n return tarinfo\n \n if not dry_run:\n tar=tarfile.open(archive_name,'w|%s'%tar_compression)\n try :\n tar.add(base_dir,filter=_set_uid_gid)\n finally :\n tar.close()\n \n return archive_name\n \ndef _make_zipfile(base_name,base_dir,verbose=0,dry_run=0,logger=None ):\n ''\n\n\n\n \n import zipfile\n \n zip_filename=base_name+\".zip\"\n archive_dir=os.path.dirname(base_name)\n \n if archive_dir and not os.path.exists(archive_dir):\n if logger is not None :\n 
logger.info(\"creating %s\",archive_dir)\n if not dry_run:\n os.makedirs(archive_dir)\n \n if logger is not None :\n logger.info(\"creating '%s' and adding '%s' to it\",\n zip_filename,base_dir)\n \n if not dry_run:\n with zipfile.ZipFile(zip_filename,\"w\",\n compression=zipfile.ZIP_DEFLATED)as zf:\n path=os.path.normpath(base_dir)\n if path !=os.curdir:\n zf.write(path,path)\n if logger is not None :\n logger.info(\"adding '%s'\",path)\n for dirpath,dirnames,filenames in os.walk(base_dir):\n for name in sorted(dirnames):\n path=os.path.normpath(os.path.join(dirpath,name))\n zf.write(path,path)\n if logger is not None :\n logger.info(\"adding '%s'\",path)\n for name in filenames:\n path=os.path.normpath(os.path.join(dirpath,name))\n if os.path.isfile(path):\n zf.write(path,path)\n if logger is not None :\n logger.info(\"adding '%s'\",path)\n \n return zip_filename\n \n_ARCHIVE_FORMATS={\n'tar':(_make_tarball,[('compress',None )],\"uncompressed tar file\"),\n}\n\nif _ZLIB_SUPPORTED:\n _ARCHIVE_FORMATS['gztar']=(_make_tarball,[('compress','gzip')],\n \"gzip'ed tar-file\")\n _ARCHIVE_FORMATS['zip']=(_make_zipfile,[],\"ZIP file\")\n \nif _BZ2_SUPPORTED:\n _ARCHIVE_FORMATS['bztar']=(_make_tarball,[('compress','bzip2')],\n \"bzip2'ed tar-file\")\n \nif _LZMA_SUPPORTED:\n _ARCHIVE_FORMATS['xztar']=(_make_tarball,[('compress','xz')],\n \"xz'ed tar-file\")\n \ndef get_archive_formats():\n ''\n\n\n \n formats=[(name,registry[2])for name,registry in\n _ARCHIVE_FORMATS.items()]\n formats.sort()\n return formats\n \ndef register_archive_format(name,function,extra_args=None ,description=''):\n ''\n\n\n\n\n\n\n \n if extra_args is None :\n extra_args=[]\n if not callable(function):\n raise TypeError('The %s object is not callable'%function)\n if not isinstance(extra_args,(tuple,list)):\n raise TypeError('extra_args needs to be a sequence')\n for element in extra_args:\n if not isinstance(element,(tuple,list))or len(element)!=2:\n raise TypeError('extra_args elements are : (arg_name, value)')\n \n _ARCHIVE_FORMATS[name]=(function,extra_args,description)\n \ndef unregister_archive_format(name):\n del _ARCHIVE_FORMATS[name]\n \ndef make_archive(base_name,format,root_dir=None ,base_dir=None ,verbose=0,\ndry_run=0,owner=None ,group=None ,logger=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n save_cwd=os.getcwd()\n if root_dir is not None :\n if logger is not None :\n logger.debug(\"changing into '%s'\",root_dir)\n base_name=os.path.abspath(base_name)\n if not dry_run:\n os.chdir(root_dir)\n \n if base_dir is None :\n base_dir=os.curdir\n \n kwargs={'dry_run':dry_run,'logger':logger}\n \n try :\n format_info=_ARCHIVE_FORMATS[format]\n except KeyError:\n raise ValueError(\"unknown archive format '%s'\"%format)from None\n \n func=format_info[0]\n for arg,val in format_info[1]:\n kwargs[arg]=val\n \n if format !='zip':\n kwargs['owner']=owner\n kwargs['group']=group\n \n try :\n filename=func(base_name,base_dir,**kwargs)\n finally :\n if root_dir is not None :\n if logger is not None :\n logger.debug(\"changing back to '%s'\",save_cwd)\n os.chdir(save_cwd)\n \n return filename\n \n \ndef get_unpack_formats():\n ''\n\n\n\n \n formats=[(name,info[0],info[3])for name,info in\n _UNPACK_FORMATS.items()]\n formats.sort()\n return formats\n \ndef _check_unpack_options(extensions,function,extra_args):\n ''\n \n existing_extensions={}\n for name,info in _UNPACK_FORMATS.items():\n for ext in info[0]:\n existing_extensions[ext]=name\n \n for extension in extensions:\n if extension in existing_extensions:\n msg='%s is already 
registered for \"%s\"'\n raise RegistryError(msg %(extension,\n existing_extensions[extension]))\n \n if not callable(function):\n raise TypeError('The registered function must be a callable')\n \n \ndef register_unpack_format(name,extensions,function,extra_args=None ,\ndescription=''):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if extra_args is None :\n extra_args=[]\n _check_unpack_options(extensions,function,extra_args)\n _UNPACK_FORMATS[name]=extensions,function,extra_args,description\n \ndef unregister_unpack_format(name):\n ''\n del _UNPACK_FORMATS[name]\n \ndef _ensure_directory(path):\n ''\n dirname=os.path.dirname(path)\n if not os.path.isdir(dirname):\n os.makedirs(dirname)\n \ndef _unpack_zipfile(filename,extract_dir):\n ''\n \n import zipfile\n \n if not zipfile.is_zipfile(filename):\n raise ReadError(\"%s is not a zip file\"%filename)\n \n zip=zipfile.ZipFile(filename)\n try :\n for info in zip.infolist():\n name=info.filename\n \n \n if name.startswith('/')or '..'in name:\n continue\n \n target=os.path.join(extract_dir,*name.split('/'))\n if not target:\n continue\n \n _ensure_directory(target)\n if not name.endswith('/'):\n \n data=zip.read(info.filename)\n f=open(target,'wb')\n try :\n f.write(data)\n finally :\n f.close()\n del data\n finally :\n zip.close()\n \ndef _unpack_tarfile(filename,extract_dir):\n ''\n \n import tarfile\n try :\n tarobj=tarfile.open(filename)\n except tarfile.TarError:\n raise ReadError(\n \"%s is not a compressed or uncompressed tar file\"%filename)\n try :\n tarobj.extractall(extract_dir)\n finally :\n tarobj.close()\n \n_UNPACK_FORMATS={\n'tar':(['.tar'],_unpack_tarfile,[],\"uncompressed tar file\"),\n'zip':(['.zip'],_unpack_zipfile,[],\"ZIP file\"),\n}\n\nif _ZLIB_SUPPORTED:\n _UNPACK_FORMATS['gztar']=(['.tar.gz','.tgz'],_unpack_tarfile,[],\n \"gzip'ed tar-file\")\n \nif _BZ2_SUPPORTED:\n _UNPACK_FORMATS['bztar']=(['.tar.bz2','.tbz2'],_unpack_tarfile,[],\n \"bzip2'ed tar-file\")\n \nif _LZMA_SUPPORTED:\n _UNPACK_FORMATS['xztar']=(['.tar.xz','.txz'],_unpack_tarfile,[],\n \"xz'ed tar-file\")\n \ndef _find_unpack_format(filename):\n for name,info in _UNPACK_FORMATS.items():\n for extension in info[0]:\n if filename.endswith(extension):\n return name\n return None\n \ndef unpack_archive(filename,extract_dir=None ,format=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n if extract_dir is None :\n extract_dir=os.getcwd()\n \n extract_dir=os.fspath(extract_dir)\n filename=os.fspath(filename)\n \n if format is not None :\n try :\n format_info=_UNPACK_FORMATS[format]\n except KeyError:\n raise ValueError(\"Unknown unpack format '{0}'\".format(format))from None\n \n func=format_info[1]\n func(filename,extract_dir,**dict(format_info[2]))\n else :\n \n format=_find_unpack_format(filename)\n if format is None :\n raise ReadError(\"Unknown archive format '{0}'\".format(filename))\n \n func=_UNPACK_FORMATS[format][1]\n kwargs=dict(_UNPACK_FORMATS[format][2])\n func(filename,extract_dir,**kwargs)\n \n \nif hasattr(os,'statvfs'):\n\n __all__.append('disk_usage')\n _ntuple_diskusage=collections.namedtuple('usage','total used free')\n _ntuple_diskusage.total.__doc__='Total space in bytes'\n _ntuple_diskusage.used.__doc__='Used space in bytes'\n _ntuple_diskusage.free.__doc__='Free space in bytes'\n \n def disk_usage(path):\n ''\n\n\n\n \n st=os.statvfs(path)\n free=st.f_bavail *st.f_frsize\n total=st.f_blocks *st.f_frsize\n used=(st.f_blocks -st.f_bfree)*st.f_frsize\n return _ntuple_diskusage(total,used,free)\n \nelif os.name =='nt':\n\n import nt\n 
__all__.append('disk_usage')\n _ntuple_diskusage=collections.namedtuple('usage','total used free')\n \n def disk_usage(path):\n ''\n\n\n\n \n total,free=nt._getdiskusage(path)\n used=total -free\n return _ntuple_diskusage(total,used,free)\n \n \ndef chown(path,user=None ,group=None ):\n ''\n\n\n\n \n \n if user is None and group is None :\n raise ValueError(\"user and/or group must be set\")\n \n _user=user\n _group=group\n \n \n if user is None :\n _user=-1\n \n elif isinstance(user,str):\n _user=_get_uid(user)\n if _user is None :\n raise LookupError(\"no such user: {!r}\".format(user))\n \n if group is None :\n _group=-1\n elif not isinstance(group,int):\n _group=_get_gid(group)\n if _group is None :\n raise LookupError(\"no such group: {!r}\".format(group))\n \n os.chown(path,_user,_group)\n \ndef get_terminal_size(fallback=(80,24)):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n try :\n columns=int(os.environ['COLUMNS'])\n except (KeyError,ValueError):\n columns=0\n \n try :\n lines=int(os.environ['LINES'])\n except (KeyError,ValueError):\n lines=0\n \n \n if columns <=0 or lines <=0:\n try :\n size=os.get_terminal_size(sys.__stdout__.fileno())\n except (AttributeError,ValueError,OSError):\n \n \n size=os.terminal_size(fallback)\n if columns <=0:\n columns=size.columns\n if lines <=0:\n lines=size.lines\n \n return os.terminal_size((columns,lines))\n \ndef which(cmd,mode=os.F_OK |os.X_OK,path=None ):\n ''\n\n\n\n\n\n\n\n \n \n \n \n def _access_check(fn,mode):\n return (os.path.exists(fn)and os.access(fn,mode)\n and not os.path.isdir(fn))\n \n \n \n \n if os.path.dirname(cmd):\n if _access_check(cmd,mode):\n return cmd\n return None\n \n if path is None :\n path=os.environ.get(\"PATH\",os.defpath)\n if not path:\n return None\n path=path.split(os.pathsep)\n \n if sys.platform ==\"win32\":\n \n if not os.curdir in path:\n path.insert(0,os.curdir)\n \n \n pathext=os.environ.get(\"PATHEXT\",\"\").split(os.pathsep)\n \n \n \n \n if any(cmd.lower().endswith(ext.lower())for ext in pathext):\n files=[cmd]\n else :\n files=[cmd+ext for ext in pathext]\n else :\n \n \n files=[cmd]\n \n seen=set()\n for dir in path:\n normdir=os.path.normcase(dir)\n if not normdir in seen:\n seen.add(normdir)\n for thefile in files:\n name=os.path.join(dir,thefile)\n if _access_check(name,mode):\n return name\n return None\n", ["bz2", "collections", "errno", "fnmatch", "grp", "lzma", "nt", "os", "pwd", "stat", "sys", "tarfile", "zipfile", "zlib"]], "signal": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nCTRL_BREAK_EVENT=1\nCTRL_C_EVENT=0\nNSIG=23\nSIGABRT=22\nSIGBREAK=21\nSIGFPE=8\nSIGILL=4\nSIGINT=2\nSIGSEGV=11\nSIGTERM=15\nSIG_DFL=0\nSIG_IGN=1\n\ndef signal(signalnum,handler):\n pass\n", []], "site": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nimport sys\nimport os\nimport builtins\nimport _sitebuiltins\n\n\nPREFIXES=[sys.prefix,sys.exec_prefix]\n\n\nENABLE_USER_SITE=None\n\n\n\n\nUSER_SITE=None\nUSER_BASE=None\n\n\ndef makepath(*paths):\n dir=os.path.join(*paths)\n try :\n dir=os.path.abspath(dir)\n except OSError:\n pass\n return dir,os.path.normcase(dir)\n \n \ndef abs_paths():\n ''\n for m in set(sys.modules.values()):\n if (getattr(getattr(m,'__loader__',None ),'__module__',None )not in\n ('_frozen_importlib','_frozen_importlib_external')):\n continue\n try :\n m.__file__=os.path.abspath(m.__file__)\n except (AttributeError,OSError,TypeError):\n pass\n try :\n 
m.__cached__=os.path.abspath(m.__cached__)\n except (AttributeError,OSError,TypeError):\n pass\n \n \ndef removeduppaths():\n ''\n \n \n \n L=[]\n known_paths=set()\n for dir in sys.path:\n \n \n \n dir,dircase=makepath(dir)\n if dircase not in known_paths:\n L.append(dir)\n known_paths.add(dircase)\n sys.path[:]=L\n return known_paths\n \n \ndef _init_pathinfo():\n ''\n d=set()\n for item in sys.path:\n try :\n if os.path.exists(item):\n _,itemcase=makepath(item)\n d.add(itemcase)\n except TypeError:\n continue\n return d\n \n \ndef addpackage(sitedir,name,known_paths):\n ''\n\n\n \n if known_paths is None :\n known_paths=_init_pathinfo()\n reset=True\n else :\n reset=False\n fullname=os.path.join(sitedir,name)\n try :\n f=open(fullname,\"r\")\n except OSError:\n return\n with f:\n for n,line in enumerate(f):\n if line.startswith(\"#\"):\n continue\n try :\n if line.startswith((\"import \",\"import\\t\")):\n exec(line)\n continue\n line=line.rstrip()\n dir,dircase=makepath(sitedir,line)\n if not dircase in known_paths and os.path.exists(dir):\n sys.path.append(dir)\n known_paths.add(dircase)\n except Exception:\n print(\"Error processing line {:d} of {}:\\n\".format(n+1,fullname),\n file=sys.stderr)\n import traceback\n for record in traceback.format_exception(*sys.exc_info()):\n for line in record.splitlines():\n print(' '+line,file=sys.stderr)\n print(\"\\nRemainder of file ignored\",file=sys.stderr)\n break\n if reset:\n known_paths=None\n return known_paths\n \n \ndef addsitedir(sitedir,known_paths=None ):\n ''\n \n if known_paths is None :\n known_paths=_init_pathinfo()\n reset=True\n else :\n reset=False\n sitedir,sitedircase=makepath(sitedir)\n if not sitedircase in known_paths:\n sys.path.append(sitedir)\n known_paths.add(sitedircase)\n try :\n names=os.listdir(sitedir)\n except OSError:\n return\n names=[name for name in names if name.endswith(\".pth\")]\n for name in sorted(names):\n addpackage(sitedir,name,known_paths)\n if reset:\n known_paths=None\n return known_paths\n \n \ndef check_enableusersite():\n ''\n\n\n\n\n\n\n\n \n if sys.flags.no_user_site:\n return False\n \n if hasattr(os,\"getuid\")and hasattr(os,\"geteuid\"):\n \n if os.geteuid()!=os.getuid():\n return None\n if hasattr(os,\"getgid\")and hasattr(os,\"getegid\"):\n \n if os.getegid()!=os.getgid():\n return None\n \n return True\n \n \n \n \n \n \n \n \n \ndef _getuserbase():\n env_base=os.environ.get(\"PYTHONUSERBASE\",None )\n if env_base:\n return env_base\n \n def joinuser(*args):\n return os.path.expanduser(os.path.join(*args))\n \n if os.name ==\"nt\":\n base=os.environ.get(\"APPDATA\")or \"~\"\n return joinuser(base,\"Python\")\n \n if sys.platform ==\"darwin\"and sys._framework:\n return joinuser(\"~\",\"Library\",sys._framework,\n \"%d.%d\"%sys.version_info[:2])\n \n return joinuser(\"~\",\".local\")\n \n \n \ndef _get_path(userbase):\n version=sys.version_info\n \n if os.name =='nt':\n return f'{userbase}\\\\Python{version[0]}{version[1]}\\\\site-packages'\n \n if sys.platform =='darwin'and sys._framework:\n return f'{userbase}/lib/python/site-packages'\n \n return f'{userbase}/lib/python{version[0]}.{version[1]}/site-packages'\n \n \ndef getuserbase():\n ''\n\n\n\n\n \n global USER_BASE\n if USER_BASE is None :\n USER_BASE=_getuserbase()\n return USER_BASE\n \n \ndef getusersitepackages():\n ''\n\n\n\n \n global USER_SITE\n userbase=getuserbase()\n \n if USER_SITE is None :\n USER_SITE=_get_path(userbase)\n \n return USER_SITE\n \ndef addusersitepackages(known_paths):\n ''\n\n\n\n \n \n \n 
user_site=getusersitepackages()\n \n if ENABLE_USER_SITE and os.path.isdir(user_site):\n addsitedir(user_site,known_paths)\n return known_paths\n \ndef getsitepackages(prefixes=None ):\n ''\n\n\n\n\n \n sitepackages=[]\n seen=set()\n \n if prefixes is None :\n prefixes=PREFIXES\n \n for prefix in prefixes:\n if not prefix or prefix in seen:\n continue\n seen.add(prefix)\n \n if os.sep =='/':\n sitepackages.append(os.path.join(prefix,\"lib\",\n \"python%d.%d\"%sys.version_info[:2],\n \"site-packages\"))\n else :\n sitepackages.append(prefix)\n sitepackages.append(os.path.join(prefix,\"lib\",\"site-packages\"))\n return sitepackages\n \ndef addsitepackages(known_paths,prefixes=None ):\n ''\n for sitedir in getsitepackages(prefixes):\n if os.path.isdir(sitedir):\n addsitedir(sitedir,known_paths)\n \n return known_paths\n \ndef setquit():\n ''\n\n\n\n\n \n if os.sep =='\\\\':\n eof='Ctrl-Z plus Return'\n else :\n eof='Ctrl-D (i.e. EOF)'\n \n builtins.quit=_sitebuiltins.Quitter('quit',eof)\n builtins.exit=_sitebuiltins.Quitter('exit',eof)\n \n \ndef setcopyright():\n ''\n builtins.copyright=_sitebuiltins._Printer(\"copyright\",sys.copyright)\n if sys.platform[:4]=='java':\n builtins.credits=_sitebuiltins._Printer(\n \"credits\",\n \"Jython is maintained by the Jython developers (www.jython.org).\")\n else :\n builtins.credits=_sitebuiltins._Printer(\"credits\",\"\"\"\\\n Thanks to CWI, CNRI, BeOpen.com, Zope Corporation and a cast of thousands\n for supporting Python development. See www.python.org for more information.\"\"\")\n files,dirs=[],[]\n \n \n if hasattr(os,'__file__'):\n here=os.path.dirname(os.__file__)\n files.extend([\"LICENSE.txt\",\"LICENSE\"])\n dirs.extend([os.path.join(here,os.pardir),here,os.curdir])\n builtins.license=_sitebuiltins._Printer(\n \"license\",\n \"See https://www.python.org/psf/license/\",\n files,dirs)\n \n \ndef sethelper():\n builtins.help=_sitebuiltins._Helper()\n \ndef enablerlcompleter():\n ''\n\n\n\n\n\n\n \n def register_readline():\n import atexit\n try :\n import readline\n import rlcompleter\n except ImportError:\n return\n \n \n \n readline_doc=getattr(readline,'__doc__','')\n if readline_doc is not None and 'libedit'in readline_doc:\n readline.parse_and_bind('bind ^I rl_complete')\n else :\n readline.parse_and_bind('tab: complete')\n \n try :\n readline.read_init_file()\n except OSError:\n \n \n \n \n pass\n \n if readline.get_current_history_length()==0:\n \n \n \n \n \n history=os.path.join(os.path.expanduser('~'),\n '.python_history')\n try :\n readline.read_history_file(history)\n except OSError:\n pass\n atexit.register(readline.write_history_file,history)\n \n sys.__interactivehook__=register_readline\n \ndef venv(known_paths):\n global PREFIXES,ENABLE_USER_SITE\n \n env=os.environ\n if sys.platform =='darwin'and '__PYVENV_LAUNCHER__'in env:\n executable=os.environ['__PYVENV_LAUNCHER__']\n else :\n executable=sys.executable\n exe_dir,_=os.path.split(os.path.abspath(executable))\n site_prefix=os.path.dirname(exe_dir)\n sys._home=None\n conf_basename='pyvenv.cfg'\n candidate_confs=[\n conffile for conffile in (\n os.path.join(exe_dir,conf_basename),\n os.path.join(site_prefix,conf_basename)\n )\n if os.path.isfile(conffile)\n ]\n \n if candidate_confs:\n virtual_conf=candidate_confs[0]\n system_site=\"true\"\n \n \n with open(virtual_conf,encoding='utf-8')as f:\n for line in f:\n if '='in line:\n key,_,value=line.partition('=')\n key=key.strip().lower()\n value=value.strip()\n if key =='include-system-site-packages':\n 
system_site=value.lower()\n elif key =='home':\n sys._home=value\n \n sys.prefix=sys.exec_prefix=site_prefix\n \n \n addsitepackages(known_paths,[sys.prefix])\n \n \n \n if system_site ==\"true\":\n PREFIXES.insert(0,sys.prefix)\n else :\n PREFIXES=[sys.prefix]\n ENABLE_USER_SITE=False\n \n return known_paths\n \n \ndef execsitecustomize():\n ''\n try :\n try :\n import sitecustomize\n except ImportError as exc:\n if exc.name =='sitecustomize':\n pass\n else :\n raise\n except Exception as err:\n if sys.flags.verbose:\n sys.excepthook(*sys.exc_info())\n else :\n sys.stderr.write(\n \"Error in sitecustomize; set PYTHONVERBOSE for traceback:\\n\"\n \"%s: %s\\n\"%\n (err.__class__.__name__,err))\n \n \ndef execusercustomize():\n ''\n try :\n try :\n import usercustomize\n except ImportError as exc:\n if exc.name =='usercustomize':\n pass\n else :\n raise\n except Exception as err:\n if sys.flags.verbose:\n sys.excepthook(*sys.exc_info())\n else :\n sys.stderr.write(\n \"Error in usercustomize; set PYTHONVERBOSE for traceback:\\n\"\n \"%s: %s\\n\"%\n (err.__class__.__name__,err))\n \n \ndef main():\n ''\n\n\n\n \n global ENABLE_USER_SITE\n \n orig_path=sys.path[:]\n known_paths=removeduppaths()\n if orig_path !=sys.path:\n \n \n abs_paths()\n \n known_paths=venv(known_paths)\n if ENABLE_USER_SITE is None :\n ENABLE_USER_SITE=check_enableusersite()\n known_paths=addusersitepackages(known_paths)\n known_paths=addsitepackages(known_paths)\n setquit()\n setcopyright()\n sethelper()\n if not sys.flags.isolated:\n enablerlcompleter()\n execsitecustomize()\n if ENABLE_USER_SITE:\n execusercustomize()\n \n \n \nif not sys.flags.no_site:\n main()\n \ndef _script():\n help=\"\"\"\\\n %s [--user-base] [--user-site]\n\n Without arguments print some useful information\n With arguments print the value of USER_BASE and/or USER_SITE separated\n by '%s'.\n\n Exit codes with --user-base or --user-site:\n 0 - user site directory is enabled\n 1 - user site directory is disabled by user\n 2 - uses site directory is disabled by super user\n or for security reasons\n >2 - unknown error\n \"\"\"\n args=sys.argv[1:]\n if not args:\n user_base=getuserbase()\n user_site=getusersitepackages()\n print(\"sys.path = [\")\n for dir in sys.path:\n print(\" %r,\"%(dir,))\n print(\"]\")\n print(\"USER_BASE: %r (%s)\"%(user_base,\n \"exists\"if os.path.isdir(user_base)else \"doesn't exist\"))\n print(\"USER_SITE: %r (%s)\"%(user_site,\n \"exists\"if os.path.isdir(user_site)else \"doesn't exist\"))\n print(\"ENABLE_USER_SITE: %r\"%ENABLE_USER_SITE)\n sys.exit(0)\n \n buffer=[]\n if '--user-base'in args:\n buffer.append(USER_BASE)\n if '--user-site'in args:\n buffer.append(USER_SITE)\n \n if buffer:\n print(os.pathsep.join(buffer))\n if ENABLE_USER_SITE:\n sys.exit(0)\n elif ENABLE_USER_SITE is False :\n sys.exit(1)\n elif ENABLE_USER_SITE is None :\n sys.exit(2)\n else :\n sys.exit(3)\n else :\n import textwrap\n print(textwrap.dedent(help %(sys.argv[0],os.pathsep)))\n sys.exit(10)\n \nif __name__ =='__main__':\n _script()\n", ["_sitebuiltins", "atexit", "builtins", "os", "readline", "rlcompleter", "sitecustomize", "sys", "textwrap", "traceback", "usercustomize"]], "socket": [".py", "\n\n\n\"\"\"\\\nThis module provides socket operations and some related functions.\nOn Unix, it supports IP (Internet Protocol) and Unix domain sockets.\nOn other systems, it only supports IP. 
Functions specific for a\nsocket are available as methods of the socket object.\n\nFunctions:\n\nsocket() -- create a new socket object\nsocketpair() -- create a pair of new socket objects [*]\nfromfd() -- create a socket object from an open file descriptor [*]\nfromshare() -- create a socket object from data received from socket.share() [*]\ngethostname() -- return the current hostname\ngethostbyname() -- map a hostname to its IP number\ngethostbyaddr() -- map an IP number or hostname to DNS info\ngetservbyname() -- map a service name and a protocol name to a port number\ngetprotobyname() -- map a protocol name (e.g. 'tcp') to a number\nntohs(), ntohl() -- convert 16, 32 bit int from network to host byte order\nhtons(), htonl() -- convert 16, 32 bit int from host to network byte order\ninet_aton() -- convert IP addr string (123.45.67.89) to 32-bit packed format\ninet_ntoa() -- convert 32-bit packed format IP to string (123.45.67.89)\nsocket.getdefaulttimeout() -- get the default timeout value\nsocket.setdefaulttimeout() -- set the default timeout value\ncreate_connection() -- connects to an address, with an optional timeout and\n optional source address.\n\n [*] not available on all platforms!\n\nSpecial objects:\n\nSocketType -- type object for socket objects\nerror -- exception raised for I/O errors\nhas_ipv6 -- boolean value indicating if IPv6 is supported\n\nIntEnum constants:\n\nAF_INET, AF_UNIX -- socket domains (first argument to socket() call)\nSOCK_STREAM, SOCK_DGRAM, SOCK_RAW -- socket types (second argument)\n\nInteger constants:\n\nMany other constants may be defined; these may be used in calls to\nthe setsockopt() and getsockopt() methods.\n\"\"\"\n\nimport _socket\nfrom _socket import *\n\nimport os,sys,io,selectors\nfrom enum import IntEnum,IntFlag\n\ntry :\n import errno\nexcept ImportError:\n errno=None\nEBADF=getattr(errno,'EBADF',9)\nEAGAIN=getattr(errno,'EAGAIN',11)\nEWOULDBLOCK=getattr(errno,'EWOULDBLOCK',11)\n\n__all__=[\"fromfd\",\"getfqdn\",\"create_connection\",\n\"AddressFamily\",\"SocketKind\"]\n__all__.extend(os._get_exports_list(_socket))\n\n\n\n\n\n\n\nIntEnum._convert(\n'AddressFamily',\n__name__,\nlambda C:C.isupper()and C.startswith('AF_'))\n\nIntEnum._convert(\n'SocketKind',\n__name__,\nlambda C:C.isupper()and C.startswith('SOCK_'))\n\nIntFlag._convert(\n'MsgFlag',\n__name__,\nlambda C:C.isupper()and C.startswith('MSG_'))\n\nIntFlag._convert(\n'AddressInfo',\n__name__,\nlambda C:C.isupper()and C.startswith('AI_'))\n\n_LOCALHOST='127.0.0.1'\n_LOCALHOST_V6='::1'\n\n\ndef _intenum_converter(value,enum_klass):\n ''\n\n\n \n try :\n return enum_klass(value)\n except ValueError:\n return value\n \n_realsocket=socket\n\n\nif sys.platform.lower().startswith(\"win\"):\n errorTab={}\n errorTab[10004]=\"The operation was interrupted.\"\n errorTab[10009]=\"A bad file handle was passed.\"\n errorTab[10013]=\"Permission denied.\"\n errorTab[10014]=\"A fault occurred on the network??\"\n errorTab[10022]=\"An invalid operation was attempted.\"\n errorTab[10035]=\"The socket operation would block\"\n errorTab[10036]=\"A blocking operation is already in progress.\"\n errorTab[10048]=\"The network address is in use.\"\n errorTab[10054]=\"The connection has been reset.\"\n errorTab[10058]=\"The network has been shut down.\"\n errorTab[10060]=\"The operation timed out.\"\n errorTab[10061]=\"Connection refused.\"\n errorTab[10063]=\"The name is too long.\"\n errorTab[10064]=\"The host is down.\"\n errorTab[10065]=\"The host is unreachable.\"\n __all__.append(\"errorTab\")\n \n 
\nclass _GiveupOnSendfile(Exception):pass\n\n\nclass socket(_socket.socket):\n\n ''\n \n __slots__=[\"__weakref__\",\"_io_refs\",\"_closed\"]\n \n def __init__(self,family=-1,type=-1,proto=-1,fileno=None ):\n \n \n \n \n if fileno is None :\n if family ==-1:\n family=AF_INET\n if type ==-1:\n type=SOCK_STREAM\n if proto ==-1:\n proto=0\n _socket.socket.__init__(self,family,type,proto,fileno)\n self._io_refs=0\n self._closed=False\n \n def __enter__(self):\n return self\n \n def __exit__(self,*args):\n if not self._closed:\n self.close()\n \n def __repr__(self):\n ''\n\n \n closed=getattr(self,'_closed',False )\n s=\"<%s.%s%s fd=%i, family=%s, type=%s, proto=%i\"\\\n %(self.__class__.__module__,\n self.__class__.__qualname__,\n \" [closed]\"if closed else \"\",\n self.fileno(),\n self.family,\n self.type,\n self.proto)\n if not closed:\n try :\n laddr=self.getsockname()\n if laddr:\n s +=\", laddr=%s\"%str(laddr)\n except error:\n pass\n try :\n raddr=self.getpeername()\n if raddr:\n s +=\", raddr=%s\"%str(raddr)\n except error:\n pass\n s +='>'\n return s\n \n def __getstate__(self):\n raise TypeError(\"Cannot serialize socket object\")\n \n def dup(self):\n ''\n\n\n\n \n fd=dup(self.fileno())\n sock=self.__class__(self.family,self.type,self.proto,fileno=fd)\n sock.settimeout(self.gettimeout())\n return sock\n \n def accept(self):\n ''\n\n\n\n\n \n fd,addr=self._accept()\n sock=socket(self.family,self.type,self.proto,fileno=fd)\n \n \n \n if getdefaulttimeout()is None and self.gettimeout():\n sock.setblocking(True )\n return sock,addr\n \n def makefile(self,mode=\"r\",buffering=None ,*,\n encoding=None ,errors=None ,newline=None ):\n ''\n\n\n\n \n \n if not set(mode)<={\"r\",\"w\",\"b\"}:\n raise ValueError(\"invalid mode %r (only r, w, b allowed)\"%(mode,))\n writing=\"w\"in mode\n reading=\"r\"in mode or not writing\n assert reading or writing\n binary=\"b\"in mode\n rawmode=\"\"\n if reading:\n rawmode +=\"r\"\n if writing:\n rawmode +=\"w\"\n raw=SocketIO(self,rawmode)\n self._io_refs +=1\n if buffering is None :\n buffering=-1\n if buffering <0:\n buffering=io.DEFAULT_BUFFER_SIZE\n if buffering ==0:\n if not binary:\n raise ValueError(\"unbuffered streams must be binary\")\n return raw\n if reading and writing:\n buffer=io.BufferedRWPair(raw,raw,buffering)\n elif reading:\n buffer=io.BufferedReader(raw,buffering)\n else :\n assert writing\n buffer=io.BufferedWriter(raw,buffering)\n if binary:\n return buffer\n text=io.TextIOWrapper(buffer,encoding,errors,newline)\n text.mode=mode\n return text\n \n if hasattr(os,'sendfile'):\n \n def _sendfile_use_sendfile(self,file,offset=0,count=None ):\n self._check_sendfile_params(file,offset,count)\n sockno=self.fileno()\n try :\n fileno=file.fileno()\n except (AttributeError,io.UnsupportedOperation)as err:\n raise _GiveupOnSendfile(err)\n try :\n fsize=os.fstat(fileno).st_size\n except OSError as err:\n raise _GiveupOnSendfile(err)\n if not fsize:\n return 0\n blocksize=fsize if not count else count\n \n timeout=self.gettimeout()\n if timeout ==0:\n raise ValueError(\"non-blocking sockets are not supported\")\n \n \n \n if hasattr(selectors,'PollSelector'):\n selector=selectors.PollSelector()\n else :\n selector=selectors.SelectSelector()\n selector.register(sockno,selectors.EVENT_WRITE)\n \n total_sent=0\n \n selector_select=selector.select\n os_sendfile=os.sendfile\n try :\n while True :\n if timeout and not selector_select(timeout):\n raise _socket.timeout('timed out')\n if count:\n blocksize=count -total_sent\n if blocksize <=0:\n break\n 
try :\n sent=os_sendfile(sockno,fileno,offset,blocksize)\n except BlockingIOError:\n if not timeout:\n \n \n selector_select()\n continue\n except OSError as err:\n if total_sent ==0:\n \n \n \n \n raise _GiveupOnSendfile(err)\n raise err from None\n else :\n if sent ==0:\n break\n offset +=sent\n total_sent +=sent\n return total_sent\n finally :\n if total_sent >0 and hasattr(file,'seek'):\n file.seek(offset)\n else :\n def _sendfile_use_sendfile(self,file,offset=0,count=None ):\n raise _GiveupOnSendfile(\n \"os.sendfile() not available on this platform\")\n \n def _sendfile_use_send(self,file,offset=0,count=None ):\n self._check_sendfile_params(file,offset,count)\n if self.gettimeout()==0:\n raise ValueError(\"non-blocking sockets are not supported\")\n if offset:\n file.seek(offset)\n blocksize=min(count,8192)if count else 8192\n total_sent=0\n \n file_read=file.read\n sock_send=self.send\n try :\n while True :\n if count:\n blocksize=min(count -total_sent,blocksize)\n if blocksize <=0:\n break\n data=memoryview(file_read(blocksize))\n if not data:\n break\n while True :\n try :\n sent=sock_send(data)\n except BlockingIOError:\n continue\n else :\n total_sent +=sent\n if sent 0 and hasattr(file,'seek'):\n file.seek(offset+total_sent)\n \n def _check_sendfile_params(self,file,offset,count):\n if 'b'not in getattr(file,'mode','b'):\n raise ValueError(\"file should be opened in binary mode\")\n if not self.type&SOCK_STREAM:\n raise ValueError(\"only SOCK_STREAM type sockets are supported\")\n if count is not None :\n if not isinstance(count,int):\n raise TypeError(\n \"count must be a positive integer (got {!r})\".format(count))\n if count <=0:\n raise ValueError(\n \"count must be a positive integer (got {!r})\".format(count))\n \n def sendfile(self,file,offset=0,count=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n try :\n return self._sendfile_use_sendfile(file,offset,count)\n except _GiveupOnSendfile:\n return self._sendfile_use_send(file,offset,count)\n \n def _decref_socketios(self):\n if self._io_refs >0:\n self._io_refs -=1\n if self._closed:\n self.close()\n \n def _real_close(self,_ss=_socket.socket):\n \n _ss.close(self)\n \n def close(self):\n \n self._closed=True\n if self._io_refs <=0:\n self._real_close()\n \n def detach(self):\n ''\n\n\n\n\n \n self._closed=True\n return super().detach()\n \n @property\n def family(self):\n ''\n \n return _intenum_converter(super().family,AddressFamily)\n \n @property\n def type(self):\n ''\n \n return _intenum_converter(super().type,SocketKind)\n \n if os.name =='nt':\n def get_inheritable(self):\n return os.get_handle_inheritable(self.fileno())\n def set_inheritable(self,inheritable):\n os.set_handle_inheritable(self.fileno(),inheritable)\n else :\n def get_inheritable(self):\n return os.get_inheritable(self.fileno())\n def set_inheritable(self,inheritable):\n os.set_inheritable(self.fileno(),inheritable)\n get_inheritable.__doc__=\"Get the inheritable flag of the socket\"\n set_inheritable.__doc__=\"Set the inheritable flag of the socket\"\n \ndef fromfd(fd,family,type,proto=0):\n ''\n\n\n\n \n nfd=dup(fd)\n return socket(family,type,proto,nfd)\n \nif hasattr(_socket.socket,\"share\"):\n def fromshare(info):\n ''\n\n\n\n \n return socket(0,0,0,info)\n __all__.append(\"fromshare\")\n \nif hasattr(_socket,\"socketpair\"):\n\n def socketpair(family=None ,type=SOCK_STREAM,proto=0):\n ''\n\n\n\n\n\n \n if family is None :\n try :\n family=AF_UNIX\n except NameError:\n family=AF_INET\n a,b=_socket.socketpair(family,type,proto)\n 
a=socket(family,type,proto,a.detach())\n b=socket(family,type,proto,b.detach())\n return a,b\n \nelse :\n\n\n def socketpair(family=AF_INET,type=SOCK_STREAM,proto=0):\n if family ==AF_INET:\n host=_LOCALHOST\n elif family ==AF_INET6:\n host=_LOCALHOST_V6\n else :\n raise ValueError(\"Only AF_INET and AF_INET6 socket address families \"\n \"are supported\")\n if type !=SOCK_STREAM:\n raise ValueError(\"Only SOCK_STREAM socket type is supported\")\n if proto !=0:\n raise ValueError(\"Only protocol zero is supported\")\n \n \n \n lsock=socket(family,type,proto)\n try :\n lsock.bind((host,0))\n lsock.listen()\n \n addr,port=lsock.getsockname()[:2]\n csock=socket(family,type,proto)\n try :\n csock.setblocking(False )\n try :\n csock.connect((addr,port))\n except (BlockingIOError,InterruptedError):\n pass\n csock.setblocking(True )\n ssock,_=lsock.accept()\n except :\n csock.close()\n raise\n finally :\n lsock.close()\n return (ssock,csock)\n __all__.append(\"socketpair\")\n \nsocketpair.__doc__=\"\"\"socketpair([family[, type[, proto]]]) -> (socket object, socket object)\nCreate a pair of socket objects from the sockets returned by the platform\nsocketpair() function.\nThe arguments are the same as for socket() except the default family is AF_UNIX\nif defined on the platform; otherwise, the default is AF_INET.\n\"\"\"\n\n_blocking_errnos={EAGAIN,EWOULDBLOCK}\n\nclass SocketIO(io.RawIOBase):\n\n ''\n\n\n\n \n \n \n \n \n \n \n \n \n \n \n def __init__(self,sock,mode):\n if mode not in (\"r\",\"w\",\"rw\",\"rb\",\"wb\",\"rwb\"):\n raise ValueError(\"invalid mode: %r\"%mode)\n io.RawIOBase.__init__(self)\n self._sock=sock\n if \"b\"not in mode:\n mode +=\"b\"\n self._mode=mode\n self._reading=\"r\"in mode\n self._writing=\"w\"in mode\n self._timeout_occurred=False\n \n def readinto(self,b):\n ''\n\n\n\n\n\n \n self._checkClosed()\n self._checkReadable()\n if self._timeout_occurred:\n raise OSError(\"cannot read from timed out object\")\n while True :\n try :\n return self._sock.recv_into(b)\n except timeout:\n self._timeout_occurred=True\n raise\n except error as e:\n if e.args[0]in _blocking_errnos:\n return None\n raise\n \n def write(self,b):\n ''\n\n\n\n \n self._checkClosed()\n self._checkWritable()\n try :\n return self._sock.send(b)\n except error as e:\n \n if e.args[0]in _blocking_errnos:\n return None\n raise\n \n def readable(self):\n ''\n \n if self.closed:\n raise ValueError(\"I/O operation on closed socket.\")\n return self._reading\n \n def writable(self):\n ''\n \n if self.closed:\n raise ValueError(\"I/O operation on closed socket.\")\n return self._writing\n \n def seekable(self):\n ''\n \n if self.closed:\n raise ValueError(\"I/O operation on closed socket.\")\n return super().seekable()\n \n def fileno(self):\n ''\n \n self._checkClosed()\n return self._sock.fileno()\n \n @property\n def name(self):\n if not self.closed:\n return self.fileno()\n else :\n return -1\n \n @property\n def mode(self):\n return self._mode\n \n def close(self):\n ''\n\n \n if self.closed:\n return\n io.RawIOBase.close(self)\n self._sock._decref_socketios()\n self._sock=None\n \n \ndef getfqdn(name=''):\n ''\n\n\n\n\n\n\n \n name=name.strip()\n if not name or name =='0.0.0.0':\n name=gethostname()\n try :\n hostname,aliases,ipaddrs=gethostbyaddr(name)\n except error:\n pass\n else :\n aliases.insert(0,hostname)\n for name in aliases:\n if '.'in name:\n break\n else :\n name=hostname\n return name\n \n \n_GLOBAL_DEFAULT_TIMEOUT=object()\n\ndef 
create_connection(address,timeout=_GLOBAL_DEFAULT_TIMEOUT,\nsource_address=None ):\n ''\n\n\n\n\n\n\n\n\n\n \n \n host,port=address\n err=None\n for res in getaddrinfo(host,port,0,SOCK_STREAM):\n af,socktype,proto,canonname,sa=res\n sock=None\n try :\n sock=socket(af,socktype,proto)\n if timeout is not _GLOBAL_DEFAULT_TIMEOUT:\n sock.settimeout(timeout)\n if source_address:\n sock.bind(source_address)\n sock.connect(sa)\n \n err=None\n return sock\n \n except error as _:\n err=_\n if sock is not None :\n sock.close()\n \n if err is not None :\n raise err\n else :\n raise error(\"getaddrinfo returns an empty list\")\n \ndef getaddrinfo(host,port,family=0,type=0,proto=0,flags=0):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n \n \n addrlist=[]\n for res in _socket.getaddrinfo(host,port,family,type,proto,flags):\n af,socktype,proto,canonname,sa=res\n addrlist.append((_intenum_converter(af,AddressFamily),\n _intenum_converter(socktype,SocketKind),\n proto,canonname,sa))\n return addrlist\n", ["_socket", "enum", "errno", "io", "os", "selectors", "sys"]], "sre_compile": [".py", "\n\n\n\n\n\n\n\n\n\n\"\"\"Internal support module for sre\"\"\"\n\nimport _sre\nimport sre_parse\nfrom sre_constants import *\n\n\n\n_LITERAL_CODES={LITERAL,NOT_LITERAL}\n_REPEATING_CODES={REPEAT,MIN_REPEAT,MAX_REPEAT}\n_SUCCESS_CODES={SUCCESS,FAILURE}\n_ASSERT_CODES={ASSERT,ASSERT_NOT}\n_UNIT_CODES=_LITERAL_CODES |{ANY,IN}\n\n\n_equivalences=(\n\n(0x69,0x131),\n\n(0x73,0x17f),\n\n(0xb5,0x3bc),\n\n(0x345,0x3b9,0x1fbe),\n\n(0x390,0x1fd3),\n\n(0x3b0,0x1fe3),\n\n(0x3b2,0x3d0),\n\n(0x3b5,0x3f5),\n\n(0x3b8,0x3d1),\n\n(0x3ba,0x3f0),\n\n(0x3c0,0x3d6),\n\n(0x3c1,0x3f1),\n\n(0x3c2,0x3c3),\n\n(0x3c6,0x3d5),\n\n(0x1e61,0x1e9b),\n\n(0xfb05,0xfb06),\n)\n\n\n_ignorecase_fixes={i:tuple(j for j in t if i !=j)\nfor t in _equivalences for i in t}\n\ndef _combine_flags(flags,add_flags,del_flags,\nTYPE_FLAGS=sre_parse.TYPE_FLAGS):\n if add_flags&TYPE_FLAGS:\n flags &=~TYPE_FLAGS\n return (flags |add_flags)&~del_flags\n \ndef _compile(code,pattern,flags):\n\n emit=code.append\n _len=len\n LITERAL_CODES=_LITERAL_CODES\n REPEATING_CODES=_REPEATING_CODES\n SUCCESS_CODES=_SUCCESS_CODES\n ASSERT_CODES=_ASSERT_CODES\n iscased=None\n tolower=None\n fixes=None\n if flags&SRE_FLAG_IGNORECASE and not flags&SRE_FLAG_LOCALE:\n if flags&SRE_FLAG_UNICODE and not flags&SRE_FLAG_ASCII:\n iscased=_sre.unicode_iscased\n tolower=_sre.unicode_tolower\n fixes=_ignorecase_fixes\n else :\n iscased=_sre.ascii_iscased\n tolower=_sre.ascii_tolower\n for op,av in pattern:\n if op in LITERAL_CODES:\n if not flags&SRE_FLAG_IGNORECASE:\n emit(op)\n emit(av)\n elif flags&SRE_FLAG_LOCALE:\n emit(OP_LOCALE_IGNORE[op])\n emit(av)\n elif not iscased(av):\n emit(op)\n emit(av)\n else :\n lo=tolower(av)\n if not fixes:\n emit(OP_IGNORE[op])\n emit(lo)\n elif lo not in fixes:\n emit(OP_UNICODE_IGNORE[op])\n emit(lo)\n else :\n emit(IN_UNI_IGNORE)\n skip=_len(code);emit(0)\n if op is NOT_LITERAL:\n emit(NEGATE)\n for k in (lo,)+fixes[lo]:\n emit(LITERAL)\n emit(k)\n emit(FAILURE)\n code[skip]=_len(code)-skip\n elif op is IN:\n charset,hascased=_optimize_charset(av,iscased,tolower,fixes)\n if flags&SRE_FLAG_IGNORECASE and flags&SRE_FLAG_LOCALE:\n emit(IN_LOC_IGNORE)\n elif not hascased:\n emit(IN)\n elif not fixes:\n emit(IN_IGNORE)\n else :\n emit(IN_UNI_IGNORE)\n skip=_len(code);emit(0)\n _compile_charset(charset,flags,code)\n code[skip]=_len(code)-skip\n elif op is ANY:\n if flags&SRE_FLAG_DOTALL:\n emit(ANY_ALL)\n else :\n emit(ANY)\n elif op in REPEATING_CODES:\n if 
flags&SRE_FLAG_TEMPLATE:\n raise error(\"internal: unsupported template operator %r\"%(op,))\n if _simple(av[2]):\n if op is MAX_REPEAT:\n emit(REPEAT_ONE)\n else :\n emit(MIN_REPEAT_ONE)\n skip=_len(code);emit(0)\n emit(av[0])\n emit(av[1])\n _compile(code,av[2],flags)\n emit(SUCCESS)\n code[skip]=_len(code)-skip\n else :\n emit(REPEAT)\n skip=_len(code);emit(0)\n emit(av[0])\n emit(av[1])\n _compile(code,av[2],flags)\n code[skip]=_len(code)-skip\n if op is MAX_REPEAT:\n emit(MAX_UNTIL)\n else :\n emit(MIN_UNTIL)\n elif op is SUBPATTERN:\n group,add_flags,del_flags,p=av\n if group:\n emit(MARK)\n emit((group -1)*2)\n \n _compile(code,p,_combine_flags(flags,add_flags,del_flags))\n if group:\n emit(MARK)\n emit((group -1)*2+1)\n elif op in SUCCESS_CODES:\n emit(op)\n elif op in ASSERT_CODES:\n emit(op)\n skip=_len(code);emit(0)\n if av[0]>=0:\n emit(0)\n else :\n lo,hi=av[1].getwidth()\n if lo !=hi:\n raise error(\"look-behind requires fixed-width pattern\")\n emit(lo)\n _compile(code,av[1],flags)\n emit(SUCCESS)\n code[skip]=_len(code)-skip\n elif op is CALL:\n emit(op)\n skip=_len(code);emit(0)\n _compile(code,av,flags)\n emit(SUCCESS)\n code[skip]=_len(code)-skip\n elif op is AT:\n emit(op)\n if flags&SRE_FLAG_MULTILINE:\n av=AT_MULTILINE.get(av,av)\n if flags&SRE_FLAG_LOCALE:\n av=AT_LOCALE.get(av,av)\n elif (flags&SRE_FLAG_UNICODE)and not (flags&SRE_FLAG_ASCII):\n av=AT_UNICODE.get(av,av)\n emit(av)\n elif op is BRANCH:\n emit(op)\n tail=[]\n tailappend=tail.append\n for av in av[1]:\n skip=_len(code);emit(0)\n \n _compile(code,av,flags)\n emit(JUMP)\n tailappend(_len(code));emit(0)\n code[skip]=_len(code)-skip\n emit(FAILURE)\n for tail in tail:\n code[tail]=_len(code)-tail\n elif op is CATEGORY:\n emit(op)\n if flags&SRE_FLAG_LOCALE:\n av=CH_LOCALE[av]\n elif (flags&SRE_FLAG_UNICODE)and not (flags&SRE_FLAG_ASCII):\n av=CH_UNICODE[av]\n emit(av)\n elif op is GROUPREF:\n if not flags&SRE_FLAG_IGNORECASE:\n emit(op)\n elif flags&SRE_FLAG_LOCALE:\n emit(GROUPREF_LOC_IGNORE)\n elif not fixes:\n emit(GROUPREF_IGNORE)\n else :\n emit(GROUPREF_UNI_IGNORE)\n emit(av -1)\n elif op is GROUPREF_EXISTS:\n emit(op)\n emit(av[0]-1)\n skipyes=_len(code);emit(0)\n _compile(code,av[1],flags)\n if av[2]:\n emit(JUMP)\n skipno=_len(code);emit(0)\n code[skipyes]=_len(code)-skipyes+1\n _compile(code,av[2],flags)\n code[skipno]=_len(code)-skipno\n else :\n code[skipyes]=_len(code)-skipyes+1\n else :\n raise error(\"internal: unsupported operand type %r\"%(op,))\n \ndef _compile_charset(charset,flags,code):\n\n emit=code.append\n for op,av in charset:\n emit(op)\n if op is NEGATE:\n pass\n elif op is LITERAL:\n emit(av)\n elif op is RANGE or op is RANGE_UNI_IGNORE:\n emit(av[0])\n emit(av[1])\n elif op is CHARSET:\n code.extend(av)\n elif op is BIGCHARSET:\n code.extend(av)\n elif op is CATEGORY:\n if flags&SRE_FLAG_LOCALE:\n emit(CH_LOCALE[av])\n elif (flags&SRE_FLAG_UNICODE)and not (flags&SRE_FLAG_ASCII):\n emit(CH_UNICODE[av])\n else :\n emit(av)\n else :\n raise error(\"internal: unsupported set operator %r\"%(op,))\n emit(FAILURE)\n \ndef _optimize_charset(charset,iscased=None ,fixup=None ,fixes=None ):\n\n out=[]\n tail=[]\n charmap=bytearray(256)\n hascased=False\n for op,av in charset:\n while True :\n try :\n if op is LITERAL:\n if fixup:\n lo=fixup(av)\n charmap[lo]=1\n if fixes and lo in fixes:\n for k in fixes[lo]:\n charmap[k]=1\n if not hascased and iscased(av):\n hascased=True\n else :\n charmap[av]=1\n elif op is RANGE:\n r=range(av[0],av[1]+1)\n if fixup:\n if fixes:\n for i in 
map(fixup,r):\n charmap[i]=1\n if i in fixes:\n for k in fixes[i]:\n charmap[k]=1\n else :\n for i in map(fixup,r):\n charmap[i]=1\n if not hascased:\n hascased=any(map(iscased,r))\n else :\n for i in r:\n charmap[i]=1\n elif op is NEGATE:\n out.append((op,av))\n else :\n tail.append((op,av))\n except IndexError:\n if len(charmap)==256:\n \n charmap +=b'\\0'*0xff00\n continue\n \n if fixup:\n hascased=True\n \n \n \n if op is RANGE:\n op=RANGE_UNI_IGNORE\n tail.append((op,av))\n break\n \n \n runs=[]\n q=0\n while True :\n p=charmap.find(1,q)\n if p <0:\n break\n if len(runs)>=2:\n runs=None\n break\n q=charmap.find(0,p)\n if q <0:\n runs.append((p,len(charmap)))\n break\n runs.append((p,q))\n if runs is not None :\n \n for p,q in runs:\n if q -p ==1:\n out.append((LITERAL,p))\n else :\n out.append((RANGE,(p,q -1)))\n out +=tail\n \n if hascased or len(out)0xffff:\n return None\n if any(map(iscased,range(av[0],av[1]+1))):\n return None\n return charset\n return None\n \ndef _compile_info(code,pattern,flags):\n\n\n\n lo,hi=pattern.getwidth()\n if hi >MAXCODE:\n hi=MAXCODE\n if lo ==0:\n code.extend([INFO,4,0,lo,hi])\n return\n \n prefix=[]\n prefix_skip=0\n charset=[]\n if not (flags&SRE_FLAG_IGNORECASE and flags&SRE_FLAG_LOCALE):\n \n prefix,prefix_skip,got_all=_get_literal_prefix(pattern,flags)\n \n if not prefix:\n charset=_get_charset_prefix(pattern,flags)\n \n \n \n \n \n emit=code.append\n emit(INFO)\n skip=len(code);emit(0)\n \n mask=0\n if prefix:\n mask=SRE_INFO_PREFIX\n if prefix_skip is None and got_all:\n mask=mask |SRE_INFO_LITERAL\n elif charset:\n mask=mask |SRE_INFO_CHARSET\n emit(mask)\n \n if lo MAXGROUPS:\n raise error(\"too many groups\")\n if name is not None :\n ogid=self.groupdict.get(name,None )\n if ogid is not None :\n raise error(\"redefinition of group name %r as group %d; \"\n \"was group %d\"%(name,gid,ogid))\n self.groupdict[name]=gid\n return gid\n def closegroup(self,gid,p):\n self.groupwidths[gid]=p.getwidth()\n def checkgroup(self,gid):\n return gid =self.lookbehindgroups:\n raise source.error('cannot refer to group defined in the same '\n 'lookbehind subpattern')\n \nclass SubPattern:\n\n def __init__(self,pattern,data=None ):\n self.pattern=pattern\n if data is None :\n data=[]\n self.data=data\n self.width=None\n \n def dump(self,level=0):\n nl=True\n seqtypes=(tuple,list)\n for op,av in self.data:\n print(level *\" \"+str(op),end='')\n if op is IN:\n \n print()\n for op,a in av:\n print((level+1)*\" \"+str(op),a)\n elif op is BRANCH:\n print()\n for i,a in enumerate(av[1]):\n if i:\n print(level *\" \"+\"OR\")\n a.dump(level+1)\n elif op is GROUPREF_EXISTS:\n condgroup,item_yes,item_no=av\n print('',condgroup)\n item_yes.dump(level+1)\n if item_no:\n print(level *\" \"+\"ELSE\")\n item_no.dump(level+1)\n elif isinstance(av,seqtypes):\n nl=False\n for a in av:\n if isinstance(a,SubPattern):\n if not nl:\n print()\n a.dump(level+1)\n nl=True\n else :\n if not nl:\n print(' ',end='')\n print(a,end='')\n nl=False\n if not nl:\n print()\n else :\n print('',av)\n def __repr__(self):\n return repr(self.data)\n def __len__(self):\n return len(self.data)\n def __delitem__(self,index):\n del self.data[index]\n def __getitem__(self,index):\n if isinstance(index,slice):\n return SubPattern(self.pattern,self.data[index])\n return self.data[index]\n def __setitem__(self,index,code):\n self.data[index]=code\n def insert(self,index,code):\n self.data.insert(index,code)\n def append(self,code):\n self.data.append(code)\n def getwidth(self):\n \n if self.width is not 
None :\n return self.width\n lo=hi=0\n for op,av in self.data:\n if op is BRANCH:\n i=MAXREPEAT -1\n j=0\n for av in av[1]:\n l,h=av.getwidth()\n i=min(i,l)\n j=max(j,h)\n lo=lo+i\n hi=hi+j\n elif op is CALL:\n i,j=av.getwidth()\n lo=lo+i\n hi=hi+j\n elif op is SUBPATTERN:\n i,j=av[-1].getwidth()\n lo=lo+i\n hi=hi+j\n elif op in _REPEATCODES:\n i,j=av[2].getwidth()\n lo=lo+i *av[0]\n hi=hi+j *av[1]\n elif op in _UNITCODES:\n lo=lo+1\n hi=hi+1\n elif op is GROUPREF:\n i,j=self.pattern.groupwidths[av]\n lo=lo+i\n hi=hi+j\n elif op is GROUPREF_EXISTS:\n i,j=av[1].getwidth()\n if av[2]is not None :\n l,h=av[2].getwidth()\n i=min(i,l)\n j=max(j,h)\n else :\n i=0\n lo=lo+i\n hi=hi+j\n elif op is SUCCESS:\n break\n self.width=min(lo,MAXREPEAT -1),min(hi,MAXREPEAT)\n return self.width\n \nclass Tokenizer:\n def __init__(self,string):\n self.istext=isinstance(string,str)\n self.string=string\n if not self.istext:\n string=str(string,'latin1')\n self.decoded_string=string\n self.index=0\n self.next=None\n self.__next()\n def __next(self):\n index=self.index\n try :\n char=self.decoded_string[index]\n except IndexError:\n self.next=None\n return\n if char ==\"\\\\\":\n index +=1\n try :\n char +=self.decoded_string[index]\n except IndexError:\n raise error(\"bad escape (end of pattern)\",\n self.string,len(self.string)-1)from None\n self.index=index+1\n self.next=char\n def match(self,char):\n if char ==self.next:\n self.__next()\n return True\n return False\n def get(self):\n this=self.next\n self.__next()\n return this\n def getwhile(self,n,charset):\n result=''\n for _ in range(n):\n c=self.next\n if c not in charset:\n break\n result +=c\n self.__next()\n return result\n def getuntil(self,terminator):\n result=''\n while True :\n c=self.next\n self.__next()\n if c is None :\n if not result:\n raise self.error(\"missing group name\")\n raise self.error(\"missing %s, unterminated name\"%terminator,\n len(result))\n if c ==terminator:\n if not result:\n raise self.error(\"missing group name\",1)\n break\n result +=c\n return result\n @property\n def pos(self):\n return self.index -len(self.next or '')\n def tell(self):\n return self.index -len(self.next or '')\n def seek(self,index):\n self.index=index\n self.__next()\n \n def error(self,msg,offset=0):\n return error(msg,self.string,self.tell()-offset)\n \ndef _class_escape(source,escape):\n\n code=ESCAPES.get(escape)\n if code:\n return code\n code=CATEGORIES.get(escape)\n if code and code[0]is IN:\n return code\n try :\n c=escape[1:2]\n if c ==\"x\":\n \n escape +=source.getwhile(2,HEXDIGITS)\n if len(escape)!=4:\n raise source.error(\"incomplete escape %s\"%escape,len(escape))\n return LITERAL,int(escape[2:],16)\n elif c ==\"u\"and source.istext:\n \n escape +=source.getwhile(4,HEXDIGITS)\n if len(escape)!=6:\n raise source.error(\"incomplete escape %s\"%escape,len(escape))\n return LITERAL,int(escape[2:],16)\n elif c ==\"U\"and source.istext:\n \n escape +=source.getwhile(8,HEXDIGITS)\n if len(escape)!=10:\n raise source.error(\"incomplete escape %s\"%escape,len(escape))\n c=int(escape[2:],16)\n chr(c)\n return LITERAL,c\n elif c in OCTDIGITS:\n \n escape +=source.getwhile(2,OCTDIGITS)\n c=int(escape[1:],8)\n if c >0o377:\n raise source.error('octal escape value %s outside of '\n 'range 0-0o377'%escape,len(escape))\n return LITERAL,c\n elif c in DIGITS:\n raise ValueError\n if len(escape)==2:\n if c in ASCIILETTERS:\n raise source.error('bad escape %s'%escape,len(escape))\n return LITERAL,ord(escape[1])\n except ValueError:\n pass\n raise 
source.error(\"bad escape %s\"%escape,len(escape))\n \ndef _escape(source,escape,state):\n\n code=CATEGORIES.get(escape)\n if code:\n return code\n code=ESCAPES.get(escape)\n if code:\n return code\n try :\n c=escape[1:2]\n if c ==\"x\":\n \n escape +=source.getwhile(2,HEXDIGITS)\n if len(escape)!=4:\n raise source.error(\"incomplete escape %s\"%escape,len(escape))\n return LITERAL,int(escape[2:],16)\n elif c ==\"u\"and source.istext:\n \n escape +=source.getwhile(4,HEXDIGITS)\n if len(escape)!=6:\n raise source.error(\"incomplete escape %s\"%escape,len(escape))\n return LITERAL,int(escape[2:],16)\n elif c ==\"U\"and source.istext:\n \n escape +=source.getwhile(8,HEXDIGITS)\n if len(escape)!=10:\n raise source.error(\"incomplete escape %s\"%escape,len(escape))\n c=int(escape[2:],16)\n chr(c)\n return LITERAL,c\n elif c ==\"0\":\n \n escape +=source.getwhile(2,OCTDIGITS)\n return LITERAL,int(escape[1:],8)\n elif c in DIGITS:\n \n if source.next in DIGITS:\n escape +=source.get()\n if (escape[1]in OCTDIGITS and escape[2]in OCTDIGITS and\n source.next in OCTDIGITS):\n \n escape +=source.get()\n c=int(escape[1:],8)\n if c >0o377:\n raise source.error('octal escape value %s outside of '\n 'range 0-0o377'%escape,\n len(escape))\n return LITERAL,c\n \n group=int(escape[1:])\n if group =MAXREPEAT:\n raise OverflowError(\"the repetition number is too large\")\n if hi:\n max=int(hi)\n if max >=MAXREPEAT:\n raise OverflowError(\"the repetition number is too large\")\n if max \")\n if not name.isidentifier():\n msg=\"bad character in group name %r\"%name\n raise source.error(msg,len(name)+1)\n elif sourcematch(\"=\"):\n \n name=source.getuntil(\")\")\n if not name.isidentifier():\n msg=\"bad character in group name %r\"%name\n raise source.error(msg,len(name)+1)\n gid=state.groupdict.get(name)\n if gid is None :\n msg=\"unknown group name %r\"%name\n raise source.error(msg,len(name)+1)\n if not state.checkgroup(gid):\n raise source.error(\"cannot refer to an open group\",\n len(name)+1)\n state.checklookbehindgroup(gid,source)\n subpatternappend((GROUPREF,gid))\n continue\n \n else :\n char=sourceget()\n if char is None :\n raise source.error(\"unexpected end of pattern\")\n raise source.error(\"unknown extension ?P\"+char,\n len(char)+2)\n elif char ==\":\":\n \n group=None\n elif char ==\"#\":\n \n while True :\n if source.next is None :\n raise source.error(\"missing ), unterminated comment\",\n source.tell()-start)\n if sourceget()==\")\":\n break\n continue\n \n elif char in \"=!<\":\n \n dir=1\n if char ==\"<\":\n char=sourceget()\n if char is None :\n raise source.error(\"unexpected end of pattern\")\n if char not in \"=!\":\n raise source.error(\"unknown extension ?<\"+char,\n len(char)+2)\n dir=-1\n lookbehindgroups=state.lookbehindgroups\n if lookbehindgroups is None :\n state.lookbehindgroups=state.groups\n p=_parse_sub(source,state,verbose,nested+1)\n if dir <0:\n if lookbehindgroups is None :\n state.lookbehindgroups=None\n if not sourcematch(\")\"):\n raise source.error(\"missing ), unterminated subpattern\",\n source.tell()-start)\n if char ==\"=\":\n subpatternappend((ASSERT,(dir,p)))\n else :\n subpatternappend((ASSERT_NOT,(dir,p)))\n continue\n \n elif char ==\"(\":\n \n condname=source.getuntil(\")\")\n if condname.isidentifier():\n condgroup=state.groupdict.get(condname)\n if condgroup is None :\n msg=\"unknown group name %r\"%condname\n raise source.error(msg,len(condname)+1)\n else :\n try :\n condgroup=int(condname)\n if condgroup <0:\n raise ValueError\n except ValueError:\n 
msg=\"bad character in group name %r\"%condname\n raise source.error(msg,len(condname)+1)from None\n if not condgroup:\n raise source.error(\"bad group number\",\n len(condname)+1)\n if condgroup >=MAXGROUPS:\n msg=\"invalid group reference %d\"%condgroup\n raise source.error(msg,len(condname)+1)\n state.checklookbehindgroup(condgroup,source)\n item_yes=_parse(source,state,verbose,nested+1)\n if source.match(\"|\"):\n item_no=_parse(source,state,verbose,nested+1)\n if source.next ==\"|\":\n raise source.error(\"conditional backref with more than two branches\")\n else :\n item_no=None\n if not source.match(\")\"):\n raise source.error(\"missing ), unterminated subpattern\",\n source.tell()-start)\n subpatternappend((GROUPREF_EXISTS,(condgroup,item_yes,item_no)))\n continue\n \n elif char in FLAGS or char ==\"-\":\n \n flags=_parse_flags(source,state,char)\n if flags is None :\n if not first or subpattern:\n import warnings\n warnings.warn(\n 'Flags not at the start of the expression %r%s'%(\n source.string[:20],\n ' (truncated)'if len(source.string)>20 else '',\n ),\n DeprecationWarning,stacklevel=nested+6\n )\n if (state.flags&SRE_FLAG_VERBOSE)and not verbose:\n raise Verbose\n continue\n \n add_flags,del_flags=flags\n group=None\n else :\n raise source.error(\"unknown extension ?\"+char,\n len(char)+1)\n \n \n if group is not None :\n try :\n group=state.opengroup(name)\n except error as err:\n raise source.error(err.msg,len(name)+1)from None\n sub_verbose=((verbose or (add_flags&SRE_FLAG_VERBOSE))and\n not (del_flags&SRE_FLAG_VERBOSE))\n p=_parse_sub(source,state,sub_verbose,nested+1)\n if not source.match(\")\"):\n raise source.error(\"missing ), unterminated subpattern\",\n source.tell()-start)\n if group is not None :\n state.closegroup(group,p)\n subpatternappend((SUBPATTERN,(group,add_flags,del_flags,p)))\n \n elif this ==\"^\":\n subpatternappend((AT,AT_BEGINNING))\n \n elif this ==\"$\":\n subpatternappend((AT,AT_END))\n \n else :\n raise AssertionError(\"unsupported special character %r\"%(char,))\n \n \n for i in range(len(subpattern))[::-1]:\n op,av=subpattern[i]\n if op is SUBPATTERN:\n group,add_flags,del_flags,p=av\n if group is None and not add_flags and not del_flags:\n subpattern[i:i+1]=p\n \n return subpattern\n \ndef _parse_flags(source,state,char):\n sourceget=source.get\n add_flags=0\n del_flags=0\n if char !=\"-\":\n while True :\n flag=FLAGS[char]\n if source.istext:\n if char =='L':\n msg=\"bad inline flags: cannot use 'L' flag with a str pattern\"\n raise source.error(msg)\n else :\n if char =='u':\n msg=\"bad inline flags: cannot use 'u' flag with a bytes pattern\"\n raise source.error(msg)\n add_flags |=flag\n if (flag&TYPE_FLAGS)and (add_flags&TYPE_FLAGS)!=flag:\n msg=\"bad inline flags: flags 'a', 'u' and 'L' are incompatible\"\n raise source.error(msg)\n char=sourceget()\n if char is None :\n raise source.error(\"missing -, : or )\")\n if char in \")-:\":\n break\n if char not in FLAGS:\n msg=\"unknown flag\"if char.isalpha()else \"missing -, : or )\"\n raise source.error(msg,len(char))\n if char ==\")\":\n state.flags |=add_flags\n return None\n if add_flags&GLOBAL_FLAGS:\n raise source.error(\"bad inline flags: cannot turn on global flag\",1)\n if char ==\"-\":\n char=sourceget()\n if char is None :\n raise source.error(\"missing flag\")\n if char not in FLAGS:\n msg=\"unknown flag\"if char.isalpha()else \"missing flag\"\n raise source.error(msg,len(char))\n while True :\n flag=FLAGS[char]\n if flag&TYPE_FLAGS:\n msg=\"bad inline flags: cannot turn off 
flags 'a', 'u' and 'L'\"\n raise source.error(msg)\n del_flags |=flag\n char=sourceget()\n if char is None :\n raise source.error(\"missing :\")\n if char ==\":\":\n break\n if char not in FLAGS:\n msg=\"unknown flag\"if char.isalpha()else \"missing :\"\n raise source.error(msg,len(char))\n assert char ==\":\"\n if del_flags&GLOBAL_FLAGS:\n raise source.error(\"bad inline flags: cannot turn off global flag\",1)\n if add_flags&del_flags:\n raise source.error(\"bad inline flags: flag turned on and off\",1)\n return add_flags,del_flags\n \ndef fix_flags(src,flags):\n\n if isinstance(src,str):\n if flags&SRE_FLAG_LOCALE:\n raise ValueError(\"cannot use LOCALE flag with a str pattern\")\n if not flags&SRE_FLAG_ASCII:\n flags |=SRE_FLAG_UNICODE\n elif flags&SRE_FLAG_UNICODE:\n raise ValueError(\"ASCII and UNICODE flags are incompatible\")\n else :\n if flags&SRE_FLAG_UNICODE:\n raise ValueError(\"cannot use UNICODE flag with a bytes pattern\")\n if flags&SRE_FLAG_LOCALE and flags&SRE_FLAG_ASCII:\n raise ValueError(\"ASCII and LOCALE flags are incompatible\")\n return flags\n \ndef parse(str,flags=0,pattern=None ):\n\n\n source=Tokenizer(str)\n \n if pattern is None :\n pattern=Pattern()\n pattern.flags=flags\n pattern.str=str\n \n try :\n p=_parse_sub(source,pattern,flags&SRE_FLAG_VERBOSE,0)\n except Verbose:\n \n \n pattern=Pattern()\n pattern.flags=flags |SRE_FLAG_VERBOSE\n pattern.str=str\n source.seek(0)\n p=_parse_sub(source,pattern,True ,0)\n \n p.pattern.flags=fix_flags(str,p.pattern.flags)\n \n if source.next is not None :\n assert source.next ==\")\"\n raise source.error(\"unbalanced parenthesis\")\n \n if flags&SRE_FLAG_DEBUG:\n p.dump()\n \n return p\n \ndef parse_template(source,pattern):\n\n\n s=Tokenizer(source)\n sget=s.get\n groups=[]\n literals=[]\n literal=[]\n lappend=literal.append\n def addgroup(index,pos):\n if index >pattern.groups:\n raise s.error(\"invalid group reference %d\"%index,pos)\n if literal:\n literals.append(''.join(literal))\n del literal[:]\n groups.append((len(literals),index))\n literals.append(None )\n groupindex=pattern.groupindex\n while True :\n this=sget()\n if this is None :\n break\n if this[0]==\"\\\\\":\n \n c=this[1]\n if c ==\"g\":\n name=\"\"\n if not s.match(\"<\"):\n raise s.error(\"missing <\")\n name=s.getuntil(\">\")\n if name.isidentifier():\n try :\n index=groupindex[name]\n except KeyError:\n raise IndexError(\"unknown group name %r\"%name)\n else :\n try :\n index=int(name)\n if index <0:\n raise ValueError\n except ValueError:\n raise s.error(\"bad character in group name %r\"%name,\n len(name)+1)from None\n if index >=MAXGROUPS:\n raise s.error(\"invalid group reference %d\"%index,\n len(name)+1)\n addgroup(index,len(name)+1)\n elif c ==\"0\":\n if s.next in OCTDIGITS:\n this +=sget()\n if s.next in OCTDIGITS:\n this +=sget()\n lappend(chr(int(this[1:],8)&0xff))\n elif c in DIGITS:\n isoctal=False\n if s.next in DIGITS:\n this +=sget()\n if (c in OCTDIGITS and this[2]in OCTDIGITS and\n s.next in OCTDIGITS):\n this +=sget()\n isoctal=True\n c=int(this[1:],8)\n if c >0o377:\n raise s.error('octal escape value %s outside of '\n 'range 0-0o377'%this,len(this))\n lappend(chr(c))\n if not isoctal:\n addgroup(int(this[1:]),len(this)-1)\n else :\n try :\n this=chr(ESCAPES[this][1])\n except KeyError:\n if c in ASCIILETTERS:\n raise s.error('bad escape %s'%this,len(this))\n lappend(this)\n else :\n lappend(this)\n if literal:\n literals.append(''.join(literal))\n if not isinstance(source,str):\n \n \n literals=[None if s is None else 
s.encode('latin-1')for s in literals]\n return groups,literals\n \ndef expand_template(template,match):\n g=match.group\n empty=match.string[:0]\n groups,literals=template\n literals=literals[:]\n try :\n for index,group in groups:\n literals[index]=g(group)or empty\n except IndexError:\n raise error(\"invalid group reference %d\"%index)\n return empty.join(literals)\n", ["sre_constants", "warnings"]], "stat": [".py", "''\n\n\n\n\n\n\nST_MODE=0\nST_INO=1\nST_DEV=2\nST_NLINK=3\nST_UID=4\nST_GID=5\nST_SIZE=6\nST_ATIME=7\nST_MTIME=8\nST_CTIME=9\n\n\n\ndef S_IMODE(mode):\n ''\n\n \n return mode&0o7777\n \ndef S_IFMT(mode):\n ''\n\n \n return mode&0o170000\n \n \n \n \nS_IFDIR=0o040000\nS_IFCHR=0o020000\nS_IFBLK=0o060000\nS_IFREG=0o100000\nS_IFIFO=0o010000\nS_IFLNK=0o120000\nS_IFSOCK=0o140000\n\n\n\ndef S_ISDIR(mode):\n ''\n return S_IFMT(mode)==S_IFDIR\n \ndef S_ISCHR(mode):\n ''\n return S_IFMT(mode)==S_IFCHR\n \ndef S_ISBLK(mode):\n ''\n return S_IFMT(mode)==S_IFBLK\n \ndef S_ISREG(mode):\n ''\n return S_IFMT(mode)==S_IFREG\n \ndef S_ISFIFO(mode):\n ''\n return S_IFMT(mode)==S_IFIFO\n \ndef S_ISLNK(mode):\n ''\n return S_IFMT(mode)==S_IFLNK\n \ndef S_ISSOCK(mode):\n ''\n return S_IFMT(mode)==S_IFSOCK\n \n \n \nS_ISUID=0o4000\nS_ISGID=0o2000\nS_ENFMT=S_ISGID\nS_ISVTX=0o1000\nS_IREAD=0o0400\nS_IWRITE=0o0200\nS_IEXEC=0o0100\nS_IRWXU=0o0700\nS_IRUSR=0o0400\nS_IWUSR=0o0200\nS_IXUSR=0o0100\nS_IRWXG=0o0070\nS_IRGRP=0o0040\nS_IWGRP=0o0020\nS_IXGRP=0o0010\nS_IRWXO=0o0007\nS_IROTH=0o0004\nS_IWOTH=0o0002\nS_IXOTH=0o0001\n\n\n\nUF_NODUMP=0x00000001\nUF_IMMUTABLE=0x00000002\nUF_APPEND=0x00000004\nUF_OPAQUE=0x00000008\nUF_NOUNLINK=0x00000010\nUF_COMPRESSED=0x00000020\nUF_HIDDEN=0x00008000\nSF_ARCHIVED=0x00010000\nSF_IMMUTABLE=0x00020000\nSF_APPEND=0x00040000\nSF_NOUNLINK=0x00100000\nSF_SNAPSHOT=0x00200000\n\n\n_filemode_table=(\n((S_IFLNK,\"l\"),\n(S_IFREG,\"-\"),\n(S_IFBLK,\"b\"),\n(S_IFDIR,\"d\"),\n(S_IFCHR,\"c\"),\n(S_IFIFO,\"p\")),\n\n((S_IRUSR,\"r\"),),\n((S_IWUSR,\"w\"),),\n((S_IXUSR |S_ISUID,\"s\"),\n(S_ISUID,\"S\"),\n(S_IXUSR,\"x\")),\n\n((S_IRGRP,\"r\"),),\n((S_IWGRP,\"w\"),),\n((S_IXGRP |S_ISGID,\"s\"),\n(S_ISGID,\"S\"),\n(S_IXGRP,\"x\")),\n\n((S_IROTH,\"r\"),),\n((S_IWOTH,\"w\"),),\n((S_IXOTH |S_ISVTX,\"t\"),\n(S_ISVTX,\"T\"),\n(S_IXOTH,\"x\"))\n)\n\ndef filemode(mode):\n ''\n perm=[]\n for table in _filemode_table:\n for bit,char in table:\n if mode&bit ==bit:\n perm.append(char)\n break\n else :\n perm.append(\"-\")\n return \"\".join(perm)\n \n \n \n \n \nFILE_ATTRIBUTE_ARCHIVE=32\nFILE_ATTRIBUTE_COMPRESSED=2048\nFILE_ATTRIBUTE_DEVICE=64\nFILE_ATTRIBUTE_DIRECTORY=16\nFILE_ATTRIBUTE_ENCRYPTED=16384\nFILE_ATTRIBUTE_HIDDEN=2\nFILE_ATTRIBUTE_INTEGRITY_STREAM=32768\nFILE_ATTRIBUTE_NORMAL=128\nFILE_ATTRIBUTE_NOT_CONTENT_INDEXED=8192\nFILE_ATTRIBUTE_NO_SCRUB_DATA=131072\nFILE_ATTRIBUTE_OFFLINE=4096\nFILE_ATTRIBUTE_READONLY=1\nFILE_ATTRIBUTE_REPARSE_POINT=1024\nFILE_ATTRIBUTE_SPARSE_FILE=512\nFILE_ATTRIBUTE_SYSTEM=4\nFILE_ATTRIBUTE_TEMPORARY=256\nFILE_ATTRIBUTE_VIRTUAL=65536\n\n\n\ntry :\n from _stat import *\nexcept ImportError:\n pass\n", ["_stat"]], "string": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n__all__=[\"ascii_letters\",\"ascii_lowercase\",\"ascii_uppercase\",\"capwords\",\n\"digits\",\"hexdigits\",\"octdigits\",\"printable\",\"punctuation\",\n\"whitespace\",\"Formatter\",\"Template\"]\n\nimport _string\n\n\nwhitespace=' 
\\t\\n\\r\\v\\f'\nascii_lowercase='abcdefghijklmnopqrstuvwxyz'\nascii_uppercase='ABCDEFGHIJKLMNOPQRSTUVWXYZ'\nascii_letters=ascii_lowercase+ascii_uppercase\ndigits='0123456789'\nhexdigits=digits+'abcdef'+'ABCDEF'\noctdigits='01234567'\npunctuation=r\"\"\"!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~\"\"\"\nprintable=digits+ascii_letters+punctuation+whitespace\n\n\n\n\ndef capwords(s,sep=None ):\n ''\n\n\n\n\n\n\n\n\n \n return (sep or ' ').join(x.capitalize()for x in s.split(sep))\n \n \n \nimport re as _re\nfrom collections import ChainMap as _ChainMap\n\nclass _TemplateMetaclass(type):\n pattern=r\"\"\"\n %(delim)s(?:\n (?P<escaped>%(delim)s) | # Escape sequence of two delimiters\n (?P<named>%(id)s) | # delimiter and a Python identifier\n {(?P<braced>%(bid)s)} | # delimiter and a braced identifier\n (?P<invalid>) # Other ill-formed delimiter exprs\n )\n \"\"\"\n \n def __init__(cls,name,bases,dct):\n super(_TemplateMetaclass,cls).__init__(name,bases,dct)\n if 'pattern'in dct:\n pattern=cls.pattern\n else :\n pattern=_TemplateMetaclass.pattern %{\n 'delim':_re.escape(cls.delimiter),\n 'id':cls.idpattern,\n 'bid':cls.braceidpattern or cls.idpattern,\n }\n cls.pattern=_re.compile(pattern,cls.flags |_re.VERBOSE)\n \n \nclass Template(metaclass=_TemplateMetaclass):\n ''\n \n delimiter='$'\n \n \n \n \n idpattern=r'(?a:[_a-z][_a-z0-9]*)'\n braceidpattern=None\n flags=_re.IGNORECASE\n \n def __init__(self,template):\n self.template=template\n \n \n \n def _invalid(self,mo):\n i=mo.start('invalid')\n lines=self.template[:i].splitlines(keepends=True )\n if not lines:\n colno=1\n lineno=1\n else :\n colno=i -len(''.join(lines[:-1]))\n lineno=len(lines)\n raise ValueError('Invalid placeholder in string: line %d, col %d'%\n (lineno,colno))\n \n def substitute(*args,**kws):\n if not args:\n raise TypeError(\"descriptor 'substitute' of 'Template' object \"\n \"needs an argument\")\n self,*args=args\n if len(args)>1:\n raise TypeError('Too many positional arguments')\n if not args:\n mapping=kws\n elif kws:\n mapping=_ChainMap(kws,args[0])\n else :\n mapping=args[0]\n \n def convert(mo):\n \n named=mo.group('named')or mo.group('braced')\n if named is not None :\n return str(mapping[named])\n if mo.group('escaped')is not None :\n return self.delimiter\n if mo.group('invalid')is not None :\n self._invalid(mo)\n raise ValueError('Unrecognized named group in pattern',\n self.pattern)\n return self.pattern.sub(convert,self.template)\n \n def safe_substitute(*args,**kws):\n if not args:\n raise TypeError(\"descriptor 'safe_substitute' of 'Template' object \"\n \"needs an argument\")\n self,*args=args\n if len(args)>1:\n raise TypeError('Too many positional arguments')\n if not args:\n mapping=kws\n elif kws:\n mapping=_ChainMap(kws,args[0])\n else :\n mapping=args[0]\n \n def convert(mo):\n named=mo.group('named')or mo.group('braced')\n if named is not None :\n try :\n return str(mapping[named])\n except KeyError:\n return mo.group()\n if mo.group('escaped')is not None :\n return self.delimiter\n if mo.group('invalid')is not None :\n return mo.group()\n raise ValueError('Unrecognized named group in pattern',\n self.pattern)\n return self.pattern.sub(convert,self.template)\n \n \n \n \n \n \n \n \n \n \n \n \n \nclass Formatter:\n def format(*args,**kwargs):\n if not args:\n raise TypeError(\"descriptor 'format' of 'Formatter' object \"\n \"needs an argument\")\n self,*args=args\n try :\n format_string,*args=args\n except ValueError:\n raise TypeError(\"format() missing 1 required positional \"\n \"argument: 'format_string'\")from None\n return 
self.vformat(format_string,args,kwargs)\n \n def vformat(self,format_string,args,kwargs):\n used_args=set()\n result,_=self._vformat(format_string,args,kwargs,used_args,2)\n self.check_unused_args(used_args,args,kwargs)\n return result\n \n def _vformat(self,format_string,args,kwargs,used_args,recursion_depth,\n auto_arg_index=0):\n if recursion_depth <0:\n raise ValueError('Max string recursion exceeded')\n result=[]\n for literal_text,field_name,format_spec,conversion in\\\n self.parse(format_string):\n \n \n if literal_text:\n result.append(literal_text)\n \n \n if field_name is not None :\n \n \n \n \n if field_name =='':\n if auto_arg_index is False :\n raise ValueError('cannot switch from manual field '\n 'specification to automatic field '\n 'numbering')\n field_name=str(auto_arg_index)\n auto_arg_index +=1\n elif field_name.isdigit():\n if auto_arg_index:\n raise ValueError('cannot switch from manual field '\n 'specification to automatic field '\n 'numbering')\n \n \n auto_arg_index=False\n \n \n \n obj,arg_used=self.get_field(field_name,args,kwargs)\n used_args.add(arg_used)\n \n \n obj=self.convert_field(obj,conversion)\n \n \n format_spec,auto_arg_index=self._vformat(\n format_spec,args,kwargs,\n used_args,recursion_depth -1,\n auto_arg_index=auto_arg_index)\n \n \n result.append(self.format_field(obj,format_spec))\n \n return ''.join(result),auto_arg_index\n \n \n def get_value(self,key,args,kwargs):\n if isinstance(key,int):\n return args[key]\n else :\n return kwargs[key]\n \n \n def check_unused_args(self,used_args,args,kwargs):\n pass\n \n \n def format_field(self,value,format_spec):\n return format(value,format_spec)\n \n \n def convert_field(self,value,conversion):\n \n if conversion is None :\n return value\n elif conversion =='s':\n return str(value)\n elif conversion =='r':\n return repr(value)\n elif conversion =='a':\n return ascii(value)\n raise ValueError(\"Unknown conversion specifier {0!s}\".format(conversion))\n \n \n \n \n \n \n \n \n \n def parse(self,format_string):\n return _string.formatter_parser(format_string)\n \n \n \n \n \n \n \n def get_field(self,field_name,args,kwargs):\n first,rest=_string.formatter_field_name_split(field_name)\n \n obj=self.get_value(first,args,kwargs)\n \n \n \n for is_attr,i in rest:\n if is_attr:\n obj=getattr(obj,i)\n else :\n obj=obj[i]\n \n return obj,first\n", ["_string", "collections", "re"]], "struct": [".py", "__all__=[\n\n'calcsize','pack','pack_into','unpack','unpack_from',\n'iter_unpack',\n\n\n'Struct',\n\n\n'error'\n]\n\nfrom _struct import *\nfrom _struct import _clearcache\nfrom _struct import __doc__\n", ["_struct"]], "subprocess": [".py", "\n\n\n\n\n\n\n\n\nr\"\"\"Subprocesses with accessible I/O streams\n\nThis module allows you to spawn processes, connect to their\ninput/output/error pipes, and obtain their return codes.\n\nFor a complete description of this module see the Python documentation.\n\nMain API\n========\nrun(...): Runs a command, waits for it to complete, then returns a\n CompletedProcess instance.\nPopen(...): A class for flexibly executing a command in a new process\n\nConstants\n---------\nDEVNULL: Special value that indicates that os.devnull should be used\nPIPE: Special value that indicates a pipe should be created\nSTDOUT: Special value that indicates that stderr should go to stdout\n\n\nOlder API\n=========\ncall(...): Runs a command, waits for it to complete, then returns\n the return code.\ncheck_call(...): Same as call() but raises CalledProcessError()\n if return code is not 
0\ncheck_output(...): Same as check_call() but returns the contents of\n stdout instead of a return code\ngetoutput(...): Runs a command in the shell, waits for it to complete,\n then returns the output\ngetstatusoutput(...): Runs a command in the shell, waits for it to complete,\n then returns a (exitcode, output) tuple\n\"\"\"\n\nimport sys\n_mswindows=(sys.platform ==\"win32\")\n\nimport io\nimport os\nimport time\nimport signal\nimport builtins\nimport warnings\nimport errno\nfrom time import monotonic as _time\n\n\nclass SubprocessError(Exception):pass\n\n\nclass CalledProcessError(SubprocessError):\n ''\n\n\n\n\n \n def __init__(self,returncode,cmd,output=None ,stderr=None ):\n self.returncode=returncode\n self.cmd=cmd\n self.output=output\n self.stderr=stderr\n \n def __str__(self):\n if self.returncode and self.returncode <0:\n try :\n return \"Command '%s' died with %r.\"%(\n self.cmd,signal.Signals(-self.returncode))\n except ValueError:\n return \"Command '%s' died with unknown signal %d.\"%(\n self.cmd,-self.returncode)\n else :\n return \"Command '%s' returned non-zero exit status %d.\"%(\n self.cmd,self.returncode)\n \n @property\n def stdout(self):\n ''\n return self.output\n \n @stdout.setter\n def stdout(self,value):\n \n \n self.output=value\n \n \nclass TimeoutExpired(SubprocessError):\n ''\n\n\n\n\n \n def __init__(self,cmd,timeout,output=None ,stderr=None ):\n self.cmd=cmd\n self.timeout=timeout\n self.output=output\n self.stderr=stderr\n \n def __str__(self):\n return (\"Command '%s' timed out after %s seconds\"%\n (self.cmd,self.timeout))\n \n @property\n def stdout(self):\n return self.output\n \n @stdout.setter\n def stdout(self,value):\n \n \n self.output=value\n \n \nif _mswindows:\n import threading\n import msvcrt\n import _winapi\n class STARTUPINFO:\n def __init__(self,*,dwFlags=0,hStdInput=None ,hStdOutput=None ,\n hStdError=None ,wShowWindow=0,lpAttributeList=None ):\n self.dwFlags=dwFlags\n self.hStdInput=hStdInput\n self.hStdOutput=hStdOutput\n self.hStdError=hStdError\n self.wShowWindow=wShowWindow\n self.lpAttributeList=lpAttributeList or {\"handle_list\":[]}\nelse :\n import _posixsubprocess\n import select\n import selectors\n import threading\n \n \n \n \n _PIPE_BUF=getattr(select,'PIPE_BUF',512)\n \n \n \n \n if hasattr(selectors,'PollSelector'):\n _PopenSelector=selectors.PollSelector\n else :\n _PopenSelector=selectors.SelectSelector\n \n \n__all__=[\"Popen\",\"PIPE\",\"STDOUT\",\"call\",\"check_call\",\"getstatusoutput\",\n\"getoutput\",\"check_output\",\"run\",\"CalledProcessError\",\"DEVNULL\",\n\"SubprocessError\",\"TimeoutExpired\",\"CompletedProcess\"]\n\n\n\nif _mswindows:\n from _winapi import (CREATE_NEW_CONSOLE,CREATE_NEW_PROCESS_GROUP,\n STD_INPUT_HANDLE,STD_OUTPUT_HANDLE,\n STD_ERROR_HANDLE,SW_HIDE,\n STARTF_USESTDHANDLES,STARTF_USESHOWWINDOW,\n ABOVE_NORMAL_PRIORITY_CLASS,BELOW_NORMAL_PRIORITY_CLASS,\n HIGH_PRIORITY_CLASS,IDLE_PRIORITY_CLASS,\n NORMAL_PRIORITY_CLASS,REALTIME_PRIORITY_CLASS,\n CREATE_NO_WINDOW,DETACHED_PROCESS,\n CREATE_DEFAULT_ERROR_MODE,CREATE_BREAKAWAY_FROM_JOB)\n \n __all__.extend([\"CREATE_NEW_CONSOLE\",\"CREATE_NEW_PROCESS_GROUP\",\n \"STD_INPUT_HANDLE\",\"STD_OUTPUT_HANDLE\",\n \"STD_ERROR_HANDLE\",\"SW_HIDE\",\n \"STARTF_USESTDHANDLES\",\"STARTF_USESHOWWINDOW\",\n \"STARTUPINFO\",\n \"ABOVE_NORMAL_PRIORITY_CLASS\",\"BELOW_NORMAL_PRIORITY_CLASS\",\n \"HIGH_PRIORITY_CLASS\",\"IDLE_PRIORITY_CLASS\",\n \"NORMAL_PRIORITY_CLASS\",\"REALTIME_PRIORITY_CLASS\",\n \"CREATE_NO_WINDOW\",\"DETACHED_PROCESS\",\n 
\"CREATE_DEFAULT_ERROR_MODE\",\"CREATE_BREAKAWAY_FROM_JOB\"])\n \n class Handle(int):\n closed=False\n \n def Close(self,CloseHandle=_winapi.CloseHandle):\n if not self.closed:\n self.closed=True\n CloseHandle(self)\n \n def Detach(self):\n if not self.closed:\n self.closed=True\n return int(self)\n raise ValueError(\"already closed\")\n \n def __repr__(self):\n return \"%s(%d)\"%(self.__class__.__name__,int(self))\n \n __del__=Close\n __str__=__repr__\n \n \n \n \n \n \n_active=[]\n\ndef _cleanup():\n for inst in _active[:]:\n res=inst._internal_poll(_deadstate=sys.maxsize)\n if res is not None :\n try :\n _active.remove(inst)\n except ValueError:\n \n \n pass\n \nPIPE=-1\nSTDOUT=-2\nDEVNULL=-3\n\n\n\n\n\n\ndef _optim_args_from_interpreter_flags():\n ''\n \n args=[]\n value=sys.flags.optimize\n if value >0:\n args.append('-'+'O'*value)\n return args\n \n \ndef _args_from_interpreter_flags():\n ''\n \n flag_opt_map={\n 'debug':'d',\n \n \n 'dont_write_bytecode':'B',\n 'no_user_site':'s',\n 'no_site':'S',\n 'ignore_environment':'E',\n 'verbose':'v',\n 'bytes_warning':'b',\n 'quiet':'q',\n \n }\n args=_optim_args_from_interpreter_flags()\n for flag,opt in flag_opt_map.items():\n v=getattr(sys.flags,flag)\n if v >0:\n args.append('-'+opt *v)\n \n \n warnopts=sys.warnoptions[:]\n bytes_warning=sys.flags.bytes_warning\n xoptions=getattr(sys,'_xoptions',{})\n dev_mode=('dev'in xoptions)\n \n if bytes_warning >1:\n warnopts.remove(\"error::BytesWarning\")\n elif bytes_warning:\n warnopts.remove(\"default::BytesWarning\")\n if dev_mode:\n warnopts.remove('default')\n for opt in warnopts:\n args.append('-W'+opt)\n \n \n if dev_mode:\n args.extend(('-X','dev'))\n for opt in ('faulthandler','tracemalloc','importtime',\n 'showalloccount','showrefcount','utf8'):\n if opt in xoptions:\n value=xoptions[opt]\n if value is True :\n arg=opt\n else :\n arg='%s=%s'%(opt,value)\n args.extend(('-X',arg))\n \n return args\n \n \ndef call(*popenargs,timeout=None ,**kwargs):\n ''\n\n\n\n\n\n \n with Popen(*popenargs,**kwargs)as p:\n try :\n return p.wait(timeout=timeout)\n except :\n p.kill()\n \n raise\n \n \ndef check_call(*popenargs,**kwargs):\n ''\n\n\n\n\n\n\n\n \n retcode=call(*popenargs,**kwargs)\n if retcode:\n cmd=kwargs.get(\"args\")\n if cmd is None :\n cmd=popenargs[0]\n raise CalledProcessError(retcode,cmd)\n return 0\n \n \ndef check_output(*popenargs,timeout=None ,**kwargs):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if 'stdout'in kwargs:\n raise ValueError('stdout argument not allowed, it will be overridden.')\n \n if 'input'in kwargs and kwargs['input']is None :\n \n \n kwargs['input']=''if kwargs.get('universal_newlines',False )else b''\n \n return run(*popenargs,stdout=PIPE,timeout=timeout,check=True ,\n **kwargs).stdout\n \n \nclass CompletedProcess(object):\n ''\n\n\n\n\n\n\n\n\n \n def __init__(self,args,returncode,stdout=None ,stderr=None ):\n self.args=args\n self.returncode=returncode\n self.stdout=stdout\n self.stderr=stderr\n \n def __repr__(self):\n args=['args={!r}'.format(self.args),\n 'returncode={!r}'.format(self.returncode)]\n if self.stdout is not None :\n args.append('stdout={!r}'.format(self.stdout))\n if self.stderr is not None :\n args.append('stderr={!r}'.format(self.stderr))\n return \"{}({})\".format(type(self).__name__,', '.join(args))\n \n def check_returncode(self):\n ''\n if self.returncode:\n raise CalledProcessError(self.returncode,self.args,self.stdout,\n self.stderr)\n \n \ndef run(*popenargs,\ninput=None ,capture_output=False 
,timeout=None ,check=False ,**kwargs):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if input is not None :\n if 'stdin'in kwargs:\n raise ValueError('stdin and input arguments may not both be used.')\n kwargs['stdin']=PIPE\n \n if capture_output:\n if ('stdout'in kwargs)or ('stderr'in kwargs):\n raise ValueError('stdout and stderr arguments may not be used '\n 'with capture_output.')\n kwargs['stdout']=PIPE\n kwargs['stderr']=PIPE\n \n with Popen(*popenargs,**kwargs)as process:\n try :\n stdout,stderr=process.communicate(input,timeout=timeout)\n except TimeoutExpired:\n process.kill()\n stdout,stderr=process.communicate()\n raise TimeoutExpired(process.args,timeout,output=stdout,\n stderr=stderr)\n except :\n process.kill()\n \n raise\n retcode=process.poll()\n if check and retcode:\n raise CalledProcessError(retcode,process.args,\n output=stdout,stderr=stderr)\n return CompletedProcess(process.args,retcode,stdout,stderr)\n \n \ndef list2cmdline(seq):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n \n \n \n \n result=[]\n needquote=False\n for arg in seq:\n bs_buf=[]\n \n \n if result:\n result.append(' ')\n \n needquote=(\" \"in arg)or (\"\\t\"in arg)or not arg\n if needquote:\n result.append('\"')\n \n for c in arg:\n if c =='\\\\':\n \n bs_buf.append(c)\n elif c =='\"':\n \n result.append('\\\\'*len(bs_buf)*2)\n bs_buf=[]\n result.append('\\\\\"')\n else :\n \n if bs_buf:\n result.extend(bs_buf)\n bs_buf=[]\n result.append(c)\n \n \n if bs_buf:\n result.extend(bs_buf)\n \n if needquote:\n result.extend(bs_buf)\n result.append('\"')\n \n return ''.join(result)\n \n \n \n \n \ndef getstatusoutput(cmd):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n try :\n data=check_output(cmd,shell=True ,text=True ,stderr=STDOUT)\n exitcode=0\n except CalledProcessError as ex:\n data=ex.output\n exitcode=ex.returncode\n if data[-1:]=='\\n':\n data=data[:-1]\n return exitcode,data\n \ndef getoutput(cmd):\n ''\n\n\n\n\n\n\n\n \n return getstatusoutput(cmd)[1]\n \n \nclass Popen(object):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n _child_created=False\n \n def __init__(self,args,bufsize=-1,executable=None ,\n stdin=None ,stdout=None ,stderr=None ,\n preexec_fn=None ,close_fds=True ,\n shell=False ,cwd=None ,env=None ,universal_newlines=None ,\n startupinfo=None ,creationflags=0,\n restore_signals=True ,start_new_session=False ,\n pass_fds=(),*,encoding=None ,errors=None ,text=None ):\n ''\n _cleanup()\n \n \n \n \n \n self._waitpid_lock=threading.Lock()\n \n self._input=None\n self._communication_started=False\n if bufsize is None :\n bufsize=-1\n if not isinstance(bufsize,int):\n raise TypeError(\"bufsize must be an integer\")\n \n if _mswindows:\n if preexec_fn is not None :\n raise ValueError(\"preexec_fn is not supported on Windows \"\n \"platforms\")\n else :\n \n if pass_fds and not close_fds:\n warnings.warn(\"pass_fds overriding close_fds.\",RuntimeWarning)\n close_fds=True\n if startupinfo is not None :\n raise ValueError(\"startupinfo is only supported on Windows \"\n \"platforms\")\n if creationflags !=0:\n raise ValueError(\"creationflags is only supported on Windows \"\n \"platforms\")\n \n self.args=args\n self.stdin=None\n self.stdout=None\n self.stderr=None\n self.pid=None\n self.returncode=None\n self.encoding=encoding\n self.errors=errors\n \n \n if (text is not None and universal_newlines is not None\n and bool(universal_newlines)!=bool(text)):\n raise SubprocessError('Cannot disambiguate when both text '\n 'and 
universal_newlines are supplied but '\n 'different. Pass one or the other.')\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n (p2cread,p2cwrite,\n c2pread,c2pwrite,\n errread,errwrite)=self._get_handles(stdin,stdout,stderr)\n \n \n \n \n \n if _mswindows:\n if p2cwrite !=-1:\n p2cwrite=msvcrt.open_osfhandle(p2cwrite.Detach(),0)\n if c2pread !=-1:\n c2pread=msvcrt.open_osfhandle(c2pread.Detach(),0)\n if errread !=-1:\n errread=msvcrt.open_osfhandle(errread.Detach(),0)\n \n self.text_mode=encoding or errors or text or universal_newlines\n \n \n \n \n self._sigint_wait_secs=0.25\n \n self._closed_child_pipe_fds=False\n \n try :\n if p2cwrite !=-1:\n self.stdin=io.open(p2cwrite,'wb',bufsize)\n if self.text_mode:\n self.stdin=io.TextIOWrapper(self.stdin,write_through=True ,\n line_buffering=(bufsize ==1),\n encoding=encoding,errors=errors)\n if c2pread !=-1:\n self.stdout=io.open(c2pread,'rb',bufsize)\n if self.text_mode:\n self.stdout=io.TextIOWrapper(self.stdout,\n encoding=encoding,errors=errors)\n if errread !=-1:\n self.stderr=io.open(errread,'rb',bufsize)\n if self.text_mode:\n self.stderr=io.TextIOWrapper(self.stderr,\n encoding=encoding,errors=errors)\n \n self._execute_child(args,executable,preexec_fn,close_fds,\n pass_fds,cwd,env,\n startupinfo,creationflags,shell,\n p2cread,p2cwrite,\n c2pread,c2pwrite,\n errread,errwrite,\n restore_signals,start_new_session)\n except :\n \n for f in filter(None ,(self.stdin,self.stdout,self.stderr)):\n try :\n f.close()\n except OSError:\n pass\n \n if not self._closed_child_pipe_fds:\n to_close=[]\n if stdin ==PIPE:\n to_close.append(p2cread)\n if stdout ==PIPE:\n to_close.append(c2pwrite)\n if stderr ==PIPE:\n to_close.append(errwrite)\n if hasattr(self,'_devnull'):\n to_close.append(self._devnull)\n for fd in to_close:\n try :\n if _mswindows and isinstance(fd,Handle):\n fd.Close()\n else :\n os.close(fd)\n except OSError:\n pass\n \n raise\n \n @property\n def universal_newlines(self):\n \n \n return self.text_mode\n \n @universal_newlines.setter\n def universal_newlines(self,universal_newlines):\n self.text_mode=bool(universal_newlines)\n \n def _translate_newlines(self,data,encoding,errors):\n data=data.decode(encoding,errors)\n return data.replace(\"\\r\\n\",\"\\n\").replace(\"\\r\",\"\\n\")\n \n def __enter__(self):\n return self\n \n def __exit__(self,exc_type,value,traceback):\n if self.stdout:\n self.stdout.close()\n if self.stderr:\n self.stderr.close()\n try :\n if self.stdin:\n self.stdin.close()\n finally :\n if exc_type ==KeyboardInterrupt:\n \n \n \n \n \n \n \n if self._sigint_wait_secs >0:\n try :\n self._wait(timeout=self._sigint_wait_secs)\n except TimeoutExpired:\n pass\n self._sigint_wait_secs=0\n return\n \n \n self.wait()\n \n def __del__(self,_maxsize=sys.maxsize,_warn=warnings.warn):\n if not self._child_created:\n \n return\n if self.returncode is None :\n \n \n _warn(\"subprocess %s is still running\"%self.pid,\n ResourceWarning,source=self)\n \n self._internal_poll(_deadstate=_maxsize)\n if self.returncode is None and _active is not None :\n \n _active.append(self)\n \n def _get_devnull(self):\n if not hasattr(self,'_devnull'):\n self._devnull=os.open(os.devnull,os.O_RDWR)\n return self._devnull\n \n def _stdin_write(self,input):\n if input:\n try :\n self.stdin.write(input)\n except BrokenPipeError:\n pass\n except OSError as exc:\n if exc.errno ==errno.EINVAL:\n \n \n \n pass\n else :\n raise\n \n try :\n self.stdin.close()\n except BrokenPipeError:\n pass\n except OSError as exc:\n if exc.errno ==errno.EINVAL:\n 
pass\n else :\n raise\n \n def communicate(self,input=None ,timeout=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n if self._communication_started and input:\n raise ValueError(\"Cannot send input after starting communication\")\n \n \n \n \n if (timeout is None and not self._communication_started and\n [self.stdin,self.stdout,self.stderr].count(None )>=2):\n stdout=None\n stderr=None\n if self.stdin:\n self._stdin_write(input)\n elif self.stdout:\n stdout=self.stdout.read()\n self.stdout.close()\n elif self.stderr:\n stderr=self.stderr.read()\n self.stderr.close()\n self.wait()\n else :\n if timeout is not None :\n endtime=_time()+timeout\n else :\n endtime=None\n \n try :\n stdout,stderr=self._communicate(input,endtime,timeout)\n except KeyboardInterrupt:\n \n \n if timeout is not None :\n sigint_timeout=min(self._sigint_wait_secs,\n self._remaining_time(endtime))\n else :\n sigint_timeout=self._sigint_wait_secs\n self._sigint_wait_secs=0\n try :\n self._wait(timeout=sigint_timeout)\n except TimeoutExpired:\n pass\n raise\n \n finally :\n self._communication_started=True\n \n sts=self.wait(timeout=self._remaining_time(endtime))\n \n return (stdout,stderr)\n \n \n def poll(self):\n ''\n \n return self._internal_poll()\n \n \n def _remaining_time(self,endtime):\n ''\n if endtime is None :\n return None\n else :\n return endtime -_time()\n \n \n def _check_timeout(self,endtime,orig_timeout):\n ''\n if endtime is None :\n return\n if _time()>endtime:\n raise TimeoutExpired(self.args,orig_timeout)\n \n \n def wait(self,timeout=None ):\n ''\n if timeout is not None :\n endtime=_time()+timeout\n try :\n return self._wait(timeout=timeout)\n except KeyboardInterrupt:\n \n \n \n \n if timeout is not None :\n sigint_timeout=min(self._sigint_wait_secs,\n self._remaining_time(endtime))\n else :\n sigint_timeout=self._sigint_wait_secs\n self._sigint_wait_secs=0\n try :\n self._wait(timeout=sigint_timeout)\n except TimeoutExpired:\n pass\n raise\n \n \n if _mswindows:\n \n \n \n def _get_handles(self,stdin,stdout,stderr):\n ''\n\n \n if stdin is None and stdout is None and stderr is None :\n return (-1,-1,-1,-1,-1,-1)\n \n p2cread,p2cwrite=-1,-1\n c2pread,c2pwrite=-1,-1\n errread,errwrite=-1,-1\n \n if stdin is None :\n p2cread=_winapi.GetStdHandle(_winapi.STD_INPUT_HANDLE)\n if p2cread is None :\n p2cread,_=_winapi.CreatePipe(None ,0)\n p2cread=Handle(p2cread)\n _winapi.CloseHandle(_)\n elif stdin ==PIPE:\n p2cread,p2cwrite=_winapi.CreatePipe(None ,0)\n p2cread,p2cwrite=Handle(p2cread),Handle(p2cwrite)\n elif stdin ==DEVNULL:\n p2cread=msvcrt.get_osfhandle(self._get_devnull())\n elif isinstance(stdin,int):\n p2cread=msvcrt.get_osfhandle(stdin)\n else :\n \n p2cread=msvcrt.get_osfhandle(stdin.fileno())\n p2cread=self._make_inheritable(p2cread)\n \n if stdout is None :\n c2pwrite=_winapi.GetStdHandle(_winapi.STD_OUTPUT_HANDLE)\n if c2pwrite is None :\n _,c2pwrite=_winapi.CreatePipe(None ,0)\n c2pwrite=Handle(c2pwrite)\n _winapi.CloseHandle(_)\n elif stdout ==PIPE:\n c2pread,c2pwrite=_winapi.CreatePipe(None ,0)\n c2pread,c2pwrite=Handle(c2pread),Handle(c2pwrite)\n elif stdout ==DEVNULL:\n c2pwrite=msvcrt.get_osfhandle(self._get_devnull())\n elif isinstance(stdout,int):\n c2pwrite=msvcrt.get_osfhandle(stdout)\n else :\n \n c2pwrite=msvcrt.get_osfhandle(stdout.fileno())\n c2pwrite=self._make_inheritable(c2pwrite)\n \n if stderr is None :\n errwrite=_winapi.GetStdHandle(_winapi.STD_ERROR_HANDLE)\n if errwrite is None :\n _,errwrite=_winapi.CreatePipe(None ,0)\n errwrite=Handle(errwrite)\n 
_winapi.CloseHandle(_)\n elif stderr ==PIPE:\n errread,errwrite=_winapi.CreatePipe(None ,0)\n errread,errwrite=Handle(errread),Handle(errwrite)\n elif stderr ==STDOUT:\n errwrite=c2pwrite\n elif stderr ==DEVNULL:\n errwrite=msvcrt.get_osfhandle(self._get_devnull())\n elif isinstance(stderr,int):\n errwrite=msvcrt.get_osfhandle(stderr)\n else :\n \n errwrite=msvcrt.get_osfhandle(stderr.fileno())\n errwrite=self._make_inheritable(errwrite)\n \n return (p2cread,p2cwrite,\n c2pread,c2pwrite,\n errread,errwrite)\n \n \n def _make_inheritable(self,handle):\n ''\n h=_winapi.DuplicateHandle(\n _winapi.GetCurrentProcess(),handle,\n _winapi.GetCurrentProcess(),0,1,\n _winapi.DUPLICATE_SAME_ACCESS)\n return Handle(h)\n \n \n def _filter_handle_list(self,handle_list):\n ''\n\n \n \n \n \n return list({handle for handle in handle_list\n if handle&0x3 !=0x3\n or _winapi.GetFileType(handle)!=\n _winapi.FILE_TYPE_CHAR})\n \n \n def _execute_child(self,args,executable,preexec_fn,close_fds,\n pass_fds,cwd,env,\n startupinfo,creationflags,shell,\n p2cread,p2cwrite,\n c2pread,c2pwrite,\n errread,errwrite,\n unused_restore_signals,unused_start_new_session):\n ''\n \n assert not pass_fds,\"pass_fds not supported on Windows.\"\n \n if not isinstance(args,str):\n args=list2cmdline(args)\n \n \n if startupinfo is None :\n startupinfo=STARTUPINFO()\n \n use_std_handles=-1 not in (p2cread,c2pwrite,errwrite)\n if use_std_handles:\n startupinfo.dwFlags |=_winapi.STARTF_USESTDHANDLES\n startupinfo.hStdInput=p2cread\n startupinfo.hStdOutput=c2pwrite\n startupinfo.hStdError=errwrite\n \n attribute_list=startupinfo.lpAttributeList\n have_handle_list=bool(attribute_list and\n \"handle_list\"in attribute_list and\n attribute_list[\"handle_list\"])\n \n \n if have_handle_list or (use_std_handles and close_fds):\n if attribute_list is None :\n attribute_list=startupinfo.lpAttributeList={}\n handle_list=attribute_list[\"handle_list\"]=\\\n list(attribute_list.get(\"handle_list\",[]))\n \n if use_std_handles:\n handle_list +=[int(p2cread),int(c2pwrite),int(errwrite)]\n \n handle_list[:]=self._filter_handle_list(handle_list)\n \n if handle_list:\n if not close_fds:\n warnings.warn(\"startupinfo.lpAttributeList['handle_list'] \"\n \"overriding close_fds\",RuntimeWarning)\n \n \n \n \n close_fds=False\n \n if shell:\n startupinfo.dwFlags |=_winapi.STARTF_USESHOWWINDOW\n startupinfo.wShowWindow=_winapi.SW_HIDE\n comspec=os.environ.get(\"COMSPEC\",\"cmd.exe\")\n args='{} /c \"{}\"'.format(comspec,args)\n \n \n try :\n hp,ht,pid,tid=_winapi.CreateProcess(executable,args,\n \n None ,None ,\n int(not close_fds),\n creationflags,\n env,\n os.fspath(cwd)if cwd is not None else None ,\n startupinfo)\n finally :\n \n \n \n \n \n \n if p2cread !=-1:\n p2cread.Close()\n if c2pwrite !=-1:\n c2pwrite.Close()\n if errwrite !=-1:\n errwrite.Close()\n if hasattr(self,'_devnull'):\n os.close(self._devnull)\n \n \n self._closed_child_pipe_fds=True\n \n \n self._child_created=True\n self._handle=Handle(hp)\n self.pid=pid\n _winapi.CloseHandle(ht)\n \n def _internal_poll(self,_deadstate=None ,\n _WaitForSingleObject=_winapi.WaitForSingleObject,\n _WAIT_OBJECT_0=_winapi.WAIT_OBJECT_0,\n _GetExitCodeProcess=_winapi.GetExitCodeProcess):\n ''\n\n\n\n\n\n \n if self.returncode is None :\n if _WaitForSingleObject(self._handle,0)==_WAIT_OBJECT_0:\n self.returncode=_GetExitCodeProcess(self._handle)\n return self.returncode\n \n \n def _wait(self,timeout):\n ''\n if timeout is None :\n timeout_millis=_winapi.INFINITE\n else :\n timeout_millis=int(timeout 
*1000)\n if self.returncode is None :\n \n result=_winapi.WaitForSingleObject(self._handle,\n timeout_millis)\n if result ==_winapi.WAIT_TIMEOUT:\n raise TimeoutExpired(self.args,timeout)\n self.returncode=_winapi.GetExitCodeProcess(self._handle)\n return self.returncode\n \n \n def _readerthread(self,fh,buffer):\n buffer.append(fh.read())\n fh.close()\n \n \n def _communicate(self,input,endtime,orig_timeout):\n \n \n if self.stdout and not hasattr(self,\"_stdout_buff\"):\n self._stdout_buff=[]\n self.stdout_thread=\\\n threading.Thread(target=self._readerthread,\n args=(self.stdout,self._stdout_buff))\n self.stdout_thread.daemon=True\n self.stdout_thread.start()\n if self.stderr and not hasattr(self,\"_stderr_buff\"):\n self._stderr_buff=[]\n self.stderr_thread=\\\n threading.Thread(target=self._readerthread,\n args=(self.stderr,self._stderr_buff))\n self.stderr_thread.daemon=True\n self.stderr_thread.start()\n \n if self.stdin:\n self._stdin_write(input)\n \n \n \n \n if self.stdout is not None :\n self.stdout_thread.join(self._remaining_time(endtime))\n if self.stdout_thread.is_alive():\n raise TimeoutExpired(self.args,orig_timeout)\n if self.stderr is not None :\n self.stderr_thread.join(self._remaining_time(endtime))\n if self.stderr_thread.is_alive():\n raise TimeoutExpired(self.args,orig_timeout)\n \n \n \n stdout=None\n stderr=None\n if self.stdout:\n stdout=self._stdout_buff\n self.stdout.close()\n if self.stderr:\n stderr=self._stderr_buff\n self.stderr.close()\n \n \n if stdout is not None :\n stdout=stdout[0]\n if stderr is not None :\n stderr=stderr[0]\n \n return (stdout,stderr)\n \n def send_signal(self,sig):\n ''\n \n if self.returncode is not None :\n return\n if sig ==signal.SIGTERM:\n self.terminate()\n elif sig ==signal.CTRL_C_EVENT:\n os.kill(self.pid,signal.CTRL_C_EVENT)\n elif sig ==signal.CTRL_BREAK_EVENT:\n os.kill(self.pid,signal.CTRL_BREAK_EVENT)\n else :\n raise ValueError(\"Unsupported signal: {}\".format(sig))\n \n def terminate(self):\n ''\n \n if self.returncode is not None :\n return\n try :\n _winapi.TerminateProcess(self._handle,1)\n except PermissionError:\n \n \n rc=_winapi.GetExitCodeProcess(self._handle)\n if rc ==_winapi.STILL_ACTIVE:\n raise\n self.returncode=rc\n \n kill=terminate\n \n else :\n \n \n \n def _get_handles(self,stdin,stdout,stderr):\n ''\n\n \n p2cread,p2cwrite=-1,-1\n c2pread,c2pwrite=-1,-1\n errread,errwrite=-1,-1\n \n if stdin is None :\n pass\n elif stdin ==PIPE:\n p2cread,p2cwrite=os.pipe()\n elif stdin ==DEVNULL:\n p2cread=self._get_devnull()\n elif isinstance(stdin,int):\n p2cread=stdin\n else :\n \n p2cread=stdin.fileno()\n \n if stdout is None :\n pass\n elif stdout ==PIPE:\n c2pread,c2pwrite=os.pipe()\n elif stdout ==DEVNULL:\n c2pwrite=self._get_devnull()\n elif isinstance(stdout,int):\n c2pwrite=stdout\n else :\n \n c2pwrite=stdout.fileno()\n \n if stderr is None :\n pass\n elif stderr ==PIPE:\n errread,errwrite=os.pipe()\n elif stderr ==STDOUT:\n if c2pwrite !=-1:\n errwrite=c2pwrite\n else :\n errwrite=sys.__stdout__.fileno()\n elif stderr ==DEVNULL:\n errwrite=self._get_devnull()\n elif isinstance(stderr,int):\n errwrite=stderr\n else :\n \n errwrite=stderr.fileno()\n \n return (p2cread,p2cwrite,\n c2pread,c2pwrite,\n errread,errwrite)\n \n \n def _execute_child(self,args,executable,preexec_fn,close_fds,\n pass_fds,cwd,env,\n startupinfo,creationflags,shell,\n p2cread,p2cwrite,\n c2pread,c2pwrite,\n errread,errwrite,\n restore_signals,start_new_session):\n ''\n \n if isinstance(args,(str,bytes)):\n args=[args]\n else 
:\n args=list(args)\n \n if shell:\n \n unix_shell=('/system/bin/sh'if\n hasattr(sys,'getandroidapilevel')else '/bin/sh')\n args=[unix_shell,\"-c\"]+args\n if executable:\n args[0]=executable\n \n if executable is None :\n executable=args[0]\n orig_executable=executable\n \n \n \n \n errpipe_read,errpipe_write=os.pipe()\n \n low_fds_to_close=[]\n while errpipe_write <3:\n low_fds_to_close.append(errpipe_write)\n errpipe_write=os.dup(errpipe_write)\n for low_fd in low_fds_to_close:\n os.close(low_fd)\n try :\n try :\n \n \n \n \n \n if env is not None :\n env_list=[]\n for k,v in env.items():\n k=os.fsencode(k)\n if b'='in k:\n raise ValueError(\"illegal environment variable name\")\n env_list.append(k+b'='+os.fsencode(v))\n else :\n env_list=None\n executable=os.fsencode(executable)\n if os.path.dirname(executable):\n executable_list=(executable,)\n else :\n \n executable_list=tuple(\n os.path.join(os.fsencode(dir),executable)\n for dir in os.get_exec_path(env))\n fds_to_keep=set(pass_fds)\n fds_to_keep.add(errpipe_write)\n self.pid=_posixsubprocess.fork_exec(\n args,executable_list,\n close_fds,tuple(sorted(map(int,fds_to_keep))),\n cwd,env_list,\n p2cread,p2cwrite,c2pread,c2pwrite,\n errread,errwrite,\n errpipe_read,errpipe_write,\n restore_signals,start_new_session,preexec_fn)\n self._child_created=True\n finally :\n \n os.close(errpipe_write)\n \n \n devnull_fd=getattr(self,'_devnull',None )\n if p2cread !=-1 and p2cwrite !=-1 and p2cread !=devnull_fd:\n os.close(p2cread)\n if c2pwrite !=-1 and c2pread !=-1 and c2pwrite !=devnull_fd:\n os.close(c2pwrite)\n if errwrite !=-1 and errread !=-1 and errwrite !=devnull_fd:\n os.close(errwrite)\n if devnull_fd is not None :\n os.close(devnull_fd)\n \n self._closed_child_pipe_fds=True\n \n \n \n errpipe_data=bytearray()\n while True :\n part=os.read(errpipe_read,50000)\n errpipe_data +=part\n if not part or len(errpipe_data)>50000:\n break\n finally :\n \n os.close(errpipe_read)\n \n if errpipe_data:\n try :\n pid,sts=os.waitpid(self.pid,0)\n if pid ==self.pid:\n self._handle_exitstatus(sts)\n else :\n self.returncode=sys.maxsize\n except ChildProcessError:\n pass\n \n try :\n exception_name,hex_errno,err_msg=(\n errpipe_data.split(b':',2))\n \n \n \n err_msg=err_msg.decode()\n except ValueError:\n exception_name=b'SubprocessError'\n hex_errno=b'0'\n err_msg='Bad exception data from child: {!r}'.format(\n bytes(errpipe_data))\n child_exception_type=getattr(\n builtins,exception_name.decode('ascii'),\n SubprocessError)\n if issubclass(child_exception_type,OSError)and hex_errno:\n errno_num=int(hex_errno,16)\n child_exec_never_called=(err_msg ==\"noexec\")\n if child_exec_never_called:\n err_msg=\"\"\n \n err_filename=cwd\n else :\n err_filename=orig_executable\n if errno_num !=0:\n err_msg=os.strerror(errno_num)\n if errno_num ==errno.ENOENT:\n err_msg +=': '+repr(err_filename)\n raise child_exception_type(errno_num,err_msg,err_filename)\n raise child_exception_type(err_msg)\n \n \n def _handle_exitstatus(self,sts,_WIFSIGNALED=os.WIFSIGNALED,\n _WTERMSIG=os.WTERMSIG,_WIFEXITED=os.WIFEXITED,\n _WEXITSTATUS=os.WEXITSTATUS,_WIFSTOPPED=os.WIFSTOPPED,\n _WSTOPSIG=os.WSTOPSIG):\n ''\n \n \n if _WIFSIGNALED(sts):\n self.returncode=-_WTERMSIG(sts)\n elif _WIFEXITED(sts):\n self.returncode=_WEXITSTATUS(sts)\n elif _WIFSTOPPED(sts):\n self.returncode=-_WSTOPSIG(sts)\n else :\n \n raise SubprocessError(\"Unknown child exit status!\")\n \n \n def _internal_poll(self,_deadstate=None ,_waitpid=os.waitpid,\n _WNOHANG=os.WNOHANG,_ECHILD=errno.ECHILD):\n 
''\n\n\n\n\n\n \n if self.returncode is None :\n if not self._waitpid_lock.acquire(False ):\n \n \n return None\n try :\n if self.returncode is not None :\n return self.returncode\n pid,sts=_waitpid(self.pid,_WNOHANG)\n if pid ==self.pid:\n self._handle_exitstatus(sts)\n except OSError as e:\n if _deadstate is not None :\n self.returncode=_deadstate\n elif e.errno ==_ECHILD:\n \n \n \n \n \n self.returncode=0\n finally :\n self._waitpid_lock.release()\n return self.returncode\n \n \n def _try_wait(self,wait_flags):\n ''\n try :\n (pid,sts)=os.waitpid(self.pid,wait_flags)\n except ChildProcessError:\n \n \n \n pid=self.pid\n sts=0\n return (pid,sts)\n \n \n def _wait(self,timeout):\n ''\n if self.returncode is not None :\n return self.returncode\n \n if timeout is not None :\n endtime=_time()+timeout\n \n \n delay=0.0005\n while True :\n if self._waitpid_lock.acquire(False ):\n try :\n if self.returncode is not None :\n break\n (pid,sts)=self._try_wait(os.WNOHANG)\n assert pid ==self.pid or pid ==0\n if pid ==self.pid:\n self._handle_exitstatus(sts)\n break\n finally :\n self._waitpid_lock.release()\n remaining=self._remaining_time(endtime)\n if remaining <=0:\n raise TimeoutExpired(self.args,timeout)\n delay=min(delay *2,remaining,.05)\n time.sleep(delay)\n else :\n while self.returncode is None :\n with self._waitpid_lock:\n if self.returncode is not None :\n break\n (pid,sts)=self._try_wait(0)\n \n \n \n if pid ==self.pid:\n self._handle_exitstatus(sts)\n return self.returncode\n \n \n def _communicate(self,input,endtime,orig_timeout):\n if self.stdin and not self._communication_started:\n \n \n try :\n self.stdin.flush()\n except BrokenPipeError:\n pass\n if not input:\n try :\n self.stdin.close()\n except BrokenPipeError:\n pass\n \n stdout=None\n stderr=None\n \n \n if not self._communication_started:\n self._fileobj2output={}\n if self.stdout:\n self._fileobj2output[self.stdout]=[]\n if self.stderr:\n self._fileobj2output[self.stderr]=[]\n \n if self.stdout:\n stdout=self._fileobj2output[self.stdout]\n if self.stderr:\n stderr=self._fileobj2output[self.stderr]\n \n self._save_input(input)\n \n if self._input:\n input_view=memoryview(self._input)\n \n with _PopenSelector()as selector:\n if self.stdin and input:\n selector.register(self.stdin,selectors.EVENT_WRITE)\n if self.stdout:\n selector.register(self.stdout,selectors.EVENT_READ)\n if self.stderr:\n selector.register(self.stderr,selectors.EVENT_READ)\n \n while selector.get_map():\n timeout=self._remaining_time(endtime)\n if timeout is not None and timeout <0:\n raise TimeoutExpired(self.args,orig_timeout)\n \n ready=selector.select(timeout)\n self._check_timeout(endtime,orig_timeout)\n \n \n \n \n for key,events in ready:\n if key.fileobj is self.stdin:\n chunk=input_view[self._input_offset:\n self._input_offset+_PIPE_BUF]\n try :\n self._input_offset +=os.write(key.fd,chunk)\n except BrokenPipeError:\n selector.unregister(key.fileobj)\n key.fileobj.close()\n else :\n if self._input_offset >=len(self._input):\n selector.unregister(key.fileobj)\n key.fileobj.close()\n elif key.fileobj in (self.stdout,self.stderr):\n data=os.read(key.fd,32768)\n if not data:\n selector.unregister(key.fileobj)\n key.fileobj.close()\n self._fileobj2output[key.fileobj].append(data)\n \n self.wait(timeout=self._remaining_time(endtime))\n \n \n if stdout is not None :\n stdout=b''.join(stdout)\n if stderr is not None :\n stderr=b''.join(stderr)\n \n \n \n if self.text_mode:\n if stdout is not None :\n stdout=self._translate_newlines(stdout,\n 
self.stdout.encoding,\n self.stdout.errors)\n if stderr is not None :\n stderr=self._translate_newlines(stderr,\n self.stderr.encoding,\n self.stderr.errors)\n \n return (stdout,stderr)\n \n \n def _save_input(self,input):\n \n \n \n if self.stdin and self._input is None :\n self._input_offset=0\n self._input=input\n if input is not None and self.text_mode:\n self._input=self._input.encode(self.stdin.encoding,\n self.stdin.errors)\n \n \n def send_signal(self,sig):\n ''\n \n if self.returncode is None :\n os.kill(self.pid,sig)\n \n def terminate(self):\n ''\n \n self.send_signal(signal.SIGTERM)\n \n def kill(self):\n ''\n \n self.send_signal(signal.SIGKILL)\n", ["_posixsubprocess", "_winapi", "builtins", "errno", "io", "msvcrt", "os", "select", "selectors", "signal", "sys", "threading", "time", "warnings"]], "sys": [".py", "\nfrom _sys import *\nimport javascript\n\n_getframe=Getframe\n\nabiflags=0\n\nbrython_debug_mode=__BRYTHON__.debug\n\nbase_exec_prefix=__BRYTHON__.brython_path\n\nbase_prefix=__BRYTHON__.brython_path\n\nbuiltin_module_names=__BRYTHON__.builtin_module_names\n\nbyteorder='little'\n\nexec_prefix=__BRYTHON__.brython_path\n\nexecutable=__BRYTHON__.brython_path+'/brython.js'\n\nargv=__BRYTHON__.__ARGV\n\ndef exit(i=None ):\n raise SystemExit('')\n \nclass flag_class:\n\n def __init__(self):\n self.debug=0\n self.inspect=0\n self.interactive=0\n self.optimize=0\n self.dont_write_bytecode=0\n self.no_user_site=0\n self.no_site=0\n self.ignore_environment=0\n self.verbose=0\n self.bytes_warning=0\n self.quiet=0\n self.hash_randomization=1\n \nflags=flag_class()\n\nclass float_info:\n mant_dig=53\n max=javascript.Number.MAX_VALUE\n min=javascript.Number.MIN_VALUE\n radix=2\n \ndef getfilesystemencoding(*args,**kw):\n ''\n\n \n return 'utf-8'\n \ndef getfilesystemencodeerrors():\n return \"utf-8\"\n \ndef getrecursionlimit():\n return 200\n \ndef intern(string):\n return string\n \nmaxsize=2 **63 -1\n\nmaxunicode=1114111\n\nplatform=\"brython\"\n\nprefix=__BRYTHON__.brython_path\n\ndef settrace(tracefunc):\n print(\"set trace\")\n \nversion='.'.join(str(x)for x in __BRYTHON__.version_info[:3])\nversion +=\" (default, %s) \\n[Javascript 1.5] on Brython\"\\\n%__BRYTHON__.compiled_date\nhexversion=0x03070000\n\nclass _version_info:\n\n def __init__(self,version_info):\n self.version_info=version_info\n self.major=version_info[0]\n self.minor=version_info[1]\n self.micro=version_info[2]\n self.releaselevel=version_info[3]\n self.serial=version_info[4]\n \n def __getitem__(self,index):\n if isinstance(self.version_info[index],list):\n return tuple(self.version_info[index])\n return self.version_info[index]\n \n def hexversion(self):\n try :\n return '0%d0%d0%d'%(self.major,self.minor,self.micro)\n finally :\n return '0%d0000'%(self.major)\n \n def __str__(self):\n _s=\"sys.version(major=%d, minor=%d, micro=%d, releaselevel='%s', \"\\\n \"serial=%d)\"\n return _s %(self.major,self.minor,self.micro,\n self.releaselevel,self.serial)\n \n def __eq__(self,other):\n if isinstance(other,tuple):\n return (self.major,self.minor,self.micro)==other\n \n raise Error(\"Error! I don't know how to compare!\")\n \n def __ge__(self,other):\n if isinstance(other,tuple):\n return (self.major,self.minor,self.micro)>=other\n \n raise Error(\"Error! I don't know how to compare!\")\n \n def __gt__(self,other):\n if isinstance(other,tuple):\n return (self.major,self.minor,self.micro)>other\n \n raise Error(\"Error! 
I don't know how to compare!\")\n \n def __le__(self,other):\n if isinstance(other,tuple):\n return (self.major,self.minor,self.micro)<=other\n \n raise Error(\"Error! I don't know how to compare!\")\n \n def __lt__(self,other):\n if isinstance(other,tuple):\n return (self.major,self.minor,self.micro)0:\n for name in tuple(variables):\n value=notdone[name]\n m1=_findvar1_rx.search(value)\n m2=_findvar2_rx.search(value)\n if m1 and m2:\n m=m1 if m1.start()=\"5\":\n osname=\"solaris\"\n release=\"%d.%s\"%(int(release[0])-3,release[2:])\n \n \n \n bitness={2147483647:\"32bit\",9223372036854775807:\"64bit\"}\n machine +=\".%s\"%bitness[sys.maxsize]\n \n elif osname[:3]==\"aix\":\n return \"%s-%s.%s\"%(osname,version,release)\n elif osname[:6]==\"cygwin\":\n osname=\"cygwin\"\n import re\n rel_re=re.compile(r'[\\d.]+')\n m=rel_re.match(release)\n if m:\n release=m.group()\n elif osname[:6]==\"darwin\":\n import _osx_support\n osname,release,machine=_osx_support.get_platform_osx(\n get_config_vars(),\n osname,release,machine)\n \n return \"%s-%s-%s\"%(osname,release,machine)\n \n \ndef get_python_version():\n return _PY_VERSION_SHORT\n \n \ndef _print_dict(title,data):\n for index,(key,value)in enumerate(sorted(data.items())):\n if index ==0:\n print('%s: '%(title))\n print('\\t%s = \"%s\"'%(key,value))\n \n \ndef _main():\n ''\n if '--generate-posix-vars'in sys.argv:\n _generate_posix_vars()\n return\n print('Platform: \"%s\"'%get_platform())\n print('Python version: \"%s\"'%get_python_version())\n print('Current installation scheme: \"%s\"'%_get_default_scheme())\n print()\n _print_dict('Paths',get_paths())\n print()\n _print_dict('Variables',get_config_vars())\n \n \nif __name__ =='__main__':\n _main()\n", ["_osx_support", "os", "os.path", "pprint", "re", "sys", "types", "warnings"]], "tarfile": [".py", "#!/usr/bin/env python3\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n''\n\n\nversion=\"0.9.0\"\n__author__=\"Lars Gust\\u00e4bel (lars@gustaebel.de)\"\n__credits__=\"Gustavo Niemeyer, Niels Gust\\u00e4bel, Richard Townsend.\"\n\n\n\n\nfrom builtins import open as bltn_open\nimport sys\nimport os\nimport io\nimport shutil\nimport stat\nimport time\nimport struct\nimport copy\nimport re\n\ntry :\n import pwd\nexcept ImportError:\n pwd=None\ntry :\n import grp\nexcept ImportError:\n grp=None\n \n \nsymlink_exception=(AttributeError,NotImplementedError)\ntry :\n\n\n symlink_exception +=(OSError,)\nexcept NameError:\n pass\n \n \n__all__=[\"TarFile\",\"TarInfo\",\"is_tarfile\",\"TarError\",\"ReadError\",\n\"CompressionError\",\"StreamError\",\"ExtractError\",\"HeaderError\",\n\"ENCODING\",\"USTAR_FORMAT\",\"GNU_FORMAT\",\"PAX_FORMAT\",\n\"DEFAULT_FORMAT\",\"open\"]\n\n\n\n\nNUL=b\"\\0\"\nBLOCKSIZE=512\nRECORDSIZE=BLOCKSIZE *20\nGNU_MAGIC=b\"ustar 
\\0\"\nPOSIX_MAGIC=b\"ustar\\x0000\"\n\nLENGTH_NAME=100\nLENGTH_LINK=100\nLENGTH_PREFIX=155\n\nREGTYPE=b\"0\"\nAREGTYPE=b\"\\0\"\nLNKTYPE=b\"1\"\nSYMTYPE=b\"2\"\nCHRTYPE=b\"3\"\nBLKTYPE=b\"4\"\nDIRTYPE=b\"5\"\nFIFOTYPE=b\"6\"\nCONTTYPE=b\"7\"\n\nGNUTYPE_LONGNAME=b\"L\"\nGNUTYPE_LONGLINK=b\"K\"\nGNUTYPE_SPARSE=b\"S\"\n\nXHDTYPE=b\"x\"\nXGLTYPE=b\"g\"\nSOLARIS_XHDTYPE=b\"X\"\n\nUSTAR_FORMAT=0\nGNU_FORMAT=1\nPAX_FORMAT=2\nDEFAULT_FORMAT=GNU_FORMAT\n\n\n\n\n\nSUPPORTED_TYPES=(REGTYPE,AREGTYPE,LNKTYPE,\nSYMTYPE,DIRTYPE,FIFOTYPE,\nCONTTYPE,CHRTYPE,BLKTYPE,\nGNUTYPE_LONGNAME,GNUTYPE_LONGLINK,\nGNUTYPE_SPARSE)\n\n\nREGULAR_TYPES=(REGTYPE,AREGTYPE,\nCONTTYPE,GNUTYPE_SPARSE)\n\n\nGNU_TYPES=(GNUTYPE_LONGNAME,GNUTYPE_LONGLINK,\nGNUTYPE_SPARSE)\n\n\nPAX_FIELDS=(\"path\",\"linkpath\",\"size\",\"mtime\",\n\"uid\",\"gid\",\"uname\",\"gname\")\n\n\nPAX_NAME_FIELDS={\"path\",\"linkpath\",\"uname\",\"gname\"}\n\n\n\nPAX_NUMBER_FIELDS={\n\"atime\":float,\n\"ctime\":float,\n\"mtime\":float,\n\"uid\":int,\n\"gid\":int,\n\"size\":int\n}\n\n\n\n\nif os.name ==\"nt\":\n ENCODING=\"utf-8\"\nelse :\n ENCODING=sys.getfilesystemencoding()\n \n \n \n \n \ndef stn(s,length,encoding,errors):\n ''\n \n s=s.encode(encoding,errors)\n return s[:length]+(length -len(s))*NUL\n \ndef nts(s,encoding,errors):\n ''\n \n p=s.find(b\"\\0\")\n if p !=-1:\n s=s[:p]\n return s.decode(encoding,errors)\n \ndef nti(s):\n ''\n \n \n \n if s[0]in (0o200,0o377):\n n=0\n for i in range(len(s)-1):\n n <<=8\n n +=s[i+1]\n if s[0]==0o377:\n n=-(256 **(len(s)-1)-n)\n else :\n try :\n s=nts(s,\"ascii\",\"strict\")\n n=int(s.strip()or \"0\",8)\n except ValueError:\n raise InvalidHeaderError(\"invalid header\")\n return n\n \ndef itn(n,digits=8,format=DEFAULT_FORMAT):\n ''\n \n \n \n \n \n \n \n \n \n n=int(n)\n if 0 <=n <8 **(digits -1):\n s=bytes(\"%0*o\"%(digits -1,n),\"ascii\")+NUL\n elif format ==GNU_FORMAT and -256 **(digits -1)<=n <256 **(digits -1):\n if n >=0:\n s=bytearray([0o200])\n else :\n s=bytearray([0o377])\n n=256 **digits+n\n \n for i in range(digits -1):\n s.insert(1,n&0o377)\n n >>=8\n else :\n raise ValueError(\"overflow in number field\")\n \n return s\n \ndef calc_chksums(buf):\n ''\n\n\n\n\n\n\n \n unsigned_chksum=256+sum(struct.unpack_from(\"148B8x356B\",buf))\n signed_chksum=256+sum(struct.unpack_from(\"148b8x356b\",buf))\n return unsigned_chksum,signed_chksum\n \ndef copyfileobj(src,dst,length=None ,exception=OSError,bufsize=None ):\n ''\n\n \n bufsize=bufsize or 16 *1024\n if length ==0:\n return\n if length is None :\n shutil.copyfileobj(src,dst,bufsize)\n return\n \n blocks,remainder=divmod(length,bufsize)\n for b in range(blocks):\n buf=src.read(bufsize)\n if len(buf)self.bufsize:\n self.fileobj.write(self.buf[:self.bufsize])\n self.buf=self.buf[self.bufsize:]\n \n def close(self):\n ''\n\n \n if self.closed:\n return\n \n self.closed=True\n try :\n if self.mode ==\"w\"and self.comptype !=\"tar\":\n self.buf +=self.cmp.flush()\n \n if self.mode ==\"w\"and self.buf:\n self.fileobj.write(self.buf)\n self.buf=b\"\"\n if self.comptype ==\"gz\":\n self.fileobj.write(struct.pack(\"=0:\n blocks,remainder=divmod(pos -self.pos,self.bufsize)\n for i in range(blocks):\n self.read(self.bufsize)\n self.read(remainder)\n else :\n raise StreamError(\"seeking backwards is not allowed\")\n return self.pos\n \n def read(self,size=None ):\n ''\n\n\n \n if size is None :\n t=[]\n while True :\n buf=self._read(self.bufsize)\n if not buf:\n break\n t.append(buf)\n buf=\"\".join(t)\n else :\n buf=self._read(size)\n self.pos +=len(buf)\n 
return buf\n \n def _read(self,size):\n ''\n \n if self.comptype ==\"tar\":\n return self.__read(size)\n \n c=len(self.dbuf)\n while c lastpos:\n self.map.append((False ,lastpos,offset,None ))\n self.map.append((True ,offset,offset+size,realpos))\n realpos +=size\n lastpos=offset+size\n if lastpos 0:\n while True :\n data,start,stop,offset=self.map[self.map_index]\n if start <=self.position \"%(self.__class__.__name__,self.name,id(self))\n \n def get_info(self):\n ''\n \n info={\n \"name\":self.name,\n \"mode\":self.mode&0o7777,\n \"uid\":self.uid,\n \"gid\":self.gid,\n \"size\":self.size,\n \"mtime\":self.mtime,\n \"chksum\":self.chksum,\n \"type\":self.type,\n \"linkname\":self.linkname,\n \"uname\":self.uname,\n \"gname\":self.gname,\n \"devmajor\":self.devmajor,\n \"devminor\":self.devminor\n }\n \n if info[\"type\"]==DIRTYPE and not info[\"name\"].endswith(\"/\"):\n info[\"name\"]+=\"/\"\n \n return info\n \n def tobuf(self,format=DEFAULT_FORMAT,encoding=ENCODING,errors=\"surrogateescape\"):\n ''\n \n info=self.get_info()\n \n if format ==USTAR_FORMAT:\n return self.create_ustar_header(info,encoding,errors)\n elif format ==GNU_FORMAT:\n return self.create_gnu_header(info,encoding,errors)\n elif format ==PAX_FORMAT:\n return self.create_pax_header(info,encoding)\n else :\n raise ValueError(\"invalid format\")\n \n def create_ustar_header(self,info,encoding,errors):\n ''\n \n info[\"magic\"]=POSIX_MAGIC\n \n if len(info[\"linkname\"].encode(encoding,errors))>LENGTH_LINK:\n raise ValueError(\"linkname is too long\")\n \n if len(info[\"name\"].encode(encoding,errors))>LENGTH_NAME:\n info[\"prefix\"],info[\"name\"]=self._posix_split_name(info[\"name\"],encoding,errors)\n \n return self._create_header(info,USTAR_FORMAT,encoding,errors)\n \n def create_gnu_header(self,info,encoding,errors):\n ''\n \n info[\"magic\"]=GNU_MAGIC\n \n buf=b\"\"\n if len(info[\"linkname\"].encode(encoding,errors))>LENGTH_LINK:\n buf +=self._create_gnu_long_header(info[\"linkname\"],GNUTYPE_LONGLINK,encoding,errors)\n \n if len(info[\"name\"].encode(encoding,errors))>LENGTH_NAME:\n buf +=self._create_gnu_long_header(info[\"name\"],GNUTYPE_LONGNAME,encoding,errors)\n \n return buf+self._create_header(info,GNU_FORMAT,encoding,errors)\n \n def create_pax_header(self,info,encoding):\n ''\n\n\n \n info[\"magic\"]=POSIX_MAGIC\n pax_headers=self.pax_headers.copy()\n \n \n \n for name,hname,length in (\n (\"name\",\"path\",LENGTH_NAME),(\"linkname\",\"linkpath\",LENGTH_LINK),\n (\"uname\",\"uname\",32),(\"gname\",\"gname\",32)):\n \n if hname in pax_headers:\n \n continue\n \n \n try :\n info[name].encode(\"ascii\",\"strict\")\n except UnicodeEncodeError:\n pax_headers[hname]=info[name]\n continue\n \n if len(info[name])>length:\n pax_headers[hname]=info[name]\n \n \n \n for name,digits in ((\"uid\",8),(\"gid\",8),(\"size\",12),(\"mtime\",12)):\n if name in pax_headers:\n \n info[name]=0\n continue\n \n val=info[name]\n if not 0 <=val <8 **(digits -1)or isinstance(val,float):\n pax_headers[name]=str(val)\n info[name]=0\n \n \n if pax_headers:\n buf=self._create_pax_generic_header(pax_headers,XHDTYPE,encoding)\n else :\n buf=b\"\"\n \n return buf+self._create_header(info,USTAR_FORMAT,\"ascii\",\"replace\")\n \n @classmethod\n def create_pax_global_header(cls,pax_headers):\n ''\n \n return cls._create_pax_generic_header(pax_headers,XGLTYPE,\"utf-8\")\n \n def _posix_split_name(self,name,encoding,errors):\n ''\n\n \n components=name.split(\"/\")\n for i in range(1,len(components)):\n prefix=\"/\".join(components[:i])\n 
name=\"/\".join(components[i:])\n if len(prefix.encode(encoding,errors))<=LENGTH_PREFIX and\\\n len(name.encode(encoding,errors))<=LENGTH_NAME:\n break\n else :\n raise ValueError(\"name is too long\")\n \n return prefix,name\n \n @staticmethod\n def _create_header(info,format,encoding,errors):\n ''\n\n \n parts=[\n stn(info.get(\"name\",\"\"),100,encoding,errors),\n itn(info.get(\"mode\",0)&0o7777,8,format),\n itn(info.get(\"uid\",0),8,format),\n itn(info.get(\"gid\",0),8,format),\n itn(info.get(\"size\",0),12,format),\n itn(info.get(\"mtime\",0),12,format),\n b\" \",\n info.get(\"type\",REGTYPE),\n stn(info.get(\"linkname\",\"\"),100,encoding,errors),\n info.get(\"magic\",POSIX_MAGIC),\n stn(info.get(\"uname\",\"\"),32,encoding,errors),\n stn(info.get(\"gname\",\"\"),32,encoding,errors),\n itn(info.get(\"devmajor\",0),8,format),\n itn(info.get(\"devminor\",0),8,format),\n stn(info.get(\"prefix\",\"\"),155,encoding,errors)\n ]\n \n buf=struct.pack(\"%ds\"%BLOCKSIZE,b\"\".join(parts))\n chksum=calc_chksums(buf[-BLOCKSIZE:])[0]\n buf=buf[:-364]+bytes(\"%06o\\0\"%chksum,\"ascii\")+buf[-357:]\n return buf\n \n @staticmethod\n def _create_payload(payload):\n ''\n\n \n blocks,remainder=divmod(len(payload),BLOCKSIZE)\n if remainder >0:\n payload +=(BLOCKSIZE -remainder)*NUL\n return payload\n \n @classmethod\n def _create_gnu_long_header(cls,name,type,encoding,errors):\n ''\n\n \n name=name.encode(encoding,errors)+NUL\n \n info={}\n info[\"name\"]=\"././@LongLink\"\n info[\"type\"]=type\n info[\"size\"]=len(name)\n info[\"magic\"]=GNU_MAGIC\n \n \n return cls._create_header(info,USTAR_FORMAT,encoding,errors)+\\\n cls._create_payload(name)\n \n @classmethod\n def _create_pax_generic_header(cls,pax_headers,type,encoding):\n ''\n\n\n \n \n \n binary=False\n for keyword,value in pax_headers.items():\n try :\n value.encode(\"utf-8\",\"strict\")\n except UnicodeEncodeError:\n binary=True\n break\n \n records=b\"\"\n if binary:\n \n records +=b\"21 hdrcharset=BINARY\\n\"\n \n for keyword,value in pax_headers.items():\n keyword=keyword.encode(\"utf-8\")\n if binary:\n \n \n value=value.encode(encoding,\"surrogateescape\")\n else :\n value=value.encode(\"utf-8\")\n \n l=len(keyword)+len(value)+3\n n=p=0\n while True :\n n=l+len(str(p))\n if n ==p:\n break\n p=n\n records +=bytes(str(p),\"ascii\")+b\" \"+keyword+b\"=\"+value+b\"\\n\"\n \n \n \n info={}\n info[\"name\"]=\"././@PaxHeader\"\n info[\"type\"]=type\n info[\"size\"]=len(records)\n info[\"magic\"]=POSIX_MAGIC\n \n \n return cls._create_header(info,USTAR_FORMAT,\"ascii\",\"replace\")+\\\n cls._create_payload(records)\n \n @classmethod\n def frombuf(cls,buf,encoding,errors):\n ''\n \n if len(buf)==0:\n raise EmptyHeaderError(\"empty header\")\n if len(buf)!=BLOCKSIZE:\n raise TruncatedHeaderError(\"truncated header\")\n if buf.count(NUL)==BLOCKSIZE:\n raise EOFHeaderError(\"end of file header\")\n \n chksum=nti(buf[148:156])\n if chksum not in calc_chksums(buf):\n raise InvalidHeaderError(\"bad checksum\")\n \n obj=cls()\n obj.name=nts(buf[0:100],encoding,errors)\n obj.mode=nti(buf[100:108])\n obj.uid=nti(buf[108:116])\n obj.gid=nti(buf[116:124])\n obj.size=nti(buf[124:136])\n obj.mtime=nti(buf[136:148])\n obj.chksum=chksum\n obj.type=buf[156:157]\n obj.linkname=nts(buf[157:257],encoding,errors)\n obj.uname=nts(buf[265:297],encoding,errors)\n obj.gname=nts(buf[297:329],encoding,errors)\n obj.devmajor=nti(buf[329:337])\n obj.devminor=nti(buf[337:345])\n prefix=nts(buf[345:500],encoding,errors)\n \n \n \n if obj.type ==AREGTYPE and 
obj.name.endswith(\"/\"):\n obj.type=DIRTYPE\n \n \n \n \n if obj.type ==GNUTYPE_SPARSE:\n pos=386\n structs=[]\n for i in range(4):\n try :\n offset=nti(buf[pos:pos+12])\n numbytes=nti(buf[pos+12:pos+24])\n except ValueError:\n break\n structs.append((offset,numbytes))\n pos +=24\n isextended=bool(buf[482])\n origsize=nti(buf[483:495])\n obj._sparse_structs=(structs,isextended,origsize)\n \n \n if obj.isdir():\n obj.name=obj.name.rstrip(\"/\")\n \n \n if prefix and obj.type not in GNU_TYPES:\n obj.name=prefix+\"/\"+obj.name\n return obj\n \n @classmethod\n def fromtarfile(cls,tarfile):\n ''\n\n \n buf=tarfile.fileobj.read(BLOCKSIZE)\n obj=cls.frombuf(buf,tarfile.encoding,tarfile.errors)\n obj.offset=tarfile.fileobj.tell()-BLOCKSIZE\n return obj._proc_member(tarfile)\n \n \n \n \n \n \n \n \n \n \n \n \n def _proc_member(self,tarfile):\n ''\n\n \n if self.type in (GNUTYPE_LONGNAME,GNUTYPE_LONGLINK):\n return self._proc_gnulong(tarfile)\n elif self.type ==GNUTYPE_SPARSE:\n return self._proc_sparse(tarfile)\n elif self.type in (XHDTYPE,XGLTYPE,SOLARIS_XHDTYPE):\n return self._proc_pax(tarfile)\n else :\n return self._proc_builtin(tarfile)\n \n def _proc_builtin(self,tarfile):\n ''\n\n \n self.offset_data=tarfile.fileobj.tell()\n offset=self.offset_data\n if self.isreg()or self.type not in SUPPORTED_TYPES:\n \n offset +=self._block(self.size)\n tarfile.offset=offset\n \n \n \n self._apply_pax_info(tarfile.pax_headers,tarfile.encoding,tarfile.errors)\n \n return self\n \n def _proc_gnulong(self,tarfile):\n ''\n\n \n buf=tarfile.fileobj.read(self._block(self.size))\n \n \n try :\n next=self.fromtarfile(tarfile)\n except HeaderError:\n raise SubsequentHeaderError(\"missing or bad subsequent header\")\n \n \n \n next.offset=self.offset\n if self.type ==GNUTYPE_LONGNAME:\n next.name=nts(buf,tarfile.encoding,tarfile.errors)\n elif self.type ==GNUTYPE_LONGLINK:\n next.linkname=nts(buf,tarfile.encoding,tarfile.errors)\n \n return next\n \n def _proc_sparse(self,tarfile):\n ''\n \n \n structs,isextended,origsize=self._sparse_structs\n del self._sparse_structs\n \n \n while isextended:\n buf=tarfile.fileobj.read(BLOCKSIZE)\n pos=0\n for i in range(21):\n try :\n offset=nti(buf[pos:pos+12])\n numbytes=nti(buf[pos+12:pos+24])\n except ValueError:\n break\n if offset and numbytes:\n structs.append((offset,numbytes))\n pos +=24\n isextended=bool(buf[504])\n self.sparse=structs\n \n self.offset_data=tarfile.fileobj.tell()\n tarfile.offset=self.offset_data+self._block(self.size)\n self.size=origsize\n return self\n \n def _proc_pax(self,tarfile):\n ''\n\n \n \n buf=tarfile.fileobj.read(self._block(self.size))\n \n \n \n \n if self.type ==XGLTYPE:\n pax_headers=tarfile.pax_headers\n else :\n pax_headers=tarfile.pax_headers.copy()\n \n \n \n \n \n \n match=re.search(br\"\\d+ hdrcharset=([^\\n]+)\\n\",buf)\n if match is not None :\n pax_headers[\"hdrcharset\"]=match.group(1).decode(\"utf-8\")\n \n \n \n \n hdrcharset=pax_headers.get(\"hdrcharset\")\n if hdrcharset ==\"BINARY\":\n encoding=tarfile.encoding\n else :\n encoding=\"utf-8\"\n \n \n \n \n \n regex=re.compile(br\"(\\d+) ([^=]+)=\")\n pos=0\n while True :\n match=regex.match(buf,pos)\n if not match:\n break\n \n length,keyword=match.groups()\n length=int(length)\n value=buf[match.end(2)+1:match.start(1)+length -1]\n \n \n \n \n \n \n \n \n keyword=self._decode_pax_field(keyword,\"utf-8\",\"utf-8\",\n tarfile.errors)\n if keyword in PAX_NAME_FIELDS:\n value=self._decode_pax_field(value,encoding,tarfile.encoding,\n tarfile.errors)\n else :\n 
value=self._decode_pax_field(value,\"utf-8\",\"utf-8\",\n tarfile.errors)\n \n pax_headers[keyword]=value\n pos +=length\n \n \n try :\n next=self.fromtarfile(tarfile)\n except HeaderError:\n raise SubsequentHeaderError(\"missing or bad subsequent header\")\n \n \n if \"GNU.sparse.map\"in pax_headers:\n \n self._proc_gnusparse_01(next,pax_headers)\n \n elif \"GNU.sparse.size\"in pax_headers:\n \n self._proc_gnusparse_00(next,pax_headers,buf)\n \n elif pax_headers.get(\"GNU.sparse.major\")==\"1\"and pax_headers.get(\"GNU.sparse.minor\")==\"0\":\n \n self._proc_gnusparse_10(next,pax_headers,tarfile)\n \n if self.type in (XHDTYPE,SOLARIS_XHDTYPE):\n \n next._apply_pax_info(pax_headers,tarfile.encoding,tarfile.errors)\n next.offset=self.offset\n \n if \"size\"in pax_headers:\n \n \n \n offset=next.offset_data\n if next.isreg()or next.type not in SUPPORTED_TYPES:\n offset +=next._block(next.size)\n tarfile.offset=offset\n \n return next\n \n def _proc_gnusparse_00(self,next,pax_headers,buf):\n ''\n \n offsets=[]\n for match in re.finditer(br\"\\d+ GNU.sparse.offset=(\\d+)\\n\",buf):\n offsets.append(int(match.group(1)))\n numbytes=[]\n for match in re.finditer(br\"\\d+ GNU.sparse.numbytes=(\\d+)\\n\",buf):\n numbytes.append(int(match.group(1)))\n next.sparse=list(zip(offsets,numbytes))\n \n def _proc_gnusparse_01(self,next,pax_headers):\n ''\n \n sparse=[int(x)for x in pax_headers[\"GNU.sparse.map\"].split(\",\")]\n next.sparse=list(zip(sparse[::2],sparse[1::2]))\n \n def _proc_gnusparse_10(self,next,pax_headers,tarfile):\n ''\n \n fields=None\n sparse=[]\n buf=tarfile.fileobj.read(BLOCKSIZE)\n fields,buf=buf.split(b\"\\n\",1)\n fields=int(fields)\n while len(sparse)0:\n self.fileobj.write(NUL *(RECORDSIZE -remainder))\n finally :\n if not self._extfileobj:\n self.fileobj.close()\n \n def getmember(self,name):\n ''\n\n\n\n \n tarinfo=self._getmember(name)\n if tarinfo is None :\n raise KeyError(\"filename %r not found\"%name)\n return tarinfo\n \n def getmembers(self):\n ''\n\n \n self._check()\n if not self._loaded:\n self._load()\n \n return self.members\n \n def getnames(self):\n ''\n\n \n return [tarinfo.name for tarinfo in self.getmembers()]\n \n def gettarinfo(self,name=None ,arcname=None ,fileobj=None ):\n ''\n\n\n\n\n\n\n \n self._check(\"awx\")\n \n \n \n if fileobj is not None :\n name=fileobj.name\n \n \n \n \n if arcname is None :\n arcname=name\n drv,arcname=os.path.splitdrive(arcname)\n arcname=arcname.replace(os.sep,\"/\")\n arcname=arcname.lstrip(\"/\")\n \n \n \n tarinfo=self.tarinfo()\n tarinfo.tarfile=self\n \n \n \n if fileobj is None :\n if hasattr(os,\"lstat\")and not self.dereference:\n statres=os.lstat(name)\n else :\n statres=os.stat(name)\n else :\n statres=os.fstat(fileobj.fileno())\n linkname=\"\"\n \n stmd=statres.st_mode\n if stat.S_ISREG(stmd):\n inode=(statres.st_ino,statres.st_dev)\n if not self.dereference and statres.st_nlink >1 and\\\n inode in self.inodes and arcname !=self.inodes[inode]:\n \n \n type=LNKTYPE\n linkname=self.inodes[inode]\n else :\n \n \n type=REGTYPE\n if inode[0]:\n self.inodes[inode]=arcname\n elif stat.S_ISDIR(stmd):\n type=DIRTYPE\n elif stat.S_ISFIFO(stmd):\n type=FIFOTYPE\n elif stat.S_ISLNK(stmd):\n type=SYMTYPE\n linkname=os.readlink(name)\n elif stat.S_ISCHR(stmd):\n type=CHRTYPE\n elif stat.S_ISBLK(stmd):\n type=BLKTYPE\n else :\n return None\n \n \n \n tarinfo.name=arcname\n tarinfo.mode=stmd\n tarinfo.uid=statres.st_uid\n tarinfo.gid=statres.st_gid\n if type ==REGTYPE:\n tarinfo.size=statres.st_size\n else :\n tarinfo.size=0\n 
tarinfo.mtime=statres.st_mtime\n tarinfo.type=type\n tarinfo.linkname=linkname\n if pwd:\n try :\n tarinfo.uname=pwd.getpwuid(tarinfo.uid)[0]\n except KeyError:\n pass\n if grp:\n try :\n tarinfo.gname=grp.getgrgid(tarinfo.gid)[0]\n except KeyError:\n pass\n \n if type in (CHRTYPE,BLKTYPE):\n if hasattr(os,\"major\")and hasattr(os,\"minor\"):\n tarinfo.devmajor=os.major(statres.st_rdev)\n tarinfo.devminor=os.minor(statres.st_rdev)\n return tarinfo\n \n def list(self,verbose=True ,*,members=None ):\n ''\n\n\n\n \n self._check()\n \n if members is None :\n members=self\n for tarinfo in members:\n if verbose:\n _safe_print(stat.filemode(tarinfo.mode))\n _safe_print(\"%s/%s\"%(tarinfo.uname or tarinfo.uid,\n tarinfo.gname or tarinfo.gid))\n if tarinfo.ischr()or tarinfo.isblk():\n _safe_print(\"%10s\"%\n (\"%d,%d\"%(tarinfo.devmajor,tarinfo.devminor)))\n else :\n _safe_print(\"%10d\"%tarinfo.size)\n _safe_print(\"%d-%02d-%02d %02d:%02d:%02d\"\\\n %time.localtime(tarinfo.mtime)[:6])\n \n _safe_print(tarinfo.name+(\"/\"if tarinfo.isdir()else \"\"))\n \n if verbose:\n if tarinfo.issym():\n _safe_print(\"-> \"+tarinfo.linkname)\n if tarinfo.islnk():\n _safe_print(\"link to \"+tarinfo.linkname)\n print()\n \n def add(self,name,arcname=None ,recursive=True ,*,filter=None ):\n ''\n\n\n\n\n\n\n\n \n self._check(\"awx\")\n \n if arcname is None :\n arcname=name\n \n \n if self.name is not None and os.path.abspath(name)==self.name:\n self._dbg(2,\"tarfile: Skipped %r\"%name)\n return\n \n self._dbg(1,name)\n \n \n tarinfo=self.gettarinfo(name,arcname)\n \n if tarinfo is None :\n self._dbg(1,\"tarfile: Unsupported type %r\"%name)\n return\n \n \n if filter is not None :\n tarinfo=filter(tarinfo)\n if tarinfo is None :\n self._dbg(2,\"tarfile: Excluded %r\"%name)\n return\n \n \n if tarinfo.isreg():\n with bltn_open(name,\"rb\")as f:\n self.addfile(tarinfo,f)\n \n elif tarinfo.isdir():\n self.addfile(tarinfo)\n if recursive:\n for f in sorted(os.listdir(name)):\n self.add(os.path.join(name,f),os.path.join(arcname,f),\n recursive,filter=filter)\n \n else :\n self.addfile(tarinfo)\n \n def addfile(self,tarinfo,fileobj=None ):\n ''\n\n\n\n \n self._check(\"awx\")\n \n tarinfo=copy.copy(tarinfo)\n \n buf=tarinfo.tobuf(self.format,self.encoding,self.errors)\n self.fileobj.write(buf)\n self.offset +=len(buf)\n bufsize=self.copybufsize\n \n if fileobj is not None :\n copyfileobj(fileobj,self.fileobj,tarinfo.size,bufsize=bufsize)\n blocks,remainder=divmod(tarinfo.size,BLOCKSIZE)\n if remainder >0:\n self.fileobj.write(NUL *(BLOCKSIZE -remainder))\n blocks +=1\n self.offset +=blocks *BLOCKSIZE\n \n self.members.append(tarinfo)\n \n def extractall(self,path=\".\",members=None ,*,numeric_owner=False ):\n ''\n\n\n\n\n\n \n directories=[]\n \n if members is None :\n members=self\n \n for tarinfo in members:\n if tarinfo.isdir():\n \n directories.append(tarinfo)\n tarinfo=copy.copy(tarinfo)\n tarinfo.mode=0o700\n \n self.extract(tarinfo,path,set_attrs=not tarinfo.isdir(),\n numeric_owner=numeric_owner)\n \n \n directories.sort(key=lambda a:a.name)\n directories.reverse()\n \n \n for tarinfo in directories:\n dirpath=os.path.join(path,tarinfo.name)\n try :\n self.chown(tarinfo,dirpath,numeric_owner=numeric_owner)\n self.utime(tarinfo,dirpath)\n self.chmod(tarinfo,dirpath)\n except ExtractError as e:\n if self.errorlevel >1:\n raise\n else :\n self._dbg(1,\"tarfile: %s\"%e)\n \n def extract(self,member,path=\"\",set_attrs=True ,*,numeric_owner=False ):\n ''\n\n\n\n\n\n\n \n self._check(\"r\")\n \n if 
isinstance(member,str):\n tarinfo=self.getmember(member)\n else :\n tarinfo=member\n \n \n if tarinfo.islnk():\n tarinfo._link_target=os.path.join(path,tarinfo.linkname)\n \n try :\n self._extract_member(tarinfo,os.path.join(path,tarinfo.name),\n set_attrs=set_attrs,\n numeric_owner=numeric_owner)\n except OSError as e:\n if self.errorlevel >0:\n raise\n else :\n if e.filename is None :\n self._dbg(1,\"tarfile: %s\"%e.strerror)\n else :\n self._dbg(1,\"tarfile: %s %r\"%(e.strerror,e.filename))\n except ExtractError as e:\n if self.errorlevel >1:\n raise\n else :\n self._dbg(1,\"tarfile: %s\"%e)\n \n def extractfile(self,member):\n ''\n\n\n\n \n self._check(\"r\")\n \n if isinstance(member,str):\n tarinfo=self.getmember(member)\n else :\n tarinfo=member\n \n if tarinfo.isreg()or tarinfo.type not in SUPPORTED_TYPES:\n \n return self.fileobject(self,tarinfo)\n \n elif tarinfo.islnk()or tarinfo.issym():\n if isinstance(self.fileobj,_Stream):\n \n \n \n raise StreamError(\"cannot extract (sym)link as file object\")\n else :\n \n return self.extractfile(self._find_link_target(tarinfo))\n else :\n \n \n return None\n \n def _extract_member(self,tarinfo,targetpath,set_attrs=True ,\n numeric_owner=False ):\n ''\n\n \n \n \n \n targetpath=targetpath.rstrip(\"/\")\n targetpath=targetpath.replace(\"/\",os.sep)\n \n \n upperdirs=os.path.dirname(targetpath)\n if upperdirs and not os.path.exists(upperdirs):\n \n \n os.makedirs(upperdirs)\n \n if tarinfo.islnk()or tarinfo.issym():\n self._dbg(1,\"%s -> %s\"%(tarinfo.name,tarinfo.linkname))\n else :\n self._dbg(1,tarinfo.name)\n \n if tarinfo.isreg():\n self.makefile(tarinfo,targetpath)\n elif tarinfo.isdir():\n self.makedir(tarinfo,targetpath)\n elif tarinfo.isfifo():\n self.makefifo(tarinfo,targetpath)\n elif tarinfo.ischr()or tarinfo.isblk():\n self.makedev(tarinfo,targetpath)\n elif tarinfo.islnk()or tarinfo.issym():\n self.makelink(tarinfo,targetpath)\n elif tarinfo.type not in SUPPORTED_TYPES:\n self.makeunknown(tarinfo,targetpath)\n else :\n self.makefile(tarinfo,targetpath)\n \n if set_attrs:\n self.chown(tarinfo,targetpath,numeric_owner)\n if not tarinfo.issym():\n self.chmod(tarinfo,targetpath)\n self.utime(tarinfo,targetpath)\n \n \n \n \n \n \n def makedir(self,tarinfo,targetpath):\n ''\n \n try :\n \n \n os.mkdir(targetpath,0o700)\n except FileExistsError:\n pass\n \n def makefile(self,tarinfo,targetpath):\n ''\n \n source=self.fileobj\n source.seek(tarinfo.offset_data)\n bufsize=self.copybufsize\n with bltn_open(targetpath,\"wb\")as target:\n if tarinfo.sparse is not None :\n for offset,size in tarinfo.sparse:\n target.seek(offset)\n copyfileobj(source,target,size,ReadError,bufsize)\n target.seek(tarinfo.size)\n target.truncate()\n else :\n copyfileobj(source,target,tarinfo.size,ReadError,bufsize)\n \n def makeunknown(self,tarinfo,targetpath):\n ''\n\n \n self.makefile(tarinfo,targetpath)\n self._dbg(1,\"tarfile: Unknown file type %r, \"\\\n \"extracted as regular file.\"%tarinfo.type)\n \n def makefifo(self,tarinfo,targetpath):\n ''\n \n if hasattr(os,\"mkfifo\"):\n os.mkfifo(targetpath)\n else :\n raise ExtractError(\"fifo not supported by system\")\n \n def makedev(self,tarinfo,targetpath):\n ''\n \n if not hasattr(os,\"mknod\")or not hasattr(os,\"makedev\"):\n raise ExtractError(\"special devices not supported by system\")\n \n mode=tarinfo.mode\n if tarinfo.isblk():\n mode |=stat.S_IFBLK\n else :\n mode |=stat.S_IFCHR\n \n os.mknod(targetpath,mode,\n os.makedev(tarinfo.devmajor,tarinfo.devminor))\n \n def 
makelink(self,tarinfo,targetpath):\n ''\n\n\n \n try :\n \n if tarinfo.issym():\n os.symlink(tarinfo.linkname,targetpath)\n else :\n \n if os.path.exists(tarinfo._link_target):\n os.link(tarinfo._link_target,targetpath)\n else :\n self._extract_member(self._find_link_target(tarinfo),\n targetpath)\n except symlink_exception:\n try :\n self._extract_member(self._find_link_target(tarinfo),\n targetpath)\n except KeyError:\n raise ExtractError(\"unable to resolve link inside archive\")\n \n def chown(self,tarinfo,targetpath,numeric_owner):\n ''\n\n\n\n \n if hasattr(os,\"geteuid\")and os.geteuid()==0:\n \n g=tarinfo.gid\n u=tarinfo.uid\n if not numeric_owner:\n try :\n if grp:\n g=grp.getgrnam(tarinfo.gname)[2]\n except KeyError:\n pass\n try :\n if pwd:\n u=pwd.getpwnam(tarinfo.uname)[2]\n except KeyError:\n pass\n try :\n if tarinfo.issym()and hasattr(os,\"lchown\"):\n os.lchown(targetpath,u,g)\n else :\n os.chown(targetpath,u,g)\n except OSError:\n raise ExtractError(\"could not change owner\")\n \n def chmod(self,tarinfo,targetpath):\n ''\n \n if hasattr(os,'chmod'):\n try :\n os.chmod(targetpath,tarinfo.mode)\n except OSError:\n raise ExtractError(\"could not change mode\")\n \n def utime(self,tarinfo,targetpath):\n ''\n \n if not hasattr(os,'utime'):\n return\n try :\n os.utime(targetpath,(tarinfo.mtime,tarinfo.mtime))\n except OSError:\n raise ExtractError(\"could not change modification time\")\n \n \n def next(self):\n ''\n\n\n \n self._check(\"ra\")\n if self.firstmember is not None :\n m=self.firstmember\n self.firstmember=None\n return m\n \n \n if self.offset !=self.fileobj.tell():\n self.fileobj.seek(self.offset -1)\n if not self.fileobj.read(1):\n raise ReadError(\"unexpected end of data\")\n \n \n tarinfo=None\n while True :\n try :\n tarinfo=self.tarinfo.fromtarfile(self)\n except EOFHeaderError as e:\n if self.ignore_zeros:\n self._dbg(2,\"0x%X: %s\"%(self.offset,e))\n self.offset +=BLOCKSIZE\n continue\n except InvalidHeaderError as e:\n if self.ignore_zeros:\n self._dbg(2,\"0x%X: %s\"%(self.offset,e))\n self.offset +=BLOCKSIZE\n continue\n elif self.offset ==0:\n raise ReadError(str(e))\n except EmptyHeaderError:\n if self.offset ==0:\n raise ReadError(\"empty file\")\n except TruncatedHeaderError as e:\n if self.offset ==0:\n raise ReadError(str(e))\n except SubsequentHeaderError as e:\n raise ReadError(str(e))\n break\n \n if tarinfo is not None :\n self.members.append(tarinfo)\n else :\n self._loaded=True\n \n return tarinfo\n \n \n \n \n def _getmember(self,name,tarinfo=None ,normalize=False ):\n ''\n\n \n \n members=self.getmembers()\n \n \n if tarinfo is not None :\n members=members[:members.index(tarinfo)]\n \n if normalize:\n name=os.path.normpath(name)\n \n for member in reversed(members):\n if normalize:\n member_name=os.path.normpath(member.name)\n else :\n member_name=member.name\n \n if name ==member_name:\n return member\n \n def _load(self):\n ''\n\n \n while True :\n tarinfo=self.next()\n if tarinfo is None :\n break\n self._loaded=True\n \n def _check(self,mode=None ):\n ''\n\n \n if self.closed:\n raise OSError(\"%s is closed\"%self.__class__.__name__)\n if mode is not None and self.mode not in mode:\n raise OSError(\"bad operation for mode %r\"%self.mode)\n \n def _find_link_target(self,tarinfo):\n ''\n\n \n if tarinfo.issym():\n \n linkname=\"/\".join(filter(None ,(os.path.dirname(tarinfo.name),tarinfo.linkname)))\n limit=None\n else :\n \n \n linkname=tarinfo.linkname\n limit=tarinfo\n \n member=self._getmember(linkname,tarinfo=limit,normalize=True )\n 
if member is None :\n raise KeyError(\"linkname %r not found\"%linkname)\n return member\n \n def __iter__(self):\n ''\n \n if self._loaded:\n yield from self.members\n return\n \n \n \n index=0\n \n \n \n if self.firstmember is not None :\n tarinfo=self.next()\n index +=1\n yield tarinfo\n \n while True :\n if index ',''),\n help='Extract tarfile into target dir')\n group.add_argument('-c','--create',nargs='+',\n metavar=('',''),\n help='Create tarfile from sources')\n group.add_argument('-t','--test',metavar='',\n help='Test if a tarfile is valid')\n args=parser.parse_args()\n \n if args.test is not None :\n src=args.test\n if is_tarfile(src):\n with open(src,'r')as tar:\n tar.getmembers()\n print(tar.getmembers(),file=sys.stderr)\n if args.verbose:\n print('{!r} is a tar archive.'.format(src))\n else :\n parser.exit(1,'{!r} is not a tar archive.\\n'.format(src))\n \n elif args.list is not None :\n src=args.list\n if is_tarfile(src):\n with TarFile.open(src,'r:*')as tf:\n tf.list(verbose=args.verbose)\n else :\n parser.exit(1,'{!r} is not a tar archive.\\n'.format(src))\n \n elif args.extract is not None :\n if len(args.extract)==1:\n src=args.extract[0]\n curdir=os.curdir\n elif len(args.extract)==2:\n src,curdir=args.extract\n else :\n parser.exit(1,parser.format_help())\n \n if is_tarfile(src):\n with TarFile.open(src,'r:*')as tf:\n tf.extractall(path=curdir)\n if args.verbose:\n if curdir =='.':\n msg='{!r} file is extracted.'.format(src)\n else :\n msg=('{!r} file is extracted '\n 'into {!r} directory.').format(src,curdir)\n print(msg)\n else :\n parser.exit(1,'{!r} is not a tar archive.\\n'.format(src))\n \n elif args.create is not None :\n tar_name=args.create.pop(0)\n _,ext=os.path.splitext(tar_name)\n compressions={\n \n '.gz':'gz',\n '.tgz':'gz',\n \n '.xz':'xz',\n '.txz':'xz',\n \n '.bz2':'bz2',\n '.tbz':'bz2',\n '.tbz2':'bz2',\n '.tb2':'bz2',\n }\n tar_mode='w:'+compressions[ext]if ext in compressions else 'w'\n tar_files=args.create\n \n with TarFile.open(tar_name,tar_mode)as tf:\n for file_name in tar_files:\n tf.add(file_name)\n \n if args.verbose:\n print('{!r} file created.'.format(tar_name))\n \nif __name__ =='__main__':\n main()\n", ["argparse", "builtins", "bz2", "copy", "grp", "gzip", "io", "lzma", "os", "pwd", "re", "shutil", "stat", "struct", "sys", "time", "warnings", "zlib"]], "tb": [".py", "import sys\nfrom browser import console\n\nclass Trace:\n\n def __init__(self):\n self.buf=\"\"\n \n def write(self,*data):\n self.buf +=\" \".join([str(x)for x in data])\n \n def format(self):\n ''\n lines=self.buf.split(\"\\n\")\n stripped=[lines[0]]\n for i in range(3,len(lines),2):\n if __file__ in lines[i]:\n continue\n stripped +=lines[i:i+2]\n return \"\\n\".join(stripped)\n \ndef format_exc():\n trace=Trace()\n exc_info=sys.exc_info()\n exc_class=exc_info[0].__name__\n exc_msg=str(exc_info[1])\n tb=exc_info[2]\n if exc_info[0]is SyntaxError:\n return syntax_error(exc_info[1].args)\n trace.write(\"Traceback (most recent call last):\\n\")\n while tb is not None :\n frame=tb.tb_frame\n code=frame.f_code\n name=code.co_name\n filename=code.co_filename\n trace.write(f\" File {filename}, line {tb.tb_lineno}, in {name}\\n\")\n if not filename.startswith(\"<\"):\n trace.write(f\" {tb.tb_lasti}\\n\")\n tb=tb.tb_next\n trace.write(f\"{exc_class}: {exc_msg}\\n\")\n return trace.format()\n \ndef print_exc(file=None ):\n if file is None :\n file=sys.stderr\n file.write(format_exc())\n \ndef syntax_error(args):\n trace=Trace()\n info,filename,lineno,offset,line=args\n 
trace.write(f\" File {filename}, line {lineno}\\n\")\n trace.write(\" \"+line+\"\\n\")\n trace.write(\" \"+offset *\" \"+\"^\\n\")\n trace.write(\"SyntaxError:\",info,\"\\n\")\n return trace.buf\n", ["browser", "sys"]], "tempfile": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n__all__=[\n\"NamedTemporaryFile\",\"TemporaryFile\",\n\"SpooledTemporaryFile\",\"TemporaryDirectory\",\n\"mkstemp\",\"mkdtemp\",\n\"mktemp\",\n\"TMP_MAX\",\"gettempprefix\",\n\"tempdir\",\"gettempdir\",\n\"gettempprefixb\",\"gettempdirb\",\n]\n\n\n\n\nimport functools as _functools\nimport warnings as _warnings\nimport io as _io\nimport os as _os\nimport shutil as _shutil\nimport errno as _errno\nfrom random import Random as _Random\nimport weakref as _weakref\nimport _thread\n_allocate_lock=_thread.allocate_lock\n\n_text_openflags=_os.O_RDWR |_os.O_CREAT |_os.O_EXCL\nif hasattr(_os,'O_NOFOLLOW'):\n _text_openflags |=_os.O_NOFOLLOW\n \n_bin_openflags=_text_openflags\nif hasattr(_os,'O_BINARY'):\n _bin_openflags |=_os.O_BINARY\n \nif hasattr(_os,'TMP_MAX'):\n TMP_MAX=_os.TMP_MAX\nelse :\n TMP_MAX=10000\n \n \n \n \n \ntemplate=\"tmp\"\n\n\n\n_once_lock=_allocate_lock()\n\nif hasattr(_os,\"lstat\"):\n _stat=_os.lstat\nelif hasattr(_os,\"stat\"):\n _stat=_os.stat\nelse :\n\n\n def _stat(fn):\n fd=_os.open(fn,_os.O_RDONLY)\n _os.close(fd)\n \ndef _exists(fn):\n try :\n _stat(fn)\n except OSError:\n return False\n else :\n return True\n \n \ndef _infer_return_type(*args):\n ''\n return_type=None\n for arg in args:\n if arg is None :\n continue\n if isinstance(arg,bytes):\n if return_type is str:\n raise TypeError(\"Can't mix bytes and non-bytes in \"\n \"path components.\")\n return_type=bytes\n else :\n if return_type is bytes:\n raise TypeError(\"Can't mix bytes and non-bytes in \"\n \"path components.\")\n return_type=str\n if return_type is None :\n return str\n return return_type\n \n \ndef _sanitize_params(prefix,suffix,dir):\n ''\n output_type=_infer_return_type(prefix,suffix,dir)\n if suffix is None :\n suffix=output_type()\n if prefix is None :\n if output_type is str:\n prefix=template\n else :\n prefix=_os.fsencode(template)\n if dir is None :\n if output_type is str:\n dir=gettempdir()\n else :\n dir=gettempdirb()\n return prefix,suffix,dir,output_type\n \n \nclass _RandomNameSequence:\n ''\n\n\n\n\n \n \n characters=\"abcdefghijklmnopqrstuvwxyz0123456789_\"\n \n @property\n def rng(self):\n cur_pid=_os.getpid()\n if cur_pid !=getattr(self,'_rng_pid',None ):\n self._rng=_Random()\n self._rng_pid=cur_pid\n return self._rng\n \n def __iter__(self):\n return self\n \n def __next__(self):\n c=self.characters\n choose=self.rng.choice\n letters=[choose(c)for dummy in range(8)]\n return ''.join(letters)\n \ndef _candidate_tempdir_list():\n ''\n \n \n dirlist=[]\n \n \n for envname in 'TMPDIR','TEMP','TMP':\n dirname=_os.getenv(envname)\n if dirname:dirlist.append(dirname)\n \n \n if _os.name =='nt':\n dirlist.extend([_os.path.expanduser(r'~\\AppData\\Local\\Temp'),\n _os.path.expandvars(r'%SYSTEMROOT%\\Temp'),\n r'c:\\temp',r'c:\\tmp',r'\\temp',r'\\tmp'])\n else :\n dirlist.extend(['/tmp','/var/tmp','/usr/tmp'])\n \n \n try :\n dirlist.append(_os.getcwd())\n except (AttributeError,OSError):\n dirlist.append(_os.curdir)\n \n return dirlist\n \ndef _get_default_tempdir():\n ''\n\n\n\n\n\n \n \n namer=_RandomNameSequence()\n dirlist=_candidate_tempdir_list()\n \n for dir in dirlist:\n if dir !=_os.curdir:\n dir=_os.path.abspath(dir)\n \n for seq in range(100):\n name=next(namer)\n 
filename=_os.path.join(dir,name)\n try :\n fd=_os.open(filename,_bin_openflags,0o600)\n try :\n try :\n with _io.open(fd,'wb',closefd=False )as fp:\n fp.write(b'blat')\n finally :\n _os.close(fd)\n finally :\n _os.unlink(filename)\n return dir\n except FileExistsError:\n pass\n except PermissionError:\n \n \n if (_os.name =='nt'and _os.path.isdir(dir)and\n _os.access(dir,_os.W_OK)):\n continue\n break\n except OSError:\n break\n raise FileNotFoundError(_errno.ENOENT,\n \"No usable temporary directory found in %s\"%\n dirlist)\n \n_name_sequence=None\n\ndef _get_candidate_names():\n ''\n \n global _name_sequence\n if _name_sequence is None :\n _once_lock.acquire()\n try :\n if _name_sequence is None :\n _name_sequence=_RandomNameSequence()\n finally :\n _once_lock.release()\n return _name_sequence\n \n \ndef _mkstemp_inner(dir,pre,suf,flags,output_type):\n ''\n \n names=_get_candidate_names()\n if output_type is bytes:\n names=map(_os.fsencode,names)\n \n for seq in range(TMP_MAX):\n name=next(names)\n file=_os.path.join(dir,pre+name+suf)\n try :\n fd=_os.open(file,flags,0o600)\n except FileExistsError:\n continue\n except PermissionError:\n \n \n if (_os.name =='nt'and _os.path.isdir(dir)and\n _os.access(dir,_os.W_OK)):\n continue\n else :\n raise\n return (fd,_os.path.abspath(file))\n \n raise FileExistsError(_errno.EEXIST,\n \"No usable temporary file name found\")\n \n \n \n \ndef gettempprefix():\n ''\n return template\n \ndef gettempprefixb():\n ''\n return _os.fsencode(gettempprefix())\n \ntempdir=None\n\ndef gettempdir():\n ''\n global tempdir\n if tempdir is None :\n _once_lock.acquire()\n try :\n if tempdir is None :\n tempdir=_get_default_tempdir()\n finally :\n _once_lock.release()\n return tempdir\n \ndef gettempdirb():\n ''\n return _os.fsencode(gettempdir())\n \ndef mkstemp(suffix=None ,prefix=None ,dir=None ,text=False ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n prefix,suffix,dir,output_type=_sanitize_params(prefix,suffix,dir)\n \n if text:\n flags=_text_openflags\n else :\n flags=_bin_openflags\n \n return _mkstemp_inner(dir,prefix,suffix,flags,output_type)\n \n \ndef mkdtemp(suffix=None ,prefix=None ,dir=None ):\n ''\n\n\n\n\n\n\n\n\n\n \n \n prefix,suffix,dir,output_type=_sanitize_params(prefix,suffix,dir)\n \n names=_get_candidate_names()\n if output_type is bytes:\n names=map(_os.fsencode,names)\n \n for seq in range(TMP_MAX):\n name=next(names)\n file=_os.path.join(dir,prefix+name+suffix)\n try :\n _os.mkdir(file,0o700)\n except FileExistsError:\n continue\n except PermissionError:\n \n \n if (_os.name =='nt'and _os.path.isdir(dir)and\n _os.access(dir,_os.W_OK)):\n continue\n else :\n raise\n return file\n \n raise FileExistsError(_errno.EEXIST,\n \"No usable temporary directory name found\")\n \ndef mktemp(suffix=\"\",prefix=template,dir=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n \n \n \n \n \n \n if dir is None :\n dir=gettempdir()\n \n names=_get_candidate_names()\n for seq in range(TMP_MAX):\n name=next(names)\n file=_os.path.join(dir,prefix+name+suffix)\n if not _exists(file):\n return file\n \n raise FileExistsError(_errno.EEXIST,\n \"No usable temporary filename found\")\n \n \nclass _TemporaryFileCloser:\n ''\n\n \n \n file=None\n close_called=False\n \n def __init__(self,file,name,delete=True ):\n self.file=file\n self.name=name\n self.delete=delete\n \n \n \n \n if _os.name !='nt':\n \n \n \n \n \n \n def close(self,unlink=_os.unlink):\n if not self.close_called and self.file is not None :\n self.close_called=True\n try :\n self.file.close()\n 
finally :\n if self.delete:\n unlink(self.name)\n \n \n def __del__(self):\n self.close()\n \n else :\n def close(self):\n if not self.close_called:\n self.close_called=True\n self.file.close()\n \n \nclass _TemporaryFileWrapper:\n ''\n\n\n\n\n \n \n def __init__(self,file,name,delete=True ):\n self.file=file\n self.name=name\n self.delete=delete\n self._closer=_TemporaryFileCloser(file,name,delete)\n \n def __getattr__(self,name):\n \n \n \n file=self.__dict__['file']\n a=getattr(file,name)\n if hasattr(a,'__call__'):\n func=a\n @_functools.wraps(func)\n def func_wrapper(*args,**kwargs):\n return func(*args,**kwargs)\n \n \n func_wrapper._closer=self._closer\n a=func_wrapper\n if not isinstance(a,int):\n setattr(self,name,a)\n return a\n \n \n \n def __enter__(self):\n self.file.__enter__()\n return self\n \n \n \n def __exit__(self,exc,value,tb):\n result=self.file.__exit__(exc,value,tb)\n self.close()\n return result\n \n def close(self):\n ''\n\n \n self._closer.close()\n \n \n def __iter__(self):\n \n \n \n \n \n for line in self.file:\n yield line\n \n \ndef NamedTemporaryFile(mode='w+b',buffering=-1,encoding=None ,\nnewline=None ,suffix=None ,prefix=None ,\ndir=None ,delete=True ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n prefix,suffix,dir,output_type=_sanitize_params(prefix,suffix,dir)\n \n flags=_bin_openflags\n \n \n \n if _os.name =='nt'and delete:\n flags |=_os.O_TEMPORARY\n \n (fd,name)=_mkstemp_inner(dir,prefix,suffix,flags,output_type)\n try :\n file=_io.open(fd,mode,buffering=buffering,\n newline=newline,encoding=encoding)\n \n return _TemporaryFileWrapper(file,name,delete)\n except BaseException:\n _os.unlink(name)\n _os.close(fd)\n raise\n \nif _os.name !='posix'or _os.sys.platform =='cygwin':\n\n\n TemporaryFile=NamedTemporaryFile\n \nelse :\n\n\n\n _O_TMPFILE_WORKS=hasattr(_os,'O_TMPFILE')\n \n def TemporaryFile(mode='w+b',buffering=-1,encoding=None ,\n newline=None ,suffix=None ,prefix=None ,\n dir=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n \n global _O_TMPFILE_WORKS\n \n prefix,suffix,dir,output_type=_sanitize_params(prefix,suffix,dir)\n \n flags=_bin_openflags\n if _O_TMPFILE_WORKS:\n try :\n flags2=(flags |_os.O_TMPFILE)&~_os.O_CREAT\n fd=_os.open(dir,flags2,0o600)\n except IsADirectoryError:\n \n \n \n \n \n _O_TMPFILE_WORKS=False\n except OSError:\n \n \n \n \n \n \n \n pass\n else :\n try :\n return _io.open(fd,mode,buffering=buffering,\n newline=newline,encoding=encoding)\n except :\n _os.close(fd)\n raise\n \n \n (fd,name)=_mkstemp_inner(dir,prefix,suffix,flags,output_type)\n try :\n _os.unlink(name)\n return _io.open(fd,mode,buffering=buffering,\n newline=newline,encoding=encoding)\n except :\n _os.close(fd)\n raise\n \nclass SpooledTemporaryFile:\n ''\n\n\n \n _rolled=False\n \n def __init__(self,max_size=0,mode='w+b',buffering=-1,\n encoding=None ,newline=None ,\n suffix=None ,prefix=None ,dir=None ):\n if 'b'in mode:\n self._file=_io.BytesIO()\n else :\n \n \n \n self._file=_io.StringIO(newline=\"\\n\")\n self._max_size=max_size\n self._rolled=False\n self._TemporaryFileArgs={'mode':mode,'buffering':buffering,\n 'suffix':suffix,'prefix':prefix,\n 'encoding':encoding,'newline':newline,\n 'dir':dir}\n \n def _check(self,file):\n if self._rolled:return\n max_size=self._max_size\n if max_size and file.tell()>max_size:\n self.rollover()\n \n def rollover(self):\n if self._rolled:return\n file=self._file\n newfile=self._file=TemporaryFile(**self._TemporaryFileArgs)\n del self._TemporaryFileArgs\n \n newfile.write(file.getvalue())\n newfile.seek(file.tell(),0)\n \n 
self._rolled=True\n \n \n \n \n \n \n \n def __enter__(self):\n if self._file.closed:\n raise ValueError(\"Cannot enter context with closed file\")\n return self\n \n def __exit__(self,exc,value,tb):\n self._file.close()\n \n \n def __iter__(self):\n return self._file.__iter__()\n \n def close(self):\n self._file.close()\n \n @property\n def closed(self):\n return self._file.closed\n \n @property\n def encoding(self):\n try :\n return self._file.encoding\n except AttributeError:\n if 'b'in self._TemporaryFileArgs['mode']:\n raise\n return self._TemporaryFileArgs['encoding']\n \n def fileno(self):\n self.rollover()\n return self._file.fileno()\n \n def flush(self):\n self._file.flush()\n \n def isatty(self):\n return self._file.isatty()\n \n @property\n def mode(self):\n try :\n return self._file.mode\n except AttributeError:\n return self._TemporaryFileArgs['mode']\n \n @property\n def name(self):\n try :\n return self._file.name\n except AttributeError:\n return None\n \n @property\n def newlines(self):\n try :\n return self._file.newlines\n except AttributeError:\n if 'b'in self._TemporaryFileArgs['mode']:\n raise\n return self._TemporaryFileArgs['newline']\n \n def read(self,*args):\n return self._file.read(*args)\n \n def readline(self,*args):\n return self._file.readline(*args)\n \n def readlines(self,*args):\n return self._file.readlines(*args)\n \n def seek(self,*args):\n self._file.seek(*args)\n \n @property\n def softspace(self):\n return self._file.softspace\n \n def tell(self):\n return self._file.tell()\n \n def truncate(self,size=None ):\n if size is None :\n self._file.truncate()\n else :\n if size >self._max_size:\n self.rollover()\n self._file.truncate(size)\n \n def write(self,s):\n file=self._file\n rv=file.write(s)\n self._check(file)\n return rv\n \n def writelines(self,iterable):\n file=self._file\n rv=file.writelines(iterable)\n self._check(file)\n return rv\n \n \nclass TemporaryDirectory(object):\n ''\n\n\n\n\n\n\n\n\n \n \n def __init__(self,suffix=None ,prefix=None ,dir=None ):\n self.name=mkdtemp(suffix,prefix,dir)\n self._finalizer=_weakref.finalize(\n self,self._cleanup,self.name,\n warn_message=\"Implicitly cleaning up {!r}\".format(self))\n \n @classmethod\n def _cleanup(cls,name,warn_message):\n _shutil.rmtree(name)\n _warnings.warn(warn_message,ResourceWarning)\n \n def __repr__(self):\n return \"<{} {!r}>\".format(self.__class__.__name__,self.name)\n \n def __enter__(self):\n return self.name\n \n def __exit__(self,exc,value,tb):\n self.cleanup()\n \n def cleanup(self):\n if self._finalizer.detach():\n _shutil.rmtree(self.name)\n", ["_thread", "errno", "functools", "io", "os", "random", "shutil", "warnings", "weakref"]], "textwrap": [".py", "''\n\n\n\n\n\n\nimport re\n\n__all__=['TextWrapper','wrap','fill','dedent','indent','shorten']\n\n\n\n\n_whitespace='\\t\\n\\x0b\\x0c\\r '\n\nclass TextWrapper:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n unicode_whitespace_trans={}\n uspace=ord(' ')\n for x in _whitespace:\n unicode_whitespace_trans[ord(x)]=uspace\n \n \n \n \n \n \n \n word_punct=r'[\\w!\"\\'&.,?]'\n letter=r'[^\\d\\W]'\n whitespace=r'[%s]'%re.escape(_whitespace)\n nowhitespace='[^'+whitespace[1:]\n wordsep_re=re.compile(r'''\n ( # any whitespace\n %(ws)s+\n | # em-dash between words\n (?<=%(wp)s) -{2,} (?=\\w)\n | # word, possibly hyphenated\n %(nws)s+? (?:\n # hyphenated word\n -(?: (?<=%(lt)s{2}-) | (?<=%(lt)s-%(lt)s-))\n (?= %(lt)s -? 
%(lt)s)\n | # end of word\n (?=%(ws)s|\\Z)\n | # em-dash\n (?<=%(wp)s) (?=-{2,}\\w)\n )\n )'''%{'wp':word_punct,'lt':letter,\n 'ws':whitespace,'nws':nowhitespace},\n re.VERBOSE)\n del word_punct,letter,nowhitespace\n \n \n \n \n \n wordsep_simple_re=re.compile(r'(%s+)'%whitespace)\n del whitespace\n \n \n \n sentence_end_re=re.compile(r'[a-z]'\n r'[\\.\\!\\?]'\n r'[\\\"\\']?'\n r'\\Z')\n \n def __init__(self,\n width=70,\n initial_indent=\"\",\n subsequent_indent=\"\",\n expand_tabs=True ,\n replace_whitespace=True ,\n fix_sentence_endings=False ,\n break_long_words=True ,\n drop_whitespace=True ,\n break_on_hyphens=True ,\n tabsize=8,\n *,\n max_lines=None ,\n placeholder=' [...]'):\n self.width=width\n self.initial_indent=initial_indent\n self.subsequent_indent=subsequent_indent\n self.expand_tabs=expand_tabs\n self.replace_whitespace=replace_whitespace\n self.fix_sentence_endings=fix_sentence_endings\n self.break_long_words=break_long_words\n self.drop_whitespace=drop_whitespace\n self.break_on_hyphens=break_on_hyphens\n self.tabsize=tabsize\n self.max_lines=max_lines\n self.placeholder=placeholder\n \n \n \n \n \n def _munge_whitespace(self,text):\n ''\n\n\n\n\n \n if self.expand_tabs:\n text=text.expandtabs(self.tabsize)\n if self.replace_whitespace:\n text=text.translate(self.unicode_whitespace_trans)\n return text\n \n \n def _split(self,text):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n if self.break_on_hyphens is True :\n chunks=self.wordsep_re.split(text)\n else :\n chunks=self.wordsep_simple_re.split(text)\n chunks=[c for c in chunks if c]\n return chunks\n \n def _fix_sentence_endings(self,chunks):\n ''\n\n\n\n\n\n\n \n i=0\n patsearch=self.sentence_end_re.search\n while i 0)\"%self.width)\n if self.max_lines is not None :\n if self.max_lines >1:\n indent=self.subsequent_indent\n else :\n indent=self.initial_indent\n if len(indent)+len(self.placeholder.lstrip())>self.width:\n raise ValueError(\"placeholder too large for max width\")\n \n \n \n chunks.reverse()\n \n while chunks:\n \n \n \n cur_line=[]\n cur_len=0\n \n \n if lines:\n indent=self.subsequent_indent\n else :\n indent=self.initial_indent\n \n \n width=self.width -len(indent)\n \n \n \n if self.drop_whitespace and chunks[-1].strip()==''and lines:\n del chunks[-1]\n \n while chunks:\n l=len(chunks[-1])\n \n \n if cur_len+l <=width:\n cur_line.append(chunks.pop())\n cur_len +=l\n \n \n else :\n break\n \n \n \n if chunks and len(chunks[-1])>width:\n self._handle_long_word(chunks,cur_line,cur_len,width)\n cur_len=sum(map(len,cur_line))\n \n \n if self.drop_whitespace and cur_line and cur_line[-1].strip()=='':\n cur_len -=len(cur_line[-1])\n del cur_line[-1]\n \n if cur_line:\n if (self.max_lines is None or\n len(lines)+1 \"%(\n \"locked\"if self._block.locked()else \"unlocked\",\n self.__class__.__module__,\n self.__class__.__qualname__,\n owner,\n self._count,\n hex(id(self))\n )\n \n def acquire(self,blocking=True ,timeout=-1):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n me=get_ident()\n if self._owner ==me:\n self._count +=1\n return 1\n rc=self._block.acquire(blocking,timeout)\n if rc:\n self._owner=me\n self._count=1\n return rc\n \n __enter__=acquire\n \n def release(self):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if self._owner !=get_ident():\n raise RuntimeError(\"cannot release un-acquired lock\")\n self._count=count=self._count -1\n if not count:\n self._owner=None\n self._block.release()\n \n def __exit__(self,t,v,tb):\n self.release()\n \n \n \n def _acquire_restore(self,state):\n self._block.acquire()\n 
self._count,self._owner=state\n \n def _release_save(self):\n if self._count ==0:\n raise RuntimeError(\"cannot release un-acquired lock\")\n count=self._count\n self._count=0\n owner=self._owner\n self._owner=None\n self._block.release()\n return (count,owner)\n \n def _is_owned(self):\n return self._owner ==get_ident()\n \n_PyRLock=_RLock\n\n\nclass Condition:\n ''\n\n\n\n\n\n\n\n\n \n \n def __init__(self,lock=None ):\n if lock is None :\n lock=RLock()\n self._lock=lock\n \n self.acquire=lock.acquire\n self.release=lock.release\n \n \n \n try :\n self._release_save=lock._release_save\n except AttributeError:\n pass\n try :\n self._acquire_restore=lock._acquire_restore\n except AttributeError:\n pass\n try :\n self._is_owned=lock._is_owned\n except AttributeError:\n pass\n self._waiters=_deque()\n \n def __enter__(self):\n return self._lock.__enter__()\n \n def __exit__(self,*args):\n return self._lock.__exit__(*args)\n \n def __repr__(self):\n return \"\"%(self._lock,len(self._waiters))\n \n def _release_save(self):\n self._lock.release()\n \n def _acquire_restore(self,x):\n self._lock.acquire()\n \n def _is_owned(self):\n \n \n if self._lock.acquire(0):\n self._lock.release()\n return False\n else :\n return True\n \n def wait(self,timeout=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if not self._is_owned():\n raise RuntimeError(\"cannot wait on un-acquired lock\")\n waiter=_allocate_lock()\n waiter.acquire()\n self._waiters.append(waiter)\n saved_state=self._release_save()\n gotit=False\n try :\n if timeout is None :\n waiter.acquire()\n gotit=True\n else :\n if timeout >0:\n gotit=waiter.acquire(True ,timeout)\n else :\n gotit=waiter.acquire(False )\n return gotit\n finally :\n self._acquire_restore(saved_state)\n if not gotit:\n try :\n self._waiters.remove(waiter)\n except ValueError:\n pass\n \n def wait_for(self,predicate,timeout=None ):\n ''\n\n\n\n\n\n \n endtime=None\n waittime=timeout\n result=predicate()\n while not result:\n if waittime is not None :\n if endtime is None :\n endtime=_time()+waittime\n else :\n waittime=endtime -_time()\n if waittime <=0:\n break\n self.wait(waittime)\n result=predicate()\n return result\n \n def notify(self,n=1):\n ''\n\n\n\n\n\n\n\n \n if not self._is_owned():\n raise RuntimeError(\"cannot notify on un-acquired lock\")\n all_waiters=self._waiters\n waiters_to_notify=_deque(_islice(all_waiters,n))\n if not waiters_to_notify:\n return\n for waiter in waiters_to_notify:\n waiter.release()\n try :\n all_waiters.remove(waiter)\n except ValueError:\n pass\n \n def notify_all(self):\n ''\n\n\n\n\n \n self.notify(len(self._waiters))\n \n notifyAll=notify_all\n \n \nclass Semaphore:\n ''\n\n\n\n\n\n\n \n \n \n \n def __init__(self,value=1):\n if value <0:\n raise ValueError(\"semaphore initial value must be >= 0\")\n self._cond=Condition(Lock())\n self._value=value\n \n def acquire(self,blocking=True ,timeout=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if not blocking and timeout is not None :\n raise ValueError(\"can't specify timeout for non-blocking acquire\")\n rc=False\n endtime=None\n with self._cond:\n while self._value ==0:\n if not blocking:\n break\n if timeout is not None :\n if endtime is None :\n endtime=_time()+timeout\n else :\n timeout=endtime -_time()\n if timeout <=0:\n break\n self._cond.wait(timeout)\n else :\n self._value -=1\n rc=True\n return rc\n \n __enter__=acquire\n \n def release(self):\n ''\n\n\n\n\n \n with self._cond:\n self._value +=1\n self._cond.notify()\n \n def 
__exit__(self,t,v,tb):\n self.release()\n \n \nclass BoundedSemaphore(Semaphore):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def __init__(self,value=1):\n Semaphore.__init__(self,value)\n self._initial_value=value\n \n def release(self):\n ''\n\n\n\n\n\n\n\n \n with self._cond:\n if self._value >=self._initial_value:\n raise ValueError(\"Semaphore released too many times\")\n self._value +=1\n self._cond.notify()\n \n \nclass Event:\n ''\n\n\n\n\n\n \n \n \n \n def __init__(self):\n self._cond=Condition(Lock())\n self._flag=False\n \n def _reset_internal_locks(self):\n \n self._cond.__init__(Lock())\n \n def is_set(self):\n ''\n return self._flag\n \n isSet=is_set\n \n def set(self):\n ''\n\n\n\n\n \n with self._cond:\n self._flag=True\n self._cond.notify_all()\n \n def clear(self):\n ''\n\n\n\n\n \n with self._cond:\n self._flag=False\n \n def wait(self,timeout=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n with self._cond:\n signaled=self._flag\n if not signaled:\n signaled=self._cond.wait(timeout)\n return signaled\n \n \n \n \n \n \n \n \n \n \n \n \n \nclass Barrier:\n ''\n\n\n\n\n\n \n \n def __init__(self,parties,action=None ,timeout=None ):\n ''\n\n\n\n\n\n\n \n self._cond=Condition(Lock())\n self._action=action\n self._timeout=timeout\n self._parties=parties\n self._state=0\n self._count=0\n \n def wait(self,timeout=None ):\n ''\n\n\n\n\n\n\n \n if timeout is None :\n timeout=self._timeout\n with self._cond:\n self._enter()\n index=self._count\n self._count +=1\n try :\n if index+1 ==self._parties:\n \n self._release()\n else :\n \n self._wait(timeout)\n return index\n finally :\n self._count -=1\n \n self._exit()\n \n \n \n def _enter(self):\n while self._state in (-1,1):\n \n self._cond.wait()\n \n if self._state <0:\n raise BrokenBarrierError\n assert self._state ==0\n \n \n \n def _release(self):\n try :\n if self._action:\n self._action()\n \n self._state=1\n self._cond.notify_all()\n except :\n \n self._break()\n raise\n \n \n \n def _wait(self,timeout):\n if not self._cond.wait_for(lambda :self._state !=0,timeout):\n \n self._break()\n raise BrokenBarrierError\n if self._state <0:\n raise BrokenBarrierError\n assert self._state ==1\n \n \n \n def _exit(self):\n if self._count ==0:\n if self._state in (-1,1):\n \n self._state=0\n self._cond.notify_all()\n \n def reset(self):\n ''\n\n\n\n\n \n with self._cond:\n if self._count >0:\n if self._state ==0:\n \n self._state=-1\n elif self._state ==-2:\n \n \n self._state=-1\n else :\n self._state=0\n self._cond.notify_all()\n \n def abort(self):\n ''\n\n\n\n\n \n with self._cond:\n self._break()\n \n def _break(self):\n \n \n self._state=-2\n self._cond.notify_all()\n \n @property\n def parties(self):\n ''\n return self._parties\n \n @property\n def n_waiting(self):\n ''\n \n \n if self._state ==0:\n return self._count\n return 0\n \n @property\n def broken(self):\n ''\n return self._state ==-2\n \n \nclass BrokenBarrierError(RuntimeError):\n pass\n \n \n \n_counter=_count().__next__\n_counter()\ndef _newname(template=\"Thread-%d\"):\n return template %_counter()\n \n \n_active_limbo_lock=_allocate_lock()\n_active={}\n_limbo={}\n_dangling=WeakSet()\n\n\n\nclass Thread:\n ''\n\n\n\n\n\n \n \n _initialized=False\n \n \n \n \n _exc_info=_sys.exc_info\n \n \n \n \n def __init__(self,group=None ,target=None ,name=None ,\n args=(),kwargs=None ,*,daemon=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n assert group is None ,\"group argument must be None for now\"\n if kwargs is None :\n kwargs={}\n self._target=target\n self._name=str(name 
or _newname())\n self._args=args\n self._kwargs=kwargs\n if daemon is not None :\n self._daemonic=daemon\n else :\n self._daemonic=current_thread().daemon\n self._ident=None\n self._tstate_lock=None\n self._started=Event()\n self._is_stopped=False\n self._initialized=True\n \n \n self._stderr=_sys.stderr\n \n _dangling.add(self)\n \n def _reset_internal_locks(self,is_alive):\n \n \n self._started._reset_internal_locks()\n if is_alive:\n self._set_tstate_lock()\n else :\n \n \n self._is_stopped=True\n self._tstate_lock=None\n \n def __repr__(self):\n assert self._initialized,\"Thread.__init__() was not called\"\n status=\"initial\"\n if self._started.is_set():\n status=\"started\"\n self.is_alive()\n if self._is_stopped:\n status=\"stopped\"\n if self._daemonic:\n status +=\" daemon\"\n if self._ident is not None :\n status +=\" %s\"%self._ident\n return \"<%s(%s, %s)>\"%(self.__class__.__name__,self._name,status)\n \n def start(self):\n ''\n\n\n\n\n\n\n\n \n if not self._initialized:\n raise RuntimeError(\"thread.__init__() not called\")\n \n if self._started.is_set():\n raise RuntimeError(\"threads can only be started once\")\n with _active_limbo_lock:\n _limbo[self]=self\n try :\n _start_new_thread(self._bootstrap,())\n except Exception:\n with _active_limbo_lock:\n del _limbo[self]\n raise\n self._started.wait()\n \n def run(self):\n ''\n\n\n\n\n\n\n \n try :\n if self._target:\n self._target(*self._args,**self._kwargs)\n finally :\n \n \n del self._target,self._args,self._kwargs\n \n def _bootstrap(self):\n \n \n \n \n \n \n \n \n \n \n \n \n try :\n self._bootstrap_inner()\n except :\n if self._daemonic and _sys is None :\n return\n raise\n \n def _set_ident(self):\n self._ident=get_ident()\n \n def _set_tstate_lock(self):\n ''\n\n\n \n self._tstate_lock=_set_sentinel()\n self._tstate_lock.acquire()\n \n def _bootstrap_inner(self):\n try :\n self._set_ident()\n self._set_tstate_lock()\n self._started.set()\n with _active_limbo_lock:\n _active[self._ident]=self\n del _limbo[self]\n \n if _trace_hook:\n _sys.settrace(_trace_hook)\n if _profile_hook:\n _sys.setprofile(_profile_hook)\n \n try :\n self.run()\n except SystemExit:\n pass\n except :\n \n \n \n \n if _sys and _sys.stderr is not None :\n print(\"Exception in thread %s:\\n%s\"%\n (self.name,_format_exc()),file=_sys.stderr)\n elif self._stderr is not None :\n \n \n \n exc_type,exc_value,exc_tb=self._exc_info()\n try :\n print((\n \"Exception in thread \"+self.name+\n \" (most likely raised during interpreter shutdown):\"),file=self._stderr)\n print((\n \"Traceback (most recent call last):\"),file=self._stderr)\n while exc_tb:\n print((\n ' File \"%s\", line %s, in %s'%\n (exc_tb.tb_frame.f_code.co_filename,\n exc_tb.tb_lineno,\n exc_tb.tb_frame.f_code.co_name)),file=self._stderr)\n exc_tb=exc_tb.tb_next\n print((\"%s: %s\"%(exc_type,exc_value)),file=self._stderr)\n self._stderr.flush()\n \n \n finally :\n del exc_type,exc_value,exc_tb\n finally :\n \n \n \n \n \n pass\n finally :\n with _active_limbo_lock:\n try :\n \n \n del _active[get_ident()]\n except :\n pass\n \n def _stop(self):\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n lock=self._tstate_lock\n if lock is not None :\n assert not lock.locked()\n self._is_stopped=True\n self._tstate_lock=None\n \n def _delete(self):\n ''\n with _active_limbo_lock:\n del _active[get_ident()]\n \n \n \n \n \n def join(self,timeout=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if not self._initialized:\n raise RuntimeError(\"Thread.__init__() not called\")\n if not 
self._started.is_set():\n raise RuntimeError(\"cannot join thread before it is started\")\n if self is current_thread():\n raise RuntimeError(\"cannot join current thread\")\n \n if timeout is None :\n self._wait_for_tstate_lock()\n else :\n \n \n self._wait_for_tstate_lock(timeout=max(timeout,0))\n \n def _wait_for_tstate_lock(self,block=True ,timeout=-1):\n \n \n \n \n \n \n lock=self._tstate_lock\n if lock is None :\n assert self._is_stopped\n elif lock.acquire(block,timeout):\n lock.release()\n self._stop()\n \n @property\n def name(self):\n ''\n\n\n\n\n \n assert self._initialized,\"Thread.__init__() not called\"\n return self._name\n \n @name.setter\n def name(self,name):\n assert self._initialized,\"Thread.__init__() not called\"\n self._name=str(name)\n \n @property\n def ident(self):\n ''\n\n\n\n\n\n \n assert self._initialized,\"Thread.__init__() not called\"\n return self._ident\n \n def is_alive(self):\n ''\n\n\n\n\n\n \n assert self._initialized,\"Thread.__init__() not called\"\n if self._is_stopped or not self._started.is_set():\n return False\n self._wait_for_tstate_lock(False )\n return not self._is_stopped\n \n isAlive=is_alive\n \n @property\n def daemon(self):\n ''\n\n\n\n\n\n\n\n\n\n \n assert self._initialized,\"Thread.__init__() not called\"\n return self._daemonic\n \n @daemon.setter\n def daemon(self,daemonic):\n if not self._initialized:\n raise RuntimeError(\"Thread.__init__() not called\")\n if self._started.is_set():\n raise RuntimeError(\"cannot set daemon status of active thread\")\n self._daemonic=daemonic\n \n def isDaemon(self):\n return self.daemon\n \n def setDaemon(self,daemonic):\n self.daemon=daemonic\n \n def getName(self):\n return self.name\n \n def setName(self,name):\n self.name=name\n \n \n \nclass Timer(Thread):\n ''\n\n\n\n\n\n \n \n def __init__(self,interval,function,args=None ,kwargs=None ):\n Thread.__init__(self)\n self.interval=interval\n self.function=function\n self.args=args if args is not None else []\n self.kwargs=kwargs if kwargs is not None else {}\n self.finished=Event()\n \n def cancel(self):\n ''\n self.finished.set()\n \n def run(self):\n self.finished.wait(self.interval)\n if not self.finished.is_set():\n self.function(*self.args,**self.kwargs)\n self.finished.set()\n \n \n \n \nclass _MainThread(Thread):\n\n def __init__(self):\n Thread.__init__(self,name=\"MainThread\",daemon=False )\n self._set_tstate_lock()\n self._started.set()\n self._set_ident()\n with _active_limbo_lock:\n _active[self._ident]=self\n \n \n \n \n \n \n \n \n \n \nclass _DummyThread(Thread):\n\n def __init__(self):\n Thread.__init__(self,name=_newname(\"Dummy-%d\"),daemon=True )\n \n self._started.set()\n self._set_ident()\n with _active_limbo_lock:\n _active[self._ident]=self\n \n def _stop(self):\n pass\n \n def is_alive(self):\n assert not self._is_stopped and self._started.is_set()\n return True\n \n def join(self,timeout=None ):\n assert False ,\"cannot join a dummy thread\"\n \n \n \n \ndef current_thread():\n ''\n\n\n\n\n \n try :\n return _active[get_ident()]\n except KeyError:\n return _DummyThread()\n \ncurrentThread=current_thread\n\ndef active_count():\n ''\n\n\n\n\n \n with _active_limbo_lock:\n return len(_active)+len(_limbo)\n \nactiveCount=active_count\n\ndef _enumerate():\n\n return list(_active.values())+list(_limbo.values())\n \ndef enumerate():\n ''\n\n\n\n\n\n \n with _active_limbo_lock:\n return list(_active.values())+list(_limbo.values())\n \nfrom _thread import stack_size\n\n\n\n\n\n_main_thread=_MainThread()\n\ndef 
_shutdown():\n\n\n\n\n\n if _main_thread._is_stopped:\n \n return\n tlock=_main_thread._tstate_lock\n \n \n assert tlock is not None\n assert tlock.locked()\n tlock.release()\n _main_thread._stop()\n t=_pickSomeNonDaemonThread()\n while t:\n t.join()\n t=_pickSomeNonDaemonThread()\n \ndef _pickSomeNonDaemonThread():\n for t in enumerate():\n if not t.daemon and t.is_alive():\n return t\n return None\n \ndef main_thread():\n ''\n\n\n\n \n return _main_thread\n \n \n \n \ntry :\n from _thread import _local as local\nexcept ImportError:\n from _threading_local import local\n \n \ndef _after_fork():\n ''\n\n \n \n \n global _active_limbo_lock,_main_thread\n _active_limbo_lock=_allocate_lock()\n \n \n new_active={}\n current=current_thread()\n _main_thread=current\n with _active_limbo_lock:\n \n \n threads=set(_enumerate())\n threads.update(_dangling)\n for thread in threads:\n \n \n if thread is current:\n \n \n thread._reset_internal_locks(True )\n ident=get_ident()\n thread._ident=ident\n new_active[ident]=thread\n else :\n \n thread._reset_internal_locks(False )\n thread._stop()\n \n _limbo.clear()\n _active.clear()\n _active.update(new_active)\n assert len(_active)==1\n \n \nif hasattr(_os,\"register_at_fork\"):\n _os.register_at_fork(after_in_child=_after_fork)\n", ["_collections", "_thread", "_threading_local", "_weakrefset", "collections", "itertools", "os", "sys", "time", "traceback"]], "time": [".py", "import _locale\nimport javascript\n\n\ndate=javascript.Date.new\nnow=javascript.Date.now\n\n\n\n\n\n\n\n_STRUCT_TM_ITEMS=9\n\n\n\n\n\ndef _get_day_of_year(arg):\n ''\n\n\n\n\n\n\n\n\n\n \n ml=[31,28,31,30,31,30,31,31,30,31,30,31]\n if arg[0]%4 ==0:\n ml[1]+=1\n i=1\n yday=0\n while i mm >13:\n raise ValueError(\"month out of range\")\n \n dd=t[2]\n if dd ==0:dd=1\n if -1 >dd >32:\n raise ValueError(\"day of month out of range\")\n \n hh=t[3]\n if -1 >hh >24:\n raise ValueError(\"hour out of range\")\n \n minu=t[4]\n if -1 >minu >60:\n raise ValueError(\"minute out of range\")\n \n ss=t[5]\n if -1 >ss >62:\n raise ValueError(\"seconds out of range\")\n \n wd=t[6]%7\n if wd <-2:\n raise ValueError(\"day of week out of range\")\n \n dy=t[7]\n if dy ==0:dy=1\n if -1 >dy >367:\n raise ValueError(\"day of year out of range\")\n \n return t[0],mm,dd,hh,minu,ss,wd,dy,t[-1]\n \n \ndef _is_dst(secs=None ):\n ''\n d=date()\n if secs is not None :\n d=date(secs *1000)\n \n \n \n jan=date(d.getFullYear(),0,1)\n jul=date(d.getFullYear(),6,1)\n dst=int(d.getTimezoneOffset()=0 else 6\n tmp=struct_time([d.getUTCFullYear(),\n d.getUTCMonth()+1,d.getUTCDate(),\n d.getUTCHours(),d.getUTCMinutes(),d.getUTCSeconds(),\n wday,0,0])\n tmp.args[7]=_get_day_of_year(tmp.args)\n return tmp\n \ndef localtime(secs=None ):\n d=date()\n if secs is not None :\n d=date(secs *1000)\n dst=_is_dst(secs)\n wday=d.getDay()-1 if d.getDay()-1 >=0 else 6\n tmp=struct_time([d.getFullYear(),\n d.getMonth()+1,d.getDate(),\n d.getHours(),d.getMinutes(),d.getSeconds(),\n wday,0,dst])\n tmp.args[7]=_get_day_of_year(tmp.args)\n return tmp\n \ndef mktime(t):\n if isinstance(t,struct_time):\n d1=date(t.tm_year,t.tm_mon -1,t.tm_mday,\n t.tm_hour,t.tm_min,t.tm_sec,0).getTime()\n elif isinstance(t,tuple):\n d1=date(t[0],t[1]-1,t[2],t[3],t[4],t[5],0).getTime()\n else :\n raise ValueError(\"Tuple or struct_time argument required\")\n d2=date(0).getTime()\n return (d1 -d2)/1000.\n \ndef monotonic():\n return now()/1000.\n \ndef perf_counter():\n return now()/1000.\n \ndef process_time():\n return now()/1000.\n \ndef time():\n return 
float(date().getTime()/1000)\n \ndef sleep(secs):\n ''\n\n \n raise NotImplementedError(\"Blocking functions like time.sleep() are not \"\n \"supported in the browser. Use functions in module browser.timer \"\n \"instead.\")\n \ndef strftime(_format,t=None ):\n def ns(t,nb):\n \n res=str(t)\n while len(res) float\n repeat(string, string) -> list\n default_timer() -> float\n\n\"\"\"\n\nimport gc\nimport sys\nimport time\nimport itertools\n\n__all__=[\"Timer\",\"timeit\",\"repeat\",\"default_timer\"]\n\ndummy_src_name=\"\"\ndefault_number=1000000\ndefault_repeat=5\ndefault_timer=time.perf_counter\n\n_globals=globals\n\n\n\n\ntemplate=\"\"\"\ndef inner(_it, _timer{init}):\n {setup}\n _t0 = _timer()\n for _i in _it:\n {stmt}\n _t1 = _timer()\n return _t1 - _t0\n\"\"\"\n\ndef reindent(src,indent):\n ''\n return src.replace(\"\\n\",\"\\n\"+\" \"*indent)\n \nclass Timer:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def __init__(self,stmt=\"pass\",setup=\"pass\",timer=default_timer,\n globals=None ):\n ''\n self.timer=timer\n local_ns={}\n global_ns=_globals()if globals is None else globals\n init=''\n if isinstance(setup,str):\n \n compile(setup,dummy_src_name,\"exec\")\n stmtprefix=setup+'\\n'\n setup=reindent(setup,4)\n elif callable(setup):\n local_ns['_setup']=setup\n init +=', _setup=_setup'\n stmtprefix=''\n setup='_setup()'\n else :\n raise ValueError(\"setup is neither a string nor callable\")\n if isinstance(stmt,str):\n \n compile(stmtprefix+stmt,dummy_src_name,\"exec\")\n stmt=reindent(stmt,8)\n elif callable(stmt):\n local_ns['_stmt']=stmt\n init +=', _stmt=_stmt'\n stmt='_stmt()'\n else :\n raise ValueError(\"stmt is neither a string nor callable\")\n src=template.format(stmt=stmt,setup=setup,init=init)\n self.src=src\n code=compile(src,dummy_src_name,\"exec\")\n exec(code,global_ns,local_ns)\n self.inner=local_ns[\"inner\"]\n \n def print_exc(self,file=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n import linecache,traceback\n if self.src is not None :\n linecache.cache[dummy_src_name]=(len(self.src),\n None ,\n self.src.split(\"\\n\"),\n dummy_src_name)\n \n \n traceback.print_exc(file=file)\n \n def timeit(self,number=default_number):\n ''\n\n\n\n\n\n\n\n \n it=itertools.repeat(None ,number)\n gcold=gc.isenabled()\n gc.disable()\n try :\n timing=self.inner(it,self.timer)\n finally :\n if gcold:\n gc.enable()\n return timing\n \n def repeat(self,repeat=default_repeat,number=default_number):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n r=[]\n for i in range(repeat):\n t=self.timeit(number)\n r.append(t)\n return r\n \n def autorange(self,callback=None ):\n ''\n\n\n\n\n\n\n\n \n i=1\n while True :\n for j in 1,2,5:\n number=i *j\n time_taken=self.timeit(number)\n if callback:\n callback(number,time_taken)\n if time_taken >=0.2:\n return (number,time_taken)\n i *=10\n \ndef timeit(stmt=\"pass\",setup=\"pass\",timer=default_timer,\nnumber=default_number,globals=None ):\n ''\n return Timer(stmt,setup,timer,globals).timeit(number)\n \ndef repeat(stmt=\"pass\",setup=\"pass\",timer=default_timer,\nrepeat=default_repeat,number=default_number,globals=None ):\n ''\n return Timer(stmt,setup,timer,globals).repeat(repeat,number)\n \ndef main(args=None ,*,_wrap_timer=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if args is None :\n args=sys.argv[1:]\n import getopt\n try :\n opts,args=getopt.getopt(args,\"n:u:s:r:tcpvh\",\n [\"number=\",\"setup=\",\"repeat=\",\n \"time\",\"clock\",\"process\",\n \"verbose\",\"unit=\",\"help\"])\n except getopt.error as err:\n print(err)\n print(\"use -h/--help for 
command line help\")\n return 2\n \n timer=default_timer\n stmt=\"\\n\".join(args)or \"pass\"\n number=0\n setup=[]\n repeat=default_repeat\n verbose=0\n time_unit=None\n units={\"nsec\":1e-9,\"usec\":1e-6,\"msec\":1e-3,\"sec\":1.0}\n precision=3\n for o,a in opts:\n if o in (\"-n\",\"--number\"):\n number=int(a)\n if o in (\"-s\",\"--setup\"):\n setup.append(a)\n if o in (\"-u\",\"--unit\"):\n if a in units:\n time_unit=a\n else :\n print(\"Unrecognized unit. Please select nsec, usec, msec, or sec.\",\n file=sys.stderr)\n return 2\n if o in (\"-r\",\"--repeat\"):\n repeat=int(a)\n if repeat <=0:\n repeat=1\n if o in (\"-p\",\"--process\"):\n timer=time.process_time\n if o in (\"-v\",\"--verbose\"):\n if verbose:\n precision +=1\n verbose +=1\n if o in (\"-h\",\"--help\"):\n print(__doc__,end=' ')\n return 0\n setup=\"\\n\".join(setup)or \"pass\"\n \n \n \n \n import os\n sys.path.insert(0,os.curdir)\n if _wrap_timer is not None :\n timer=_wrap_timer(timer)\n \n t=Timer(stmt,setup,timer)\n if number ==0:\n \n callback=None\n if verbose:\n def callback(number,time_taken):\n msg=\"{num} loop{s} -> {secs:.{prec}g} secs\"\n plural=(number !=1)\n print(msg.format(num=number,s='s'if plural else '',\n secs=time_taken,prec=precision))\n try :\n number,_=t.autorange(callback)\n except :\n t.print_exc()\n return 1\n \n if verbose:\n print()\n \n try :\n raw_timings=t.repeat(repeat,number)\n except :\n t.print_exc()\n return 1\n \n def format_time(dt):\n unit=time_unit\n \n if unit is not None :\n scale=units[unit]\n else :\n scales=[(scale,unit)for unit,scale in units.items()]\n scales.sort(reverse=True )\n for scale,unit in scales:\n if dt >=scale:\n break\n \n return \"%.*g %s\"%(precision,dt /scale,unit)\n \n if verbose:\n print(\"raw times: %s\"%\", \".join(map(format_time,raw_timings)))\n print()\n timings=[dt /number for dt in raw_timings]\n \n best=min(timings)\n print(\"%d loop%s, best of %d: %s per loop\"\n %(number,'s'if number !=1 else '',\n repeat,format_time(best)))\n \n best=min(timings)\n worst=max(timings)\n if worst >=best *4:\n import warnings\n warnings.warn_explicit(\"The test results are likely unreliable. 
\"\n \"The worst time (%s) was more than four times \"\n \"slower than the best time (%s).\"\n %(format_time(worst),format_time(best)),\n UserWarning,'',0)\n return None\n \nif __name__ ==\"__main__\":\n sys.exit(main())\n", ["gc", "getopt", "itertools", "linecache", "os", "sys", "time", "traceback", "warnings"]], "token": [".py", "''\n\n__all__=['tok_name','ISTERMINAL','ISNONTERMINAL','ISEOF']\n\n\n\n\n\n\n\n\n\nENDMARKER=0\nNAME=1\nNUMBER=2\nSTRING=3\nNEWLINE=4\nINDENT=5\nDEDENT=6\nLPAR=7\nRPAR=8\nLSQB=9\nRSQB=10\nCOLON=11\nCOMMA=12\nSEMI=13\nPLUS=14\nMINUS=15\nSTAR=16\nSLASH=17\nVBAR=18\nAMPER=19\nLESS=20\nGREATER=21\nEQUAL=22\nDOT=23\nPERCENT=24\nLBRACE=25\nRBRACE=26\nEQEQUAL=27\nNOTEQUAL=28\nLESSEQUAL=29\nGREATEREQUAL=30\nTILDE=31\nCIRCUMFLEX=32\nLEFTSHIFT=33\nRIGHTSHIFT=34\nDOUBLESTAR=35\nPLUSEQUAL=36\nMINEQUAL=37\nSTAREQUAL=38\nSLASHEQUAL=39\nPERCENTEQUAL=40\nAMPEREQUAL=41\nVBAREQUAL=42\nCIRCUMFLEXEQUAL=43\nLEFTSHIFTEQUAL=44\nRIGHTSHIFTEQUAL=45\nDOUBLESTAREQUAL=46\nDOUBLESLASH=47\nDOUBLESLASHEQUAL=48\nAT=49\nATEQUAL=50\nRARROW=51\nELLIPSIS=52\n\nOP=53\nERRORTOKEN=54\n\nCOMMENT=55\nNL=56\nENCODING=57\nN_TOKENS=58\n\nNT_OFFSET=256\n\n\ntok_name={value:name\nfor name,value in globals().items()\nif isinstance(value,int)and not name.startswith('_')}\n__all__.extend(tok_name.values())\n\ndef ISTERMINAL(x):\n return x =NT_OFFSET\n \ndef ISEOF(x):\n return x ==ENDMARKER\n \n \ndef _main():\n import re\n import sys\n args=sys.argv[1:]\n inFileName=args and args[0]or \"Include/token.h\"\n outFileName=\"Lib/token.py\"\n if len(args)>1:\n outFileName=args[1]\n try :\n fp=open(inFileName)\n except OSError as err:\n sys.stdout.write(\"I/O error: %s\\n\"%str(err))\n sys.exit(1)\n with fp:\n lines=fp.read().split(\"\\n\")\n prog=re.compile(\n r\"#define[ \\t][ \\t]*([A-Z0-9][A-Z0-9_]*)[ \\t][ \\t]*([0-9][0-9]*)\",\n re.IGNORECASE)\n comment_regex=re.compile(\n r\"^\\s*/\\*\\s*(.+?)\\s*\\*/\\s*$\",\n re.IGNORECASE)\n \n tokens={}\n prev_val=None\n for line in lines:\n match=prog.match(line)\n if match:\n name,val=match.group(1,2)\n val=int(val)\n tokens[val]={'token':name}\n prev_val=val\n else :\n comment_match=comment_regex.match(line)\n if comment_match and prev_val is not None :\n comment=comment_match.group(1)\n tokens[prev_val]['comment']=comment\n keys=sorted(tokens.keys())\n \n try :\n fp=open(outFileName)\n except OSError as err:\n sys.stderr.write(\"I/O error: %s\\n\"%str(err))\n sys.exit(2)\n with fp:\n format=fp.read().split(\"\\n\")\n try :\n start=format.index(\"#--start constants--\")+1\n end=format.index(\"#--end constants--\")\n except ValueError:\n sys.stderr.write(\"target does not contain format markers\")\n sys.exit(3)\n lines=[]\n for key in keys:\n lines.append(\"%s = %d\"%(tokens[key][\"token\"],key))\n if \"comment\"in tokens[key]:\n lines.append(\"# %s\"%tokens[key][\"comment\"])\n format[start:end]=lines\n try :\n fp=open(outFileName,'w')\n except OSError as err:\n sys.stderr.write(\"I/O error: %s\\n\"%str(err))\n sys.exit(4)\n with fp:\n fp.write(\"\\n\".join(format))\n \n \nif __name__ ==\"__main__\":\n _main()\n", ["re", "sys"]], "tokenize": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n__author__='Ka-Ping Yee '\n__credits__=('GvR, ESR, Tim Peters, Thomas Wouters, Fred Drake, '\n'Skip Montanaro, Raymond Hettinger, Trent Nelson, '\n'Michael Foord')\nfrom builtins import open as _builtin_open\nfrom codecs import lookup,BOM_UTF8\nimport collections\nfrom io import TextIOWrapper\nfrom itertools import chain\nimport itertools as _itertools\nimport re\nimport 
sys\nfrom token import *\n\ncookie_re=re.compile(r'^[ \\t\\f]*#.*?coding[:=][ \\t]*([-\\w.]+)',re.ASCII)\nblank_re=re.compile(br'^[ \\t\\f]*(?:[#\\r\\n]|$)',re.ASCII)\n\nimport token\n__all__=token.__all__+[\"tokenize\",\"detect_encoding\",\n\"untokenize\",\"TokenInfo\"]\ndel token\n\nEXACT_TOKEN_TYPES={\n'(':LPAR,\n')':RPAR,\n'[':LSQB,\n']':RSQB,\n':':COLON,\n',':COMMA,\n';':SEMI,\n'+':PLUS,\n'-':MINUS,\n'*':STAR,\n'/':SLASH,\n'|':VBAR,\n'&':AMPER,\n'<':LESS,\n'>':GREATER,\n'=':EQUAL,\n'.':DOT,\n'%':PERCENT,\n'{':LBRACE,\n'}':RBRACE,\n'==':EQEQUAL,\n'!=':NOTEQUAL,\n'<=':LESSEQUAL,\n'>=':GREATEREQUAL,\n'~':TILDE,\n'^':CIRCUMFLEX,\n'<<':LEFTSHIFT,\n'>>':RIGHTSHIFT,\n'**':DOUBLESTAR,\n'+=':PLUSEQUAL,\n'-=':MINEQUAL,\n'*=':STAREQUAL,\n'/=':SLASHEQUAL,\n'%=':PERCENTEQUAL,\n'&=':AMPEREQUAL,\n'|=':VBAREQUAL,\n'^=':CIRCUMFLEXEQUAL,\n'<<=':LEFTSHIFTEQUAL,\n'>>=':RIGHTSHIFTEQUAL,\n'**=':DOUBLESTAREQUAL,\n'//':DOUBLESLASH,\n'//=':DOUBLESLASHEQUAL,\n'...':ELLIPSIS,\n'->':RARROW,\n'@':AT,\n'@=':ATEQUAL,\n}\n\nclass TokenInfo(collections.namedtuple('TokenInfo','type string start end line')):\n def __repr__(self):\n annotated_type='%d (%s)'%(self.type,tok_name[self.type])\n return ('TokenInfo(type=%s, string=%r, start=%r, end=%r, line=%r)'%\n self._replace(type=annotated_type))\n \n @property\n def exact_type(self):\n if self.type ==OP and self.string in EXACT_TOKEN_TYPES:\n return EXACT_TOKEN_TYPES[self.string]\n else :\n return self.type\n \ndef group(*choices):return '('+'|'.join(choices)+')'\ndef any(*choices):return group(*choices)+'*'\ndef maybe(*choices):return group(*choices)+'?'\n\n\n\nWhitespace=r'[ \\f\\t]*'\nComment=r'#[^\\r\\n]*'\nIgnore=Whitespace+any(r'\\\\\\r?\\n'+Whitespace)+maybe(Comment)\nName=r'\\w+'\n\nHexnumber=r'0[xX](?:_?[0-9a-fA-F])+'\nBinnumber=r'0[bB](?:_?[01])+'\nOctnumber=r'0[oO](?:_?[0-7])+'\nDecnumber=r'(?:0(?:_?0)*|[1-9](?:_?[0-9])*)'\nIntnumber=group(Hexnumber,Binnumber,Octnumber,Decnumber)\nExponent=r'[eE][-+]?[0-9](?:_?[0-9])*'\nPointfloat=group(r'[0-9](?:_?[0-9])*\\.(?:[0-9](?:_?[0-9])*)?',\nr'\\.[0-9](?:_?[0-9])*')+maybe(Exponent)\nExpfloat=r'[0-9](?:_?[0-9])*'+Exponent\nFloatnumber=group(Pointfloat,Expfloat)\nImagnumber=group(r'[0-9](?:_?[0-9])*[jJ]',Floatnumber+r'[jJ]')\nNumber=group(Imagnumber,Floatnumber,Intnumber)\n\n\ndef _all_string_prefixes():\n\n\n\n _valid_string_prefixes=['b','r','u','f','br','fr']\n \n result={''}\n for prefix in _valid_string_prefixes:\n for t in _itertools.permutations(prefix):\n \n \n for u in _itertools.product(*[(c,c.upper())for c in t]):\n result.add(''.join(u))\n return result\n \ndef _compile(expr):\n return re.compile(expr,re.UNICODE)\n \n \n 
\nStringPrefix=group(*_all_string_prefixes())\n\n\nSingle=r\"[^'\\\\]*(?:\\\\.[^'\\\\]*)*'\"\n\nDouble=r'[^\"\\\\]*(?:\\\\.[^\"\\\\]*)*\"'\n\nSingle3=r\"[^'\\\\]*(?:(?:\\\\.|'(?!''))[^'\\\\]*)*'''\"\n\nDouble3=r'[^\"\\\\]*(?:(?:\\\\.|\"(?!\"\"))[^\"\\\\]*)*\"\"\"'\nTriple=group(StringPrefix+\"'''\",StringPrefix+'\"\"\"')\n\nString=group(StringPrefix+r\"'[^\\n'\\\\]*(?:\\\\.[^\\n'\\\\]*)*'\",\nStringPrefix+r'\"[^\\n\"\\\\]*(?:\\\\.[^\\n\"\\\\]*)*\"')\n\n\n\n\nOperator=group(r\"\\*\\*=?\",r\">>=?\",r\"<<=?\",r\"!=\",\nr\"//=?\",r\"->\",\nr\"[+\\-*/%&@|^=<>]=?\",\nr\"~\")\n\nBracket='[][(){}]'\nSpecial=group(r'\\r?\\n',r'\\.\\.\\.',r'[:;.,@]')\nFunny=group(Operator,Bracket,Special)\n\nPlainToken=group(Number,Funny,String,Name)\nToken=Ignore+PlainToken\n\n\nContStr=group(StringPrefix+r\"'[^\\n'\\\\]*(?:\\\\.[^\\n'\\\\]*)*\"+\ngroup(\"'\",r'\\\\\\r?\\n'),\nStringPrefix+r'\"[^\\n\"\\\\]*(?:\\\\.[^\\n\"\\\\]*)*'+\ngroup('\"',r'\\\\\\r?\\n'))\nPseudoExtras=group(r'\\\\\\r?\\n|\\Z',Comment,Triple)\nPseudoToken=Whitespace+group(PseudoExtras,Number,Funny,ContStr,Name)\n\n\n\n\nendpats={}\nfor _prefix in _all_string_prefixes():\n endpats[_prefix+\"'\"]=Single\n endpats[_prefix+'\"']=Double\n endpats[_prefix+\"'''\"]=Single3\n endpats[_prefix+'\"\"\"']=Double3\n \n \n \nsingle_quoted=set()\ntriple_quoted=set()\nfor t in _all_string_prefixes():\n for u in (t+'\"',t+\"'\"):\n single_quoted.add(u)\n for u in (t+'\"\"\"',t+\"'''\"):\n triple_quoted.add(u)\n \ntabsize=8\n\nclass TokenError(Exception):pass\n\nclass StopTokenizing(Exception):pass\n\n\nclass Untokenizer:\n\n def __init__(self):\n self.tokens=[]\n self.prev_row=1\n self.prev_col=0\n self.encoding=None\n \n def add_whitespace(self,start):\n row,col=start\n if row =len(indent):\n self.tokens.append(indent)\n self.prev_col=len(indent)\n startline=False\n self.add_whitespace(start)\n self.tokens.append(token)\n self.prev_row,self.prev_col=end\n if tok_type in (NEWLINE,NL):\n self.prev_row +=1\n self.prev_col=0\n return \"\".join(self.tokens)\n \n def compat(self,token,iterable):\n indents=[]\n toks_append=self.tokens.append\n startline=token[0]in (NEWLINE,NL)\n prevstring=False\n \n for tok in chain([token],iterable):\n toknum,tokval=tok[:2]\n if toknum ==ENCODING:\n self.encoding=tokval\n continue\n \n if toknum in (NAME,NUMBER):\n tokval +=' '\n \n \n if toknum ==STRING:\n if prevstring:\n tokval=' '+tokval\n prevstring=True\n else :\n prevstring=False\n \n if toknum ==INDENT:\n indents.append(tokval)\n continue\n elif toknum ==DEDENT:\n indents.pop()\n continue\n elif toknum in (NEWLINE,NL):\n startline=True\n elif startline and indents:\n toks_append(indents[-1])\n startline=False\n toks_append(tokval)\n \n \ndef untokenize(iterable):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n ut=Untokenizer()\n out=ut.untokenize(iterable)\n if ut.encoding is not None :\n out=out.encode(ut.encoding)\n return out\n \n \ndef _get_normal_name(orig_enc):\n ''\n \n enc=orig_enc[:12].lower().replace(\"_\",\"-\")\n if enc ==\"utf-8\"or enc.startswith(\"utf-8-\"):\n return \"utf-8\"\n if enc in (\"latin-1\",\"iso-8859-1\",\"iso-latin-1\")or\\\n enc.startswith((\"latin-1-\",\"iso-8859-1-\",\"iso-latin-1-\")):\n return \"iso-8859-1\"\n return orig_enc\n \ndef detect_encoding(readline):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n try :\n filename=readline.__self__.name\n except AttributeError:\n filename=None\n bom_found=False\n encoding=None\n default='utf-8'\n def read_or_stop():\n try :\n return readline()\n except StopIteration:\n return b''\n \n def find_cookie(line):\n 
try :\n \n \n \n line_string=line.decode('utf-8')\n except UnicodeDecodeError:\n msg=\"invalid or missing encoding declaration\"\n if filename is not None :\n msg='{} for {!r}'.format(msg,filename)\n raise SyntaxError(msg)\n \n match=cookie_re.match(line_string)\n if not match:\n return None\n encoding=_get_normal_name(match.group(1))\n try :\n codec=lookup(encoding)\n except LookupError:\n \n if filename is None :\n msg=\"unknown encoding: \"+encoding\n else :\n msg=\"unknown encoding for {!r}: {}\".format(filename,\n encoding)\n raise SyntaxError(msg)\n \n if bom_found:\n if encoding !='utf-8':\n \n if filename is None :\n msg='encoding problem: utf-8'\n else :\n msg='encoding problem for {!r}: utf-8'.format(filename)\n raise SyntaxError(msg)\n encoding +='-sig'\n return encoding\n \n first=read_or_stop()\n if first.startswith(BOM_UTF8):\n bom_found=True\n first=first[3:]\n default='utf-8-sig'\n if not first:\n return default,[]\n \n encoding=find_cookie(first)\n if encoding:\n return encoding,[first]\n if not blank_re.match(first):\n return default,[first]\n \n second=read_or_stop()\n if not second:\n return default,[first]\n \n encoding=find_cookie(second)\n if encoding:\n return encoding,[first,second]\n \n return default,[first,second]\n \n \ndef open(filename):\n ''\n\n \n buffer=_builtin_open(filename,'rb')\n try :\n encoding,lines=detect_encoding(buffer.readline)\n buffer.seek(0)\n text=TextIOWrapper(buffer,encoding,line_buffering=True )\n text.mode='r'\n return text\n except :\n buffer.close()\n raise\n \n \ndef tokenize(readline):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n \n from itertools import chain,repeat\n encoding,consumed=detect_encoding(readline)\n rl_gen=iter(readline,b\"\")\n empty=repeat(b\"\")\n return _tokenize(chain(consumed,rl_gen,empty).__next__,encoding)\n \n \ndef _tokenize(readline,encoding):\n lnum=parenlev=continued=0\n numchars='0123456789'\n contstr,needcont='',0\n contline=None\n indents=[0]\n \n if encoding is not None :\n if encoding ==\"utf-8-sig\":\n \n encoding=\"utf-8\"\n yield TokenInfo(ENCODING,encoding,(0,0),(0,0),'')\n while True :\n try :\n line=readline()\n except StopIteration:\n line=b''\n \n if encoding is not None :\n line=line.decode(encoding)\n lnum +=1\n pos,max=0,len(line)\n \n if contstr:\n if not line:\n raise TokenError(\"EOF in multi-line string\",strstart)\n endmatch=endprog.match(line)\n if endmatch:\n pos=end=endmatch.end(0)\n yield TokenInfo(STRING,contstr+line[:end],\n strstart,(lnum,end),contline+line)\n contstr,needcont='',0\n contline=None\n elif needcont and line[-2:]!='\\\\\\n'and line[-3:]!='\\\\\\r\\n':\n yield TokenInfo(ERRORTOKEN,contstr+line,\n strstart,(lnum,len(line)),contline)\n contstr=''\n contline=None\n continue\n else :\n contstr=contstr+line\n contline=contline+line\n continue\n \n elif parenlev ==0 and not continued:\n if not line:break\n column=0\n while pos indents[-1]:\n indents.append(column)\n yield TokenInfo(INDENT,line[:pos],(lnum,0),(lnum,pos),line)\n while column \",lnum,pos,line))\n indents=indents[:-1]\n \n yield TokenInfo(DEDENT,'',(lnum,pos),(lnum,pos),line)\n \n else :\n if not line:\n raise TokenError(\"EOF in multi-line statement\",(lnum,0))\n continued=0\n \n while pos 0:\n yield TokenInfo(NL,token,spos,epos,line)\n else :\n yield TokenInfo(NEWLINE,token,spos,epos,line)\n \n elif initial =='#':\n assert not token.endswith(\"\\n\")\n yield TokenInfo(COMMENT,token,spos,epos,line)\n \n elif token in triple_quoted:\n endprog=_compile(endpats[token])\n endmatch=endprog.match(line,pos)\n if 
endmatch:\n pos=endmatch.end(0)\n token=line[start:pos]\n yield TokenInfo(STRING,token,spos,(lnum,pos),line)\n else :\n strstart=(lnum,start)\n contstr=line[start:]\n contline=line\n break\n \n \n \n \n \n \n \n \n \n \n \n elif (initial in single_quoted or\n token[:2]in single_quoted or\n token[:3]in single_quoted):\n if token[-1]=='\\n':\n strstart=(lnum,start)\n \n \n \n \n \n \n endprog=_compile(endpats.get(initial)or\n endpats.get(token[1])or\n endpats.get(token[2]))\n contstr,needcont=line[start:],1\n contline=line\n break\n else :\n yield TokenInfo(STRING,token,spos,epos,line)\n \n elif initial.isidentifier():\n yield TokenInfo(NAME,token,spos,epos,line)\n elif initial =='\\\\':\n continued=1\n else :\n if initial in '([{':\n parenlev +=1\n elif initial in ')]}':\n parenlev -=1\n yield TokenInfo(OP,token,spos,epos,line)\n else :\n yield TokenInfo(ERRORTOKEN,line[pos],\n (lnum,pos),(lnum,pos+1),line)\n pos +=1\n \n for indent in indents[1:]:\n yield TokenInfo(DEDENT,'',(lnum,0),(lnum,0),'')\n yield TokenInfo(ENDMARKER,'',(lnum,0),(lnum,0),'')\n \n \n \n \ndef generate_tokens(readline):\n return _tokenize(readline,None )\n \ndef main():\n import argparse\n \n \n def perror(message):\n print(message,file=sys.stderr)\n \n def error(message,filename=None ,location=None ):\n if location:\n args=(filename,)+location+(message,)\n perror(\"%s:%d:%d: error: %s\"%args)\n elif filename:\n perror(\"%s: error: %s\"%(filename,message))\n else :\n perror(\"error: %s\"%message)\n sys.exit(1)\n \n \n parser=argparse.ArgumentParser(prog='python -m tokenize')\n parser.add_argument(dest='filename',nargs='?',\n metavar='filename.py',\n help='the file to tokenize; defaults to stdin')\n parser.add_argument('-e','--exact',dest='exact',action='store_true',\n help='display token names using the exact type')\n args=parser.parse_args()\n \n try :\n \n if args.filename:\n filename=args.filename\n with _builtin_open(filename,'rb')as f:\n tokens=list(tokenize(f.readline))\n else :\n filename=\"\"\n tokens=_tokenize(sys.stdin.readline,None )\n \n \n for token in tokens:\n token_type=token.type\n if args.exact:\n token_type=token.exact_type\n token_range=\"%d,%d-%d,%d:\"%(token.start+token.end)\n print(\"%-20s%-15s%-15r\"%\n (token_range,tok_name[token_type],token.string))\n except IndentationError as err:\n line,column=err.args[1][1:3]\n error(err.args[0],filename,(line,column))\n except TokenError as err:\n line,column=err.args[1]\n error(err.args[0],filename,(line,column))\n except SyntaxError as err:\n error(err,filename)\n except OSError as err:\n error(err)\n except KeyboardInterrupt:\n print(\"interrupted\\n\")\n except Exception as err:\n perror(\"unexpected error: %s\"%err)\n raise\n \nif __name__ ==\"__main__\":\n main()\n", ["argparse", "builtins", "codecs", "collections", "io", "itertools", "re", "sys", "token"]], "traceback": [".py", "''\n\nimport collections\nimport itertools\nimport linecache\nimport sys\n\n__all__=['extract_stack','extract_tb','format_exception',\n'format_exception_only','format_list','format_stack',\n'format_tb','print_exc','format_exc','print_exception',\n'print_last','print_stack','print_tb','clear_frames',\n'FrameSummary','StackSummary','TracebackException',\n'walk_stack','walk_tb']\n\n\n\n\n\ndef print_list(extracted_list,file=None ):\n ''\n \n if file is None :\n file=sys.stderr\n for item in StackSummary.from_list(extracted_list).format():\n print(item,file=file,end=\"\")\n \ndef format_list(extracted_list):\n ''\n\n\n\n\n\n\n\n \n return 
StackSummary.from_list(extracted_list).format()\n \n \n \n \n \ndef print_tb(tb,limit=None ,file=None ):\n ''\n\n\n\n\n\n \n print_list(extract_tb(tb,limit=limit),file=file)\n \ndef format_tb(tb,limit=None ):\n ''\n return extract_tb(tb,limit=limit).format()\n \ndef extract_tb(tb,limit=None ):\n ''\n\n\n\n\n\n\n\n\n \n return StackSummary.extract(walk_tb(tb),limit=limit)\n \n \n \n \n \n_cause_message=(\n\"\\nThe above exception was the direct cause \"\n\"of the following exception:\\n\\n\")\n\n_context_message=(\n\"\\nDuring handling of the above exception, \"\n\"another exception occurred:\\n\\n\")\n\n\ndef print_exception(etype,value,tb,limit=None ,file=None ,chain=True ):\n ''\n\n\n\n\n\n\n\n\n \n \n \n \n if file is None :\n file=sys.stderr\n for line in TracebackException(\n type(value),value,tb,limit=limit).format(chain=chain):\n print(line,file=file,end=\"\")\n \n \ndef format_exception(etype,value,tb,limit=None ,chain=True ):\n ''\n\n\n\n\n\n\n \n \n \n \n return list(TracebackException(\n type(value),value,tb,limit=limit).format(chain=chain))\n \n \ndef format_exception_only(etype,value):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n return list(TracebackException(etype,value,None ).format_exception_only())\n \n \n \n \ndef _format_final_exc_line(etype,value):\n valuestr=_some_str(value)\n if value is None or not valuestr:\n line=\"%s\\n\"%etype\n else :\n line=\"%s: %s\\n\"%(etype,valuestr)\n return line\n \ndef _some_str(value):\n try :\n return str(value)\n except :\n return ''%type(value).__name__\n \n \n \ndef print_exc(limit=None ,file=None ,chain=True ):\n ''\n print_exception(*sys.exc_info(),limit=limit,file=file,chain=chain)\n \ndef format_exc(limit=None ,chain=True ):\n ''\n return \"\".join(format_exception(*sys.exc_info(),limit=limit,chain=chain))\n \ndef print_last(limit=None ,file=None ,chain=True ):\n ''\n \n if not hasattr(sys,\"last_type\"):\n raise ValueError(\"no last exception\")\n print_exception(sys.last_type,sys.last_value,sys.last_traceback,\n limit,file,chain)\n \n \n \n \n \ndef print_stack(f=None ,limit=None ,file=None ):\n ''\n\n\n\n\n \n if f is None :\n f=sys._getframe().f_back\n print_list(extract_stack(f,limit=limit),file=file)\n \n \ndef format_stack(f=None ,limit=None ):\n ''\n if f is None :\n f=sys._getframe().f_back\n return format_list(extract_stack(f,limit=limit))\n \n \ndef extract_stack(f=None ,limit=None ):\n ''\n\n\n\n\n\n\n \n if f is None :\n f=sys._getframe().f_back\n stack=StackSummary.extract(walk_stack(f),limit=limit)\n stack.reverse()\n return stack\n \n \ndef clear_frames(tb):\n ''\n while tb is not None :\n try :\n tb.tb_frame.clear()\n except RuntimeError:\n \n pass\n tb=tb.tb_next\n \n \nclass FrameSummary:\n ''\n\n\n\n\n\n\n\n\n\n\n \n \n __slots__=('filename','lineno','name','_line','locals')\n \n def __init__(self,filename,lineno,name,*,lookup_line=True ,\n locals=None ,line=None ):\n ''\n\n\n\n\n\n\n\n \n self.filename=filename\n self.lineno=lineno\n self.name=name\n self._line=line\n if lookup_line:\n self.line\n self.locals={k:repr(v)for k,v in locals.items()}if locals else None\n \n def __eq__(self,other):\n if isinstance(other,FrameSummary):\n return (self.filename ==other.filename and\n self.lineno ==other.lineno and\n self.name ==other.name and\n self.locals ==other.locals)\n if isinstance(other,tuple):\n return (self.filename,self.lineno,self.name,self.line)==other\n return NotImplemented\n \n def __getitem__(self,pos):\n return (self.filename,self.lineno,self.name,self.line)[pos]\n \n def __iter__(self):\n return 
iter([self.filename,self.lineno,self.name,self.line])\n \n def __repr__(self):\n return \"\".format(\n filename=self.filename,lineno=self.lineno,name=self.name)\n \n @property\n def line(self):\n if self._line is None :\n self._line=linecache.getline(self.filename,self.lineno).strip()\n return self._line\n \n \ndef walk_stack(f):\n ''\n\n\n\n \n if f is None :\n f=sys._getframe().f_back.f_back\n while f is not None :\n yield f,f.f_lineno\n f=f.f_back\n \n \ndef walk_tb(tb):\n ''\n\n\n\n \n while tb is not None :\n yield tb.tb_frame,tb.tb_lineno\n tb=tb.tb_next\n \n \nclass StackSummary(list):\n ''\n \n @classmethod\n def extract(klass,frame_gen,*,limit=None ,lookup_lines=True ,\n capture_locals=False ):\n ''\n\n\n\n\n\n\n\n\n\n \n if limit is None :\n limit=getattr(sys,'tracebacklimit',None )\n if limit is not None and limit <0:\n limit=0\n if limit is not None :\n if limit >=0:\n frame_gen=itertools.islice(frame_gen,limit)\n else :\n frame_gen=collections.deque(frame_gen,maxlen=-limit)\n \n result=klass()\n fnames=set()\n for f,lineno in frame_gen:\n co=f.f_code\n filename=co.co_filename\n name=co.co_name\n \n fnames.add(filename)\n linecache.lazycache(filename,f.f_globals)\n \n if capture_locals:\n f_locals=f.f_locals\n else :\n f_locals=None\n result.append(FrameSummary(\n filename,lineno,name,lookup_line=False ,locals=f_locals))\n for filename in fnames:\n linecache.checkcache(filename)\n \n if lookup_lines:\n for f in result:\n f.line\n return result\n \n @classmethod\n def from_list(klass,a_list):\n ''\n\n\n\n \n \n \n \n \n result=StackSummary()\n for frame in a_list:\n if isinstance(frame,FrameSummary):\n result.append(frame)\n else :\n filename,lineno,name,line=frame\n result.append(FrameSummary(filename,lineno,name,line=line))\n return result\n \n def format(self):\n ''\n\n\n\n\n\n\n\n\n\n \n result=[]\n last_file=None\n last_line=None\n last_name=None\n count=0\n for frame in self:\n if (last_file is not None and last_file ==frame.filename and\n last_line is not None and last_line ==frame.lineno and\n last_name is not None and last_name ==frame.name):\n count +=1\n else :\n if count >3:\n result.append(f' [Previous line repeated {count-3} more times]\\n')\n last_file=frame.filename\n last_line=frame.lineno\n last_name=frame.name\n count=0\n if count >=3:\n continue\n row=[]\n row.append(' File \"{}\", line {}, in {}\\n'.format(\n frame.filename,frame.lineno,frame.name))\n if frame.line:\n row.append(' {}\\n'.format(frame.line.strip()))\n if frame.locals:\n for name,value in sorted(frame.locals.items()):\n row.append(' {name} = {value}\\n'.format(name=name,value=value))\n result.append(''.join(row))\n if count >3:\n result.append(f' [Previous line repeated {count-3} more times]\\n')\n return result\n \n \nclass TracebackException:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def __init__(self,exc_type,exc_value,exc_traceback,*,limit=None ,\n lookup_lines=True ,capture_locals=False ,_seen=None ):\n \n \n \n \n if _seen is None :\n _seen=set()\n _seen.add(id(exc_value))\n \n \n if (exc_value and exc_value.__cause__ is not None\n and id(exc_value.__cause__)not in _seen):\n cause=TracebackException(\n type(exc_value.__cause__),\n exc_value.__cause__,\n exc_value.__cause__.__traceback__,\n limit=limit,\n lookup_lines=False ,\n capture_locals=capture_locals,\n _seen=_seen)\n else :\n cause=None\n if (exc_value and exc_value.__context__ is not None\n and id(exc_value.__context__)not in _seen):\n context=TracebackException(\n type(exc_value.__context__),\n 
exc_value.__context__,\n exc_value.__context__.__traceback__,\n limit=limit,\n lookup_lines=False ,\n capture_locals=capture_locals,\n _seen=_seen)\n else :\n context=None\n self.exc_traceback=exc_traceback\n self.__cause__=cause\n self.__context__=context\n self.__suppress_context__=\\\n exc_value.__suppress_context__ if exc_value else False\n \n self.stack=StackSummary.extract(\n walk_tb(exc_traceback),limit=limit,lookup_lines=lookup_lines,\n capture_locals=capture_locals)\n self.exc_type=exc_type\n \n \n self._str=_some_str(exc_value)\n if exc_type and issubclass(exc_type,SyntaxError):\n \n self.filename=exc_value.filename\n self.lineno=str(exc_value.lineno)\n self.text=exc_value.text\n self.offset=exc_value.offset\n self.msg=exc_value.msg\n if lookup_lines:\n self._load_lines()\n \n @classmethod\n def from_exception(cls,exc,*args,**kwargs):\n ''\n return cls(type(exc),exc,exc.__traceback__,*args,**kwargs)\n \n def _load_lines(self):\n ''\n for frame in self.stack:\n frame.line\n if self.__context__:\n self.__context__._load_lines()\n if self.__cause__:\n self.__cause__._load_lines()\n \n def __eq__(self,other):\n return self.__dict__ ==other.__dict__\n \n def __str__(self):\n return self._str\n \n def format_exception_only(self):\n ''\n\n\n\n\n\n\n\n\n\n\n \n if self.exc_type is None :\n yield _format_final_exc_line(None ,self._str)\n return\n \n stype=self.exc_type.__qualname__\n smod=self.exc_type.__module__\n if smod not in (\"__main__\",\"builtins\"):\n stype=smod+'.'+stype\n \n if not issubclass(self.exc_type,SyntaxError):\n yield _format_final_exc_line(stype,self._str)\n return\n \n \n filename=self.filename or \"\"\n lineno=str(self.lineno)or '?'\n yield ' File \"{}\", line {}\\n'.format(filename,lineno)\n \n badline=self.text\n offset=self.offset\n if badline is not None :\n yield ' {}\\n'.format(badline.strip())\n if offset is not None :\n caretspace=badline.rstrip('\\n')\n offset=min(len(caretspace),offset)-1\n caretspace=caretspace[:offset].lstrip()\n \n caretspace=((c.isspace()and c or ' ')for c in caretspace)\n yield ' {}^\\n'.format(''.join(caretspace))\n msg=self.msg or \"\"\n yield \"{}: {}\\n\".format(stype,msg)\n \n def format(self,*,chain=True ):\n ''\n\n\n\n\n\n\n\n\n\n \n if chain:\n if self.__cause__ is not None :\n yield from self.__cause__.format(chain=chain)\n yield _cause_message\n elif (self.__context__ is not None and\n not self.__suppress_context__):\n yield from self.__context__.format(chain=chain)\n yield _context_message\n if self.exc_traceback is not None :\n yield 'Traceback (most recent call last):\\n'\n yield from self.stack.format()\n yield from self.format_exception_only()\n", ["collections", "itertools", "linecache", "sys"]], "turtle": [".py", "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nimport math\nimport sys\n\nfrom math import cos,sin\n\nfrom browser import console,document,html,timer\nimport _svg as svg\nimport copy\n\n\n\n\n\n_CFG={\n\n\n\"canvwidth\":500,\n\"canvheight\":500,\n\n\n\"mode\":\"standard\",\n\n\n\n\"shape\":\"classic\",\n\"pencolor\":\"black\",\n\"fillcolor\":\"black\",\n\n\"visible\":True ,\n\n\n\n\n\n\n\n\"turtle_canvas_wrapper\":None ,\n\"turtle_canvas_id\":\"turtle-canvas\",\n\"min_duration\":\"1ms\"\n}\n\n_cfg_copy=copy.copy(_CFG)\n\n\ndef set_defaults(**params):\n ''\n _CFG.update(**params)\n Screen().reset()\n \n \nclass FormattedTuple(tuple):\n ''\n def __new__(cls,x,y):\n return tuple.__new__(cls,(x,y))\n def __repr__(self):\n return \"(%.2f, %.2f)\"%self\n \ndef create_circle(r):\n ''\n 
circle=svg.circle(x=0,y=0,r=r,stroke=\"black\",fill=\"black\")\n circle.setAttribute(\"stroke-width\",1)\n return circle\n \n \ndef create_polygon(points):\n ''\n points=[\"%s,%s \"%(x,y)for x,y in points]\n polygon=svg.polygon(points=points,stroke=\"black\",fill=\"black\")\n polygon.setAttribute(\"stroke-width\",1)\n return polygon\n \n \ndef create_rectangle(width=2,height=2,rx=None ,ry=None ):\n ''\n \n rectangle=svg.rect(x=-width /2,y=-height /2,width=width,\n height=height,stroke=\"black\",fill=\"black\")\n rectangle.setAttribute(\"stroke-width\",1)\n if rx is not None :\n rectangle.setAttribute(\"rx\",rx)\n if ry is not None :\n rectangle.setAttribute(\"ry\",ry)\n return rectangle\n \n \ndef create_square(size=2,r=None ):\n ''\n \n return create_rectangle(width=size,height=size,rx=r,ry=r)\n \n \nclass TurtleGraphicsError(Exception):\n ''\n \n pass\n \n \nclass Singleton(type):\n _instances={}\n def __call__(cls,*args,**kwargs):\n if cls not in cls._instances:\n cls._instances[cls]=super(Singleton,cls).__call__(*args,**kwargs)\n return cls._instances[cls]\n \n \nclass Screen(metaclass=Singleton):\n\n def __init__(self):\n self.shapes={\n 'arrow':(create_polygon,((-10,0),(10,0),(0,10))),\n 'turtle':(create_polygon,((0,16),(-2,14),(-1,10),(-4,7),\n (-7,9),(-9,8),(-6,5),(-7,1),(-5,-3),(-8,-6),\n (-6,-8),(-4,-5),(0,-7),(4,-5),(6,-8),(8,-6),\n (5,-3),(7,1),(6,5),(9,8),(7,9),(4,7),(1,10),\n (2,14))),\n 'classic':(create_polygon,((0,0),(-5,-9),(0,-7),(5,-9))),\n 'triangle':(create_polygon,((10,-5.77),(0,11.55),(-10,-5.77))),\n 'square':(create_square,20),\n 'circle':(create_circle,10)\n }\n self.reset()\n self._set_geometry()\n \n def bgcolor(self,color=None ):\n ''\n\n \n if color is None :\n return self.background_color\n self.background_color=color\n width=_CFG['canvwidth']\n height=_CFG['canvheight']\n if self.mode()in ['logo','standard']:\n x=-width //2\n y=-height //2\n else :\n x=0\n y=-height\n \n self.frame_index +=1\n rect=svg.rect(x=x,y=y,width=width,height=height,fill=color,\n style={'display':'none'})\n an=svg.animate(Id=\"animation_frame%s\"%self.frame_index,\n attributeName=\"display\",attributeType=\"CSS\",\n From=\"block\",to=\"block\",dur=_CFG[\"min_duration\"],\n fill='freeze')\n an.setAttribute('begin',\"animation_frame%s.end\"%(self.frame_index -1))\n rect <=an\n \n self.background_canvas <=rect\n \n def _convert_coordinates(self,x,y):\n ''\n\n\n\n\n\n \n return x *self.yscale,self.y_points_down *y *self.yscale\n \n \n def create_svg_turtle(self,_turtle,name):\n if name in self.shapes:\n fn=self.shapes[name][0]\n arg=self.shapes[name][1]\n else :\n print(\"Unknown turtle '%s'; the default turtle will be used\")\n fn=self.shapes[_CVG[\"shape\"]][0]\n arg=self.shapes[_CVG[\"shape\"]][1]\n shape=fn(arg)\n if self._mode =='standard'or self._mode =='world':\n rotation=-90\n else :\n rotation=0\n return shape,rotation\n \n def _dot(self,pos,size,color):\n ''\n if color is None :\n color='black'\n if size is None or size <1:\n size=1\n self.frame_index +=1\n \n x,y=self._convert_coordinates(pos[0],pos[1])\n \n circle=svg.circle(cx=x,cy=y,r=size,fill=color,\n style={'display':'none'})\n an=svg.animate(Id=\"animation_frame%s\"%self.frame_index,\n attributeName=\"display\",attributeType=\"CSS\",\n From=\"block\",to=\"block\",dur=_CFG[\"min_duration\"],\n fill='freeze')\n an.setAttribute('begin',\"animation_frame%s.end\"%(self.frame_index -1))\n circle <=an\n self.canvas <=circle\n \n def _drawline(self,_turtle,coordlist=None ,\n color=None ,width=1,speed=None ):\n ''\n\n\n\n\n \n 
\n outline=color[0]\n fill=color[1]\n \n x0,y0=coordlist[0]\n x1,y1=coordlist[1]\n \n x0,y0=self._convert_coordinates(x0,y0)\n x1,y1=self._convert_coordinates(x1,y1)\n \n \n if speed ==0:\n duration=_CFG[\"min_duration\"]\n else :\n dist=_turtle._distance\n if speed is None or speed ==1:\n duration=0.02 *dist\n else :\n duration=0.02 *dist /speed **1.2\n if duration <0.001:\n duration=_CFG[\"min_duration\"]\n else :\n duration=\"%6.3fs\"%duration\n \n drawing=_turtle._drawing\n \n _line=svg.line(x1=x0,y1=y0,x2=x0,y2=y0,\n style={'stroke':outline,'stroke-width':width})\n if not drawing:\n _line.setAttribute('opacity',0)\n \n \n begin=\"animation_frame%s.end\"%self.frame_index\n self.frame_index +=1\n _an1=svg.animate(Id=\"animation_frame%s\"%self.frame_index,\n attributeName=\"x2\",attributeType=\"XML\",\n From=x0,to=x1,dur=duration,fill='freeze',\n begin=begin)\n _line <=_an1\n \n \n if drawing:\n _an2=svg.animate(attributeName=\"y2\",attributeType=\"XML\",\n begin=begin,\n From=y0,to=y1,dur=duration,fill='freeze')\n _line <=_an2\n \n if width >2:\n _line_cap=svg.set(attributeName=\"stroke-linecap\",\n begin=begin,\n attributeType=\"xml\",to=\"round\",dur=duration,fill='freeze')\n _line <=_line_cap\n \n self.canvas <=_line\n return begin,duration,(x0,y0),(x1,y1)\n \n def _drawpoly(self,coordlist,outline=None ,fill=None ,width=None ):\n ''\n\n\n\n\n \n self.frame_index +=1\n shape=[\"%s,%s\"%self._convert_coordinates(x,y)for x,y in coordlist]\n \n style={'display':'none'}\n if fill is not None :\n style['fill']=fill\n if outline is not None :\n style['stroke']=outline\n if width is not None :\n style['stroke-width']=width\n else :\n style['stroke-width']=1\n \n polygon=svg.polygon(points=\" \".join(shape),style=style)\n \n an=svg.animate(Id=\"animation_frame%s\"%self.frame_index,\n attributeName=\"display\",attributeType=\"CSS\",\n From=\"block\",to=\"block\",dur=_CFG[\"min_duration\"],\n fill='freeze')\n \n an.setAttribute('begin',\"animation_frame%s.end\"%(self.frame_index -1))\n polygon <=an\n self.canvas <=polygon\n \n \n def _new_frame(self):\n ''\n \n previous_end=\"animation_frame%s.end\"%self.frame_index\n self.frame_index +=1\n new_frame_id=\"animation_frame%s\"%self.frame_index\n return previous_end,new_frame_id\n \n def mode(self,_mode=None ):\n if _mode is None :\n return self._mode\n _CFG['mode']=_mode\n self.reset()\n \n \n def reset(self):\n self._turtles=[]\n self.frame_index=0\n self.background_color=\"white\"\n self._set_geometry()\n \n def _set_geometry(self):\n self.width=_CFG[\"canvwidth\"]\n self.height=_CFG[\"canvheight\"]\n self.x_offset=self.y_offset=0\n self.xscale=self.yscale=1\n \n self.y_points_down=-1\n self._mode=_CFG[\"mode\"].lower()\n if self._mode in ['logo','standard']:\n self.translate_canvas=(self.width //2,self.height //2)\n elif self._mode =='world':\n self.translate_canvas=(0,self.height)\n self._setup_canvas()\n \n def _setup_canvas(self):\n self.svg_scene=svg.svg(Id=_CFG[\"turtle_canvas_id\"],width=self.width,\n height=self.height)\n translate=\"translate(%d %d)\"%self.translate_canvas\n \n \n self.svg_scene <=svg.animate(\n Id=\"animation_frame%s\"%self.frame_index,\n attributeName=\"width\",attributeType=\"CSS\",\n From=self.width,to=self.width,begin=\"0s\",\n dur=_CFG[\"min_duration\"],fill='freeze')\n \n \n \n \n self.background_canvas=svg.g(transform=translate)\n self.canvas=svg.g(transform=translate)\n self.writing_canvas=svg.g(transform=translate)\n self.turtle_canvas=svg.g(transform=translate)\n \n self.svg_scene <=self.background_canvas\n 
self.svg_scene <=self.canvas\n self.svg_scene <=self.writing_canvas\n self.svg_scene <=self.turtle_canvas\n \n \n def setworldcoordinates(self,llx,lly,urx,ury):\n ''\n\n\n\n\n\n\n\n\n\n\n \n self._mode=\"world\"\n \n if urx 2:\n self.screen._drawpoly(self._fillpath,outline=self._pencolor,\n fill=self._fillcolor,)\n else :\n print(\"No path to fill.\")\n self._fillpath=None\n \n def dot(self,size=None ,color=None ):\n ''\n \n item=self.screen._dot((self._x,self._y),size,color=color)\n \n def _write(self,txt,align,font,color=None ):\n ''\n \n if color is None :\n color=self._pencolor\n self.screen._write((self._x,self._y),txt,align,font,color)\n \n \n def write(self,arg,align=\"left\",font=(\"Arial\",8,\"normal\"),color=None ):\n ''\n\n\n\n\n\n\n \n self._write(str(arg),align.lower(),font,color=color)\n \n def begin_poly(self):\n ''\n \n self._poly=[(self._x,self._y)]\n self._creatingPoly=True\n \n def end_poly(self):\n ''\n \n self._creatingPoly=False\n \n def get_poly(self):\n ''\n \n \n if self._poly is not None :\n return tuple(self._poly)\n \n def getscreen(self):\n ''\n \n return self.screen\n \n def getturtle(self):\n ''\n\n\n \n return self\n getpen=getturtle\n \n def _make_copy(self,name=None ):\n ''\n\n \n \n if name is None :\n name=self.name\n \n \n \n \n \n _turtle,rotation=self.screen.create_svg_turtle(self,name=name)\n _turtle.setAttribute(\"opacity\",0)\n _turtle.setAttribute(\"fill\",self._fillcolor)\n _turtle.setAttribute(\"stroke\",self._pencolor)\n \n \n \n previous_end,new_frame_id=self.screen._new_frame()\n x,y=self.screen._convert_coordinates(self._x,self._y)\n _turtle <=svg.animateMotion(begin=previous_end,dur=_CFG[\"min_duration\"],\n fill=\"remove\")\n \n _turtle <=svg.animateMotion(Id=new_frame_id,\n From=\"%s,%s\"%(x,y),to=\"%s,%s\"%(x,y),\n dur=_CFG[\"min_duration\"],begin=previous_end,\n fill=\"freeze\")\n _turtle <=svg.animateTransform(attributeName=\"transform\",\n type=\"rotate\",\n From=(self._old_heading,0,0),\n to=(self._old_heading,0,0),\n begin=previous_end,\n dur=_CFG[\"min_duration\"],fill=\"freeze\")\n _turtle <=svg.animate(begin=previous_end,\n dur=_CFG[\"min_duration\"],fill=\"freeze\",\n attributeName=\"opacity\",attributeType=\"XML\",\n From=0,to=1)\n return _turtle\n \n def stamp(self):\n ''\n \n _turtle=self._make_copy(name=self.name)\n self.screen.canvas <=_turtle\n \n \n def clone(self):\n ''\n \n n=Turtle(self.name)\n \n attrs=vars(self)\n new_dict={}\n for attr in attrs:\n if isinstance(getattr(self,attr),(int,str,float)):\n new_dict[attr]=getattr(self,attr)\n n.__dict__.update(**new_dict)\n \n if not n._shown:\n n._shown=True\n n.hideturtle()\n n.left(0)\n n.fd(0)\n n.color(n.color())\n return n\n \n \nPen=Turtle\n\n\ndef done():\n Screen().show_scene()\nshow_scene=done\n\n\ndef replay_scene():\n ''\n if (_CFG[\"turtle_canvas_id\"]in document and\n document[_CFG[\"turtle_canvas_id\"]]is not None ):\n element=document[_CFG[\"turtle_canvas_id\"]]\n element.parentNode.removeChild(element)\n show_scene()\n \n \ndef restart():\n ''\n _CFG.update(_cfg_copy)\n Screen().reset()\n Turtle._pen=None\n \n if (_CFG[\"turtle_canvas_id\"]in document and\n document[_CFG[\"turtle_canvas_id\"]]is not None ):\n element=document[_CFG[\"turtle_canvas_id\"]]\n element.parentNode.removeChild(element)\n \n \n \nimport inspect\ndef getmethparlist(ob):\n ''\n\n\n\n\n\n \n defText=callText=\"\"\n \n \n \n args,varargs,varkw=inspect.getargs(ob.__code__)\n items2=args[1:]\n realArgs=args[1:]\n defaults=ob.__defaults__ or []\n defaults=[\"=%r\"%(value,)for value in 
defaults]\n defaults=[\"\"]*(len(realArgs)-len(defaults))+defaults\n items1=[arg+dflt for arg,dflt in zip(realArgs,defaults)]\n if varargs is not None :\n items1.append(\"*\"+varargs)\n items2.append(\"*\"+varargs)\n if varkw is not None :\n items1.append(\"**\"+varkw)\n items2.append(\"**\"+varkw)\n defText=\", \".join(items1)\n defText=\"(%s)\"%defText\n callText=\", \".join(items2)\n callText=\"(%s)\"%callText\n return defText,callText\n \n_tg_screen_functions=['addshape','bgcolor','bgpic','bye',\n'clearscreen','colormode','delay','exitonclick','getcanvas',\n'getshapes','listen','mainloop','mode','numinput',\n'onkey','onkeypress','onkeyrelease','onscreenclick','ontimer',\n'register_shape','resetscreen','screensize','setup',\n'setworldcoordinates','textinput','title','tracer','turtles','update',\n'window_height','window_width']\n\n_tg_turtle_functions=['back','backward','begin_fill','begin_poly','bk',\n'circle','clear','clearstamp','clearstamps','clone','color',\n'degrees','distance','dot','down','end_fill','end_poly','fd',\n'fillcolor','filling','forward','get_poly','getpen','getscreen','get_shapepoly',\n'getturtle','goto','heading','hideturtle','home','ht','isdown',\n'isvisible','left','lt','onclick','ondrag','onrelease','pd',\n'pen','pencolor','pendown','pensize','penup','pos','position',\n'pu','radians','right','reset','resizemode','rt',\n'seth','setheading','setpos','setposition','settiltangle',\n'setundobuffer','setx','sety','shape','shapesize','shapetransform','shearfactor','showturtle',\n'speed','st','stamp','tilt','tiltangle','towards',\n'turtlesize','undo','undobufferentries','up','width',\n'write','xcor','ycor']\n\n\n__all__=(_tg_screen_functions+_tg_turtle_functions+\n['done','restart','replay_scene','Turtle','Screen'])\n\n\n\n\n\n__func_body=\"\"\"\\\ndef {name}{paramslist}:\n if {obj} is None:\n {obj} = {init}\n return {obj}.{name}{argslist}\n\"\"\"\ndef _make_global_funcs(functions,cls,obj,init):\n for methodname in functions:\n try :\n method=getattr(cls,methodname)\n except AttributeError:\n print(\"methodname missing:\",methodname)\n continue\n pl1,pl2=getmethparlist(method)\n defstr=__func_body.format(obj=obj,init=init,name=methodname,\n paramslist=pl1,argslist=pl2)\n exec(defstr,globals())\n \n_make_global_funcs(_tg_turtle_functions,Turtle,'Turtle._pen','Turtle()')\n\n_make_global_funcs(_tg_screen_functions,Screen,'Turtle.screen','Screen()')\n", ["_svg", "browser", "browser.html", "browser.timer", "copy", "inspect", "math", "sys"]], "types": [".py", "''\n\n\nimport sys\n\n\n\n\n\n\ndef _f():pass\nFunctionType=type(_f)\nLambdaType=type(lambda :None )\nCodeType=type(_f.__code__)\nMappingProxyType=type(type.__dict__)\nSimpleNamespace=type(sys.implementation)\n\ndef _g():\n yield 1\nGeneratorType=type(_g())\n\nasync def _c():pass\n_c=_c()\nCoroutineType=type(_c)\n_c.close()\n\nasync def _ag():\n yield\n_ag=_ag()\nAsyncGeneratorType=type(_ag)\n\nclass _C:\n def _m(self):pass\nMethodType=type(_C()._m)\n\nBuiltinFunctionType=type(len)\nBuiltinMethodType=type([].append)\n\nWrapperDescriptorType=type(object.__init__)\nMethodWrapperType=type(object().__str__)\nMethodDescriptorType=type(str.join)\nClassMethodDescriptorType=type(dict.__dict__['fromkeys'])\n\nModuleType=type(sys)\n\ntry :\n raise TypeError\nexcept TypeError:\n tb=sys.exc_info()[2]\n TracebackType=type(tb)\n FrameType=type(tb.tb_frame)\n tb=None ;del tb\n \n \nGetSetDescriptorType=type(FunctionType.__code__)\nMemberDescriptorType=type(FunctionType.__globals__)\n\ndel sys,_f,_g,_C,_c,\n\n\n\ndef 
new_class(name,bases=(),kwds=None ,exec_body=None ):\n ''\n resolved_bases=resolve_bases(bases)\n meta,ns,kwds=prepare_class(name,resolved_bases,kwds)\n if exec_body is not None :\n exec_body(ns)\n if resolved_bases is not bases:\n ns['__orig_bases__']=bases\n return meta(name,resolved_bases,ns,**kwds)\n \ndef resolve_bases(bases):\n ''\n new_bases=list(bases)\n updated=False\n shift=0\n for i,base in enumerate(bases):\n if isinstance(base,type):\n continue\n if not hasattr(base,\"__mro_entries__\"):\n continue\n new_base=base.__mro_entries__(bases)\n updated=True\n if not isinstance(new_base,tuple):\n raise TypeError(\"__mro_entries__ must return a tuple\")\n else :\n new_bases[i+shift:i+shift+1]=new_base\n shift +=len(new_base)-1\n if not updated:\n return bases\n return tuple(new_bases)\n \ndef prepare_class(name,bases=(),kwds=None ):\n ''\n\n\n\n\n\n\n\n\n \n if kwds is None :\n kwds={}\n else :\n kwds=dict(kwds)\n if 'metaclass'in kwds:\n meta=kwds.pop('metaclass')\n else :\n if bases:\n meta=type(bases[0])\n else :\n meta=type\n if isinstance(meta,type):\n \n \n meta=_calculate_meta(meta,bases)\n if hasattr(meta,'__prepare__'):\n ns=meta.__prepare__(name,bases,**kwds)\n else :\n ns={}\n return meta,ns,kwds\n \ndef _calculate_meta(meta,bases):\n ''\n winner=meta\n for base in bases:\n base_meta=type(base)\n if issubclass(winner,base_meta):\n continue\n if issubclass(base_meta,winner):\n winner=base_meta\n continue\n \n raise TypeError(\"metaclass conflict: \"\n \"the metaclass of a derived class \"\n \"must be a (non-strict) subclass \"\n \"of the metaclasses of all its bases\")\n return winner\n \nclass DynamicClassAttribute:\n ''\n\n\n\n\n\n\n\n\n\n \n def __init__(self,fget=None ,fset=None ,fdel=None ,doc=None ):\n self.fget=fget\n self.fset=fset\n self.fdel=fdel\n \n self.__doc__=doc or fget.__doc__\n self.overwrite_doc=doc is None\n \n self.__isabstractmethod__=bool(getattr(fget,'__isabstractmethod__',False ))\n \n def __get__(self,instance,ownerclass=None ):\n if instance is None :\n if self.__isabstractmethod__:\n return self\n raise AttributeError()\n elif self.fget is None :\n raise AttributeError(\"unreadable attribute\")\n return self.fget(instance)\n \n def __set__(self,instance,value):\n if self.fset is None :\n raise AttributeError(\"can't set attribute\")\n self.fset(instance,value)\n \n def __delete__(self,instance):\n if self.fdel is None :\n raise AttributeError(\"can't delete attribute\")\n self.fdel(instance)\n \n def getter(self,fget):\n fdoc=fget.__doc__ if self.overwrite_doc else None\n result=type(self)(fget,self.fset,self.fdel,fdoc or self.__doc__)\n result.overwrite_doc=self.overwrite_doc\n return result\n \n def setter(self,fset):\n result=type(self)(self.fget,fset,self.fdel,self.__doc__)\n result.overwrite_doc=self.overwrite_doc\n return result\n \n def deleter(self,fdel):\n result=type(self)(self.fget,self.fset,fdel,self.__doc__)\n result.overwrite_doc=self.overwrite_doc\n return result\n \n \nclass _GeneratorWrapper:\n\n def __init__(self,gen):\n self.__wrapped=gen\n self.__isgen=gen.__class__ is GeneratorType\n self.__name__=getattr(gen,'__name__',None )\n self.__qualname__=getattr(gen,'__qualname__',None )\n def send(self,val):\n return self.__wrapped.send(val)\n def throw(self,tp,*rest):\n return self.__wrapped.throw(tp,*rest)\n def close(self):\n return self.__wrapped.close()\n @property\n def gi_code(self):\n return self.__wrapped.gi_code\n @property\n def gi_frame(self):\n return self.__wrapped.gi_frame\n @property\n def gi_running(self):\n return 
self.__wrapped.gi_running\n @property\n def gi_yieldfrom(self):\n return self.__wrapped.gi_yieldfrom\n cr_code=gi_code\n cr_frame=gi_frame\n cr_running=gi_running\n cr_await=gi_yieldfrom\n def __next__(self):\n return next(self.__wrapped)\n def __iter__(self):\n if self.__isgen:\n return self.__wrapped\n return self\n __await__=__iter__\n \ndef coroutine(func):\n ''\n \n if not callable(func):\n raise TypeError('types.coroutine() expects a callable')\n \n if (func.__class__ is FunctionType and\n getattr(func,'__code__',None ).__class__ is CodeType):\n \n co_flags=func.__code__.co_flags\n \n \n \n if co_flags&0x180:\n return func\n \n \n \n if co_flags&0x20:\n \n co=func.__code__\n func.__code__=CodeType(\n co.co_argcount,co.co_kwonlyargcount,co.co_nlocals,\n co.co_stacksize,\n co.co_flags |0x100,\n co.co_code,\n co.co_consts,co.co_names,co.co_varnames,co.co_filename,\n co.co_name,co.co_firstlineno,co.co_lnotab,co.co_freevars,\n co.co_cellvars)\n return func\n \n \n \n \n \n \n import functools\n import _collections_abc\n @functools.wraps(func)\n def wrapped(*args,**kwargs):\n coro=func(*args,**kwargs)\n if (coro.__class__ is CoroutineType or\n coro.__class__ is GeneratorType and coro.gi_code.co_flags&0x100):\n \n return coro\n if (isinstance(coro,_collections_abc.Generator)and\n not isinstance(coro,_collections_abc.Coroutine)):\n \n \n \n return _GeneratorWrapper(coro)\n \n \n return coro\n \n return wrapped\n \n \n__all__=[n for n in globals()if n[:1]!='_']\n", ["_collections_abc", "functools", "sys"]], "typing": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nimport abc\nfrom abc import abstractmethod,abstractproperty\nimport collections\nimport collections.abc\nimport contextlib\nimport functools\nimport operator\nimport re as stdlib_re\nimport sys\nimport types\nfrom types import WrapperDescriptorType,MethodWrapperType,MethodDescriptorType\n\n\n__all__=[\n\n'Any',\n'Callable',\n'ClassVar',\n'Generic',\n'Optional',\n'Tuple',\n'Type',\n'TypeVar',\n'Union',\n\n\n'AbstractSet',\n'ByteString',\n'Container',\n'ContextManager',\n'Hashable',\n'ItemsView',\n'Iterable',\n'Iterator',\n'KeysView',\n'Mapping',\n'MappingView',\n'MutableMapping',\n'MutableSequence',\n'MutableSet',\n'Sequence',\n'Sized',\n'ValuesView',\n'Awaitable',\n'AsyncIterator',\n'AsyncIterable',\n'Coroutine',\n'Collection',\n'AsyncGenerator',\n'AsyncContextManager',\n\n\n'Reversible',\n'SupportsAbs',\n'SupportsBytes',\n'SupportsComplex',\n'SupportsFloat',\n'SupportsInt',\n'SupportsRound',\n\n\n'Counter',\n'Deque',\n'Dict',\n'DefaultDict',\n'List',\n'Set',\n'FrozenSet',\n'NamedTuple',\n'Generator',\n\n\n'AnyStr',\n'cast',\n'get_type_hints',\n'NewType',\n'no_type_check',\n'no_type_check_decorator',\n'NoReturn',\n'overload',\n'Text',\n'TYPE_CHECKING',\n]\n\n\n\n\n\n\ndef _type_check(arg,msg,is_argument=True ):\n ''\n\n\n\n\n\n\n\n\n\n \n invalid_generic_forms=(Generic,_Protocol)\n if is_argument:\n invalid_generic_forms=invalid_generic_forms+(ClassVar,)\n \n if arg is None :\n return type(None )\n if isinstance(arg,str):\n return ForwardRef(arg)\n if (isinstance(arg,_GenericAlias)and\n arg.__origin__ in invalid_generic_forms):\n raise TypeError(f\"{arg} is not valid as type argument\")\n if (isinstance(arg,_SpecialForm)and arg is not Any or\n arg in (Generic,_Protocol)):\n raise TypeError(f\"Plain {arg} is not valid as type argument\")\n if isinstance(arg,(type,TypeVar,ForwardRef)):\n return arg\n if not callable(arg):\n raise TypeError(f\"{msg} Got {arg!r:.100}.\")\n return arg\n \n \ndef _type_repr(obj):\n ''\n\n\n\n\n\n \n if 
isinstance(obj,type):\n if obj.__module__ =='builtins':\n return obj.__qualname__\n return f'{obj.__module__}.{obj.__qualname__}'\n if obj is ...:\n return ('...')\n if isinstance(obj,types.FunctionType):\n return obj.__name__\n return repr(obj)\n \n \ndef _collect_type_vars(types):\n ''\n\n\n\n \n tvars=[]\n for t in types:\n if isinstance(t,TypeVar)and t not in tvars:\n tvars.append(t)\n if isinstance(t,_GenericAlias)and not t._special:\n tvars.extend([t for t in t.__parameters__ if t not in tvars])\n return tuple(tvars)\n \n \ndef _subs_tvars(tp,tvars,subs):\n ''\n\n \n if not isinstance(tp,_GenericAlias):\n return tp\n new_args=list(tp.__args__)\n for a,arg in enumerate(tp.__args__):\n if isinstance(arg,TypeVar):\n for i,tvar in enumerate(tvars):\n if arg ==tvar:\n new_args[a]=subs[i]\n else :\n new_args[a]=_subs_tvars(arg,tvars,subs)\n if tp.__origin__ is Union:\n return Union[tuple(new_args)]\n return tp.copy_with(tuple(new_args))\n \n \ndef _check_generic(cls,parameters):\n ''\n\n \n if not cls.__parameters__:\n raise TypeError(f\"{cls} is not a generic class\")\n alen=len(parameters)\n elen=len(cls.__parameters__)\n if alen !=elen:\n raise TypeError(f\"Too {'many' if alen > elen else 'few'} parameters for {cls};\"\n f\" actual {alen}, expected {elen}\")\n \n \ndef _remove_dups_flatten(parameters):\n ''\n\n \n \n params=[]\n for p in parameters:\n if isinstance(p,_GenericAlias)and p.__origin__ is Union:\n params.extend(p.__args__)\n elif isinstance(p,tuple)and len(p)>0 and p[0]is Union:\n params.extend(p[1:])\n else :\n params.append(p)\n \n all_params=set(params)\n if len(all_params) NoReturn:\n raise Exception('no way')\n\n This type is invalid in other positions, e.g., ``List[NoReturn]``\n will fail in static type checkers.\n \"\"\")\n\nClassVar=_SpecialForm('ClassVar',doc=\n\"\"\"Special type construct to mark class variables.\n\n An annotation wrapped in ClassVar indicates that a given\n attribute is intended to be used as a class variable and\n should not be set on instances of that class. Usage::\n\n class Starship:\n stats: ClassVar[Dict[str, int]] = {} # class variable\n damage: int = 10 # instance variable\n\n ClassVar accepts only types and cannot be further subscribed.\n\n Note that ClassVar is not a class itself, and should not\n be used with isinstance() or issubclass().\n \"\"\")\n\nUnion=_SpecialForm('Union',doc=\n\"\"\"Union type; Union[X, Y] means either X or Y.\n\n To define a union, use e.g. Union[int, str]. 
Details:\n - The arguments must be types and there must be at least one.\n - None as an argument is a special case and is replaced by\n type(None).\n - Unions of unions are flattened, e.g.::\n\n Union[Union[int, str], float] == Union[int, str, float]\n\n - Unions of a single argument vanish, e.g.::\n\n Union[int] == int # The constructor actually returns int\n\n - Redundant arguments are skipped, e.g.::\n\n Union[int, str, int] == Union[int, str]\n\n - When comparing unions, the argument order is ignored, e.g.::\n\n Union[int, str] == Union[str, int]\n\n - You cannot subclass or instantiate a union.\n - You can use Optional[X] as a shorthand for Union[X, None].\n \"\"\")\n\nOptional=_SpecialForm('Optional',doc=\n\"\"\"Optional type.\n\n Optional[X] is equivalent to Union[X, None].\n \"\"\")\n\n\nclass ForwardRef(_Final,_root=True ):\n ''\n \n __slots__=('__forward_arg__','__forward_code__',\n '__forward_evaluated__','__forward_value__',\n '__forward_is_argument__')\n \n def __init__(self,arg,is_argument=True ):\n if not isinstance(arg,str):\n raise TypeError(f\"Forward reference must be a string -- got {arg!r}\")\n try :\n code=compile(arg,'','eval')\n except SyntaxError:\n raise SyntaxError(f\"Forward reference must be an expression -- got {arg!r}\")\n self.__forward_arg__=arg\n self.__forward_code__=code\n self.__forward_evaluated__=False\n self.__forward_value__=None\n self.__forward_is_argument__=is_argument\n \n def _evaluate(self,globalns,localns):\n if not self.__forward_evaluated__ or localns is not globalns:\n if globalns is None and localns is None :\n globalns=localns={}\n elif globalns is None :\n globalns=localns\n elif localns is None :\n localns=globalns\n self.__forward_value__=_type_check(\n eval(self.__forward_code__,globalns,localns),\n \"Forward references must evaluate to types.\",\n is_argument=self.__forward_is_argument__)\n self.__forward_evaluated__=True\n return self.__forward_value__\n \n def __eq__(self,other):\n if not isinstance(other,ForwardRef):\n return NotImplemented\n return (self.__forward_arg__ ==other.__forward_arg__ and\n self.__forward_value__ ==other.__forward_value__)\n \n def __hash__(self):\n return hash((self.__forward_arg__,self.__forward_value__))\n \n def __repr__(self):\n return f'ForwardRef({self.__forward_arg__!r})'\n \n \nclass TypeVar(_Final,_Immutable,_root=True ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n __slots__=('__name__','__bound__','__constraints__',\n '__covariant__','__contravariant__')\n \n def __init__(self,name,*constraints,bound=None ,\n covariant=False ,contravariant=False ):\n self.__name__=name\n if covariant and contravariant:\n raise ValueError(\"Bivariant types are not supported.\")\n self.__covariant__=bool(covariant)\n self.__contravariant__=bool(contravariant)\n if constraints and bound is not None :\n raise TypeError(\"Constraints cannot be combined with bound=...\")\n if constraints and len(constraints)==1:\n raise TypeError(\"A single constraint is not allowed\")\n msg=\"TypeVar(name, constraint, ...): constraints must be types.\"\n self.__constraints__=tuple(_type_check(t,msg)for t in constraints)\n if bound:\n self.__bound__=_type_check(bound,\"Bound must be a type.\")\n else :\n self.__bound__=None\n def_mod=sys._getframe(1).f_globals['__name__']\n if def_mod !='typing':\n self.__module__=def_mod\n \n def __repr__(self):\n if self.__covariant__:\n prefix='+'\n elif self.__contravariant__:\n prefix='-'\n else :\n prefix='~'\n return prefix+self.__name__\n \n 
def __reduce__(self):\n return self.__name__\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n_normalize_alias={'list':'List',\n'tuple':'Tuple',\n'dict':'Dict',\n'set':'Set',\n'frozenset':'FrozenSet',\n'deque':'Deque',\n'defaultdict':'DefaultDict',\n'type':'Type',\n'Set':'AbstractSet'}\n\ndef _is_dunder(attr):\n return attr.startswith('__')and attr.endswith('__')\n \n \nclass _GenericAlias(_Final,_root=True ):\n ''\n\n\n\n\n\n\n \n def __init__(self,origin,params,*,inst=True ,special=False ,name=None ):\n self._inst=inst\n self._special=special\n if special and name is None :\n orig_name=origin.__name__\n name=_normalize_alias.get(orig_name,orig_name)\n self._name=name\n if not isinstance(params,tuple):\n params=(params,)\n self.__origin__=origin\n self.__args__=tuple(...if a is _TypingEllipsis else\n ()if a is _TypingEmpty else\n a for a in params)\n self.__parameters__=_collect_type_vars(params)\n self.__slots__=None\n if not name:\n self.__module__=origin.__module__\n \n @_tp_cache\n def __getitem__(self,params):\n if self.__origin__ in (Generic,_Protocol):\n \n raise TypeError(f\"Cannot subscript already-subscripted {self}\")\n if not isinstance(params,tuple):\n params=(params,)\n msg=\"Parameters to generic types must be types.\"\n params=tuple(_type_check(p,msg)for p in params)\n _check_generic(self,params)\n return _subs_tvars(self,self.__parameters__,params)\n \n def copy_with(self,params):\n \n return _GenericAlias(self.__origin__,params,name=self._name,inst=self._inst)\n \n def __repr__(self):\n if (self._name !='Callable'or\n len(self.__args__)==2 and self.__args__[0]is Ellipsis):\n if self._name:\n name='typing.'+self._name\n else :\n name=_type_repr(self.__origin__)\n if not self._special:\n args=f'[{\", \".join([_type_repr(a) for a in self.__args__])}]'\n else :\n args=''\n return (f'{name}{args}')\n if self._special:\n return 'typing.Callable'\n return (f'typing.Callable'\n f'[[{\", \".join([_type_repr(a) for a in self.__args__[:-1]])}], '\n f'{_type_repr(self.__args__[-1])}]')\n \n def __eq__(self,other):\n if not isinstance(other,_GenericAlias):\n return NotImplemented\n if self.__origin__ !=other.__origin__:\n return False\n if self.__origin__ is Union and other.__origin__ is Union:\n return frozenset(self.__args__)==frozenset(other.__args__)\n return self.__args__ ==other.__args__\n \n def __hash__(self):\n if self.__origin__ is Union:\n return hash((Union,frozenset(self.__args__)))\n return hash((self.__origin__,self.__args__))\n \n def __call__(self,*args,**kwargs):\n if not self._inst:\n raise TypeError(f\"Type {self._name} cannot be instantiated; \"\n f\"use {self._name.lower()}() instead\")\n result=self.__origin__(*args,**kwargs)\n try :\n result.__orig_class__=self\n except AttributeError:\n pass\n return result\n \n def __mro_entries__(self,bases):\n if self._name:\n res=[]\n if self.__origin__ not in bases:\n res.append(self.__origin__)\n i=bases.index(self)\n if not any(isinstance(b,_GenericAlias)or issubclass(b,Generic)\n for b in bases[i+1:]):\n res.append(Generic)\n return tuple(res)\n if self.__origin__ is Generic:\n i=bases.index(self)\n for b in bases[i+1:]:\n if isinstance(b,_GenericAlias)and b is not self:\n return ()\n return (self.__origin__,)\n \n def __getattr__(self,attr):\n \n \n if '__origin__'in self.__dict__ and not _is_dunder(attr):\n return getattr(self.__origin__,attr)\n raise AttributeError(attr)\n \n def __setattr__(self,attr,val):\n if _is_dunder(attr)or attr in ('_name','_inst','_special'):\n super().__setattr__(attr,val)\n else :\n 
setattr(self.__origin__,attr,val)\n \n def __instancecheck__(self,obj):\n return self.__subclasscheck__(type(obj))\n \n def __subclasscheck__(self,cls):\n if self._special:\n if not isinstance(cls,_GenericAlias):\n return issubclass(cls,self.__origin__)\n if cls._special:\n return issubclass(cls.__origin__,self.__origin__)\n raise TypeError(\"Subscripted generics cannot be used with\"\n \" class and instance checks\")\n \n def __reduce__(self):\n if self._special:\n return self._name\n \n if self._name:\n origin=globals()[self._name]\n else :\n origin=self.__origin__\n if (origin is Callable and\n not (len(self.__args__)==2 and self.__args__[0]is Ellipsis)):\n args=list(self.__args__[:-1]),self.__args__[-1]\n else :\n args=tuple(self.__args__)\n if len(args)==1 and not isinstance(args[0],tuple):\n args,=args\n return operator.getitem,(origin,args)\n \n \nclass _VariadicGenericAlias(_GenericAlias,_root=True ):\n ''\n\n \n def __getitem__(self,params):\n if self._name !='Callable'or not self._special:\n return self.__getitem_inner__(params)\n if not isinstance(params,tuple)or len(params)!=2:\n raise TypeError(\"Callable must be used as \"\n \"Callable[[arg, ...], result].\")\n args,result=params\n if args is Ellipsis:\n params=(Ellipsis,result)\n else :\n if not isinstance(args,list):\n raise TypeError(f\"Callable[args, result]: args must be a list.\"\n f\" Got {args}\")\n params=(tuple(args),result)\n return self.__getitem_inner__(params)\n \n @_tp_cache\n def __getitem_inner__(self,params):\n if self.__origin__ is tuple and self._special:\n if params ==():\n return self.copy_with((_TypingEmpty,))\n if not isinstance(params,tuple):\n params=(params,)\n if len(params)==2 and params[1]is ...:\n msg=\"Tuple[t, ...]: t must be a type.\"\n p=_type_check(params[0],msg)\n return self.copy_with((p,_TypingEllipsis))\n msg=\"Tuple[t0, t1, ...]: each t must be a type.\"\n params=tuple(_type_check(p,msg)for p in params)\n return self.copy_with(params)\n if self.__origin__ is collections.abc.Callable and self._special:\n args,result=params\n msg=\"Callable[args, result]: result must be a type.\"\n result=_type_check(result,msg)\n if args is Ellipsis:\n return self.copy_with((_TypingEllipsis,result))\n msg=\"Callable[[arg, ...], result]: each arg must be a type.\"\n args=tuple(_type_check(arg,msg)for arg in args)\n params=args+(result,)\n return self.copy_with(params)\n return super().__getitem__(params)\n \n \nclass Generic:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n __slots__=()\n \n def __new__(cls,*args,**kwds):\n if cls is Generic:\n raise TypeError(\"Type Generic cannot be instantiated; \"\n \"it can be used only as a base class\")\n if super().__new__ is object.__new__ and cls.__init__ is not object.__init__:\n obj=super().__new__(cls)\n else :\n obj=super().__new__(cls,*args,**kwds)\n return obj\n \n @_tp_cache\n def __class_getitem__(cls,params):\n if not isinstance(params,tuple):\n params=(params,)\n if not params and cls is not Tuple:\n raise TypeError(\n f\"Parameter list to {cls.__qualname__}[...] cannot be empty\")\n msg=\"Parameters to generic types must be types.\"\n params=tuple(_type_check(p,msg)for p in params)\n if cls is Generic:\n \n if not all(isinstance(p,TypeVar)for p in params):\n raise TypeError(\n \"Parameters to Generic[...] must all be type variables\")\n if len(set(params))!=len(params):\n raise TypeError(\n \"Parameters to Generic[...] 
must all be unique\")\n elif cls is _Protocol:\n \n pass\n else :\n \n _check_generic(cls,params)\n return _GenericAlias(cls,params)\n \n def __init_subclass__(cls,*args,**kwargs):\n super().__init_subclass__(*args,**kwargs)\n tvars=[]\n if '__orig_bases__'in cls.__dict__:\n error=Generic in cls.__orig_bases__\n else :\n error=Generic in cls.__bases__ and cls.__name__ !='_Protocol'\n if error:\n raise TypeError(\"Cannot inherit from plain Generic\")\n if '__orig_bases__'in cls.__dict__:\n tvars=_collect_type_vars(cls.__orig_bases__)\n \n \n \n \n \n gvars=None\n for base in cls.__orig_bases__:\n if (isinstance(base,_GenericAlias)and\n base.__origin__ is Generic):\n if gvars is not None :\n raise TypeError(\n \"Cannot inherit from Generic[...] multiple types.\")\n gvars=base.__parameters__\n if gvars is None :\n gvars=tvars\n else :\n tvarset=set(tvars)\n gvarset=set(gvars)\n if not tvarset <=gvarset:\n s_vars=', '.join(str(t)for t in tvars if t not in gvarset)\n s_args=', '.join(str(g)for g in gvars)\n raise TypeError(f\"Some type variables ({s_vars}) are\"\n f\" not listed in Generic[{s_args}]\")\n tvars=gvars\n cls.__parameters__=tuple(tvars)\n \n \nclass _TypingEmpty:\n ''\n\n\n \n \n \nclass _TypingEllipsis:\n ''\n \n \ndef cast(typ,val):\n ''\n\n\n\n\n\n \n return val\n \n \ndef _get_defaults(func):\n ''\n try :\n code=func.__code__\n except AttributeError:\n \n return {}\n pos_count=code.co_argcount\n arg_names=code.co_varnames\n arg_names=arg_names[:pos_count]\n defaults=func.__defaults__ or ()\n kwdefaults=func.__kwdefaults__\n res=dict(kwdefaults)if kwdefaults else {}\n pos_offset=pos_count -len(defaults)\n for name,value in zip(arg_names[pos_offset:],defaults):\n assert name not in res\n res[name]=value\n return res\n \n \n_allowed_types=(types.FunctionType,types.BuiltinFunctionType,\ntypes.MethodType,types.ModuleType,\nWrapperDescriptorType,MethodWrapperType,MethodDescriptorType)\n\n\ndef get_type_hints(obj,globalns=None ,localns=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n if getattr(obj,'__no_type_check__',None ):\n return {}\n \n if isinstance(obj,type):\n hints={}\n for base in reversed(obj.__mro__):\n if globalns is None :\n base_globals=sys.modules[base.__module__].__dict__\n else :\n base_globals=globalns\n ann=base.__dict__.get('__annotations__',{})\n for name,value in ann.items():\n if value is None :\n value=type(None )\n if isinstance(value,str):\n value=ForwardRef(value,is_argument=False )\n value=_eval_type(value,base_globals,localns)\n hints[name]=value\n return hints\n \n if globalns is None :\n if isinstance(obj,types.ModuleType):\n globalns=obj.__dict__\n else :\n globalns=getattr(obj,'__globals__',{})\n if localns is None :\n localns=globalns\n elif localns is None :\n localns=globalns\n hints=getattr(obj,'__annotations__',None )\n if hints is None :\n \n if isinstance(obj,_allowed_types):\n return {}\n else :\n raise TypeError('{!r} is not a module, class, method, '\n 'or function.'.format(obj))\n defaults=_get_defaults(obj)\n hints=dict(hints)\n for name,value in hints.items():\n if value is None :\n value=type(None )\n if isinstance(value,str):\n value=ForwardRef(value)\n value=_eval_type(value,globalns,localns)\n if name in defaults and defaults[name]is None :\n value=Optional[value]\n hints[name]=value\n return hints\n \n \ndef no_type_check(arg):\n ''\n\n\n\n\n\n\n \n if isinstance(arg,type):\n arg_attrs=arg.__dict__.copy()\n for attr,val in arg.__dict__.items():\n if val in arg.__bases__+(arg,):\n arg_attrs.pop(attr)\n for 
obj in arg_attrs.values():\n if isinstance(obj,types.FunctionType):\n obj.__no_type_check__=True\n if isinstance(obj,type):\n no_type_check(obj)\n try :\n arg.__no_type_check__=True\n except TypeError:\n pass\n return arg\n \n \ndef no_type_check_decorator(decorator):\n ''\n\n\n\n \n \n @functools.wraps(decorator)\n def wrapped_decorator(*args,**kwds):\n func=decorator(*args,**kwds)\n func=no_type_check(func)\n return func\n \n return wrapped_decorator\n \n \ndef _overload_dummy(*args,**kwds):\n ''\n raise NotImplementedError(\n \"You should not call an overloaded function. \"\n \"A series of @overload-decorated functions \"\n \"outside a stub module should always be followed \"\n \"by an implementation that is not @overload-ed.\")\n \n \ndef overload(func):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n return _overload_dummy\n \n \nclass _ProtocolMeta(type):\n ''\n\n\n\n \n \n def __instancecheck__(self,obj):\n if _Protocol not in self.__bases__:\n return super().__instancecheck__(obj)\n raise TypeError(\"Protocols cannot be used with isinstance().\")\n \n def __subclasscheck__(self,cls):\n if not self._is_protocol:\n \n return NotImplemented\n \n if self is _Protocol:\n \n return True\n \n \n attrs=self._get_protocol_attrs()\n \n for attr in attrs:\n if not any(attr in d.__dict__ for d in cls.__mro__):\n return False\n return True\n \n def _get_protocol_attrs(self):\n \n protocol_bases=[]\n for c in self.__mro__:\n if getattr(c,'_is_protocol',False )and c.__name__ !='_Protocol':\n protocol_bases.append(c)\n \n \n attrs=set()\n for base in protocol_bases:\n for attr in base.__dict__.keys():\n \n for c in self.__mro__:\n if (c is not base and attr in c.__dict__ and\n not getattr(c,'_is_protocol',False )):\n break\n else :\n if (not attr.startswith('_abc_')and\n attr !='__abstractmethods__'and\n attr !='__annotations__'and\n attr !='__weakref__'and\n attr !='_is_protocol'and\n attr !='_gorg'and\n attr !='__dict__'and\n attr !='__args__'and\n attr !='__slots__'and\n attr !='_get_protocol_attrs'and\n attr !='__next_in_mro__'and\n attr !='__parameters__'and\n attr !='__origin__'and\n attr !='__orig_bases__'and\n attr !='__extra__'and\n attr !='__tree_hash__'and\n attr !='__module__'):\n attrs.add(attr)\n \n return attrs\n \n \nclass _Protocol(Generic,metaclass=_ProtocolMeta):\n ''\n\n\n\n\n \n \n __slots__=()\n \n _is_protocol=True\n \n def __class_getitem__(cls,params):\n return super().__class_getitem__(params)\n \n \n \n \nT=TypeVar('T')\nKT=TypeVar('KT')\nVT=TypeVar('VT')\nT_co=TypeVar('T_co',covariant=True )\nV_co=TypeVar('V_co',covariant=True )\nVT_co=TypeVar('VT_co',covariant=True )\nT_contra=TypeVar('T_contra',contravariant=True )\n\nCT_co=TypeVar('CT_co',covariant=True ,bound=type)\n\n\n\nAnyStr=TypeVar('AnyStr',bytes,str)\n\n\n\ndef _alias(origin,params,inst=True ):\n return _GenericAlias(origin,params,special=True ,inst=inst)\n \nHashable=_alias(collections.abc.Hashable,())\nAwaitable=_alias(collections.abc.Awaitable,T_co)\nCoroutine=_alias(collections.abc.Coroutine,(T_co,T_contra,V_co))\nAsyncIterable=_alias(collections.abc.AsyncIterable,T_co)\nAsyncIterator=_alias(collections.abc.AsyncIterator,T_co)\nIterable=_alias(collections.abc.Iterable,T_co)\nIterator=_alias(collections.abc.Iterator,T_co)\nReversible=_alias(collections.abc.Reversible,T_co)\nSized=_alias(collections.abc.Sized,())\nContainer=_alias(collections.abc.Container,T_co)\nCollection=_alias(collections.abc.Collection,T_co)\nCallable=_VariadicGenericAlias(collections.abc.Callable,(),special=True 
)\nCallable.__doc__=\\\n\"\"\"Callable type; Callable[[int], str] is a function of (int) -> str.\n\n The subscription syntax must always be used with exactly two\n values: the argument list and the return type. The argument list\n must be a list of types or ellipsis; the return type must be a single type.\n\n There is no syntax to indicate optional or keyword arguments,\n such function types are rarely used as callback types.\n \"\"\"\nAbstractSet=_alias(collections.abc.Set,T_co)\nMutableSet=_alias(collections.abc.MutableSet,T)\n\nMapping=_alias(collections.abc.Mapping,(KT,VT_co))\nMutableMapping=_alias(collections.abc.MutableMapping,(KT,VT))\nSequence=_alias(collections.abc.Sequence,T_co)\nMutableSequence=_alias(collections.abc.MutableSequence,T)\nByteString=_alias(collections.abc.ByteString,())\nTuple=_VariadicGenericAlias(tuple,(),inst=False ,special=True )\nTuple.__doc__=\\\n\"\"\"Tuple type; Tuple[X, Y] is the cross-product type of X and Y.\n\n Example: Tuple[T1, T2] is a tuple of two elements corresponding\n to type variables T1 and T2. Tuple[int, float, str] is a tuple\n of an int, a float and a string.\n\n To specify a variable-length tuple of homogeneous type, use Tuple[T, ...].\n \"\"\"\nList=_alias(list,T,inst=False )\nDeque=_alias(collections.deque,T)\nSet=_alias(set,T,inst=False )\nFrozenSet=_alias(frozenset,T_co,inst=False )\nMappingView=_alias(collections.abc.MappingView,T_co)\nKeysView=_alias(collections.abc.KeysView,KT)\nItemsView=_alias(collections.abc.ItemsView,(KT,VT_co))\nValuesView=_alias(collections.abc.ValuesView,VT_co)\nContextManager=_alias(contextlib.AbstractContextManager,T_co)\nAsyncContextManager=_alias(contextlib.AbstractAsyncContextManager,T_co)\nDict=_alias(dict,(KT,VT),inst=False )\nDefaultDict=_alias(collections.defaultdict,(KT,VT))\nCounter=_alias(collections.Counter,T)\nChainMap=_alias(collections.ChainMap,(KT,VT))\nGenerator=_alias(collections.abc.Generator,(T_co,T_contra,V_co))\nAsyncGenerator=_alias(collections.abc.AsyncGenerator,(T_co,T_contra))\nType=_alias(type,CT_co,inst=False )\nType.__doc__=\\\n\"\"\"A special construct usable to annotate class objects.\n\n For example, suppose we have the following classes::\n\n class User: ... 
# Abstract base for User classes\n class BasicUser(User): ...\n class ProUser(User): ...\n class TeamUser(User): ...\n\n And a function that takes a class argument that's a subclass of\n User and returns an instance of the corresponding class::\n\n U = TypeVar('U', bound=User)\n def new_user(user_class: Type[U]) -> U:\n user = user_class()\n # (Here we could write the user object to a database)\n return user\n\n joe = new_user(BasicUser)\n\n At this point the type checker knows that joe has type BasicUser.\n \"\"\"\n\n\nclass SupportsInt(_Protocol):\n __slots__=()\n \n @abstractmethod\n def __int__(self)->int:\n pass\n \n \nclass SupportsFloat(_Protocol):\n __slots__=()\n \n @abstractmethod\n def __float__(self)->float:\n pass\n \n \nclass SupportsComplex(_Protocol):\n __slots__=()\n \n @abstractmethod\n def __complex__(self)->complex:\n pass\n \n \nclass SupportsBytes(_Protocol):\n __slots__=()\n \n @abstractmethod\n def __bytes__(self)->bytes:\n pass\n \n \nclass SupportsAbs(_Protocol[T_co]):\n __slots__=()\n \n @abstractmethod\n def __abs__(self)->T_co:\n pass\n \n \nclass SupportsRound(_Protocol[T_co]):\n __slots__=()\n \n @abstractmethod\n def __round__(self,ndigits:int=0)->T_co:\n pass\n \n \ndef _make_nmtuple(name,types):\n msg=\"NamedTuple('Name', [(f0, t0), (f1, t1), ...]); each t must be a type\"\n types=[(n,_type_check(t,msg))for n,t in types]\n nm_tpl=collections.namedtuple(name,[n for n,t in types])\n \n \n nm_tpl.__annotations__=nm_tpl._field_types=collections.OrderedDict(types)\n try :\n nm_tpl.__module__=sys._getframe(2).f_globals.get('__name__','__main__')\n except (AttributeError,ValueError):\n pass\n return nm_tpl\n \n \n \n_prohibited=('__new__','__init__','__slots__','__getnewargs__',\n'_fields','_field_defaults','_field_types',\n'_make','_replace','_asdict','_source')\n\n_special=('__module__','__name__','__qualname__','__annotations__')\n\n\nclass NamedTupleMeta(type):\n\n def __new__(cls,typename,bases,ns):\n if ns.get('_root',False ):\n return super().__new__(cls,typename,bases,ns)\n types=ns.get('__annotations__',{})\n nm_tpl=_make_nmtuple(typename,types.items())\n defaults=[]\n defaults_dict={}\n for field_name in types:\n if field_name in ns:\n default_value=ns[field_name]\n defaults.append(default_value)\n defaults_dict[field_name]=default_value\n elif defaults:\n raise TypeError(\"Non-default namedtuple field {field_name} cannot \"\n \"follow default field(s) {default_names}\"\n .format(field_name=field_name,\n default_names=', '.join(defaults_dict.keys())))\n nm_tpl.__new__.__annotations__=collections.OrderedDict(types)\n nm_tpl.__new__.__defaults__=tuple(defaults)\n nm_tpl._field_defaults=defaults_dict\n \n for key in ns:\n if key in _prohibited:\n raise AttributeError(\"Cannot overwrite NamedTuple attribute \"+key)\n elif key not in _special and key not in nm_tpl._fields:\n setattr(nm_tpl,key,ns[key])\n return nm_tpl\n \n \nclass NamedTuple(metaclass=NamedTupleMeta):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n _root=True\n \n def __new__(self,typename,fields=None ,**kwargs):\n if fields is None :\n fields=kwargs.items()\n elif kwargs:\n raise TypeError(\"Either list of fields or keywords\"\n \" can be provided to NamedTuple, not both\")\n return _make_nmtuple(typename,fields)\n \n \ndef NewType(name,tp):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def new_type(x):\n return x\n \n new_type.__name__=name\n new_type.__supertype__=tp\n return new_type\n \n \n \nText=str\n\n\n\nTYPE_CHECKING=False\n\n\nclass IO(Generic[AnyStr]):\n 
''\n\n\n\n\n\n\n\n\n\n \n \n __slots__=()\n \n @abstractproperty\n def mode(self)->str:\n pass\n \n @abstractproperty\n def name(self)->str:\n pass\n \n @abstractmethod\n def close(self)->None :\n pass\n \n @abstractmethod\n def closed(self)->bool:\n pass\n \n @abstractmethod\n def fileno(self)->int:\n pass\n \n @abstractmethod\n def flush(self)->None :\n pass\n \n @abstractmethod\n def isatty(self)->bool:\n pass\n \n @abstractmethod\n def read(self,n:int=-1)->AnyStr:\n pass\n \n @abstractmethod\n def readable(self)->bool:\n pass\n \n @abstractmethod\n def readline(self,limit:int=-1)->AnyStr:\n pass\n \n @abstractmethod\n def readlines(self,hint:int=-1)->List[AnyStr]:\n pass\n \n @abstractmethod\n def seek(self,offset:int,whence:int=0)->int:\n pass\n \n @abstractmethod\n def seekable(self)->bool:\n pass\n \n @abstractmethod\n def tell(self)->int:\n pass\n \n @abstractmethod\n def truncate(self,size:int=None )->int:\n pass\n \n @abstractmethod\n def writable(self)->bool:\n pass\n \n @abstractmethod\n def write(self,s:AnyStr)->int:\n pass\n \n @abstractmethod\n def writelines(self,lines:List[AnyStr])->None :\n pass\n \n @abstractmethod\n def __enter__(self)->'IO[AnyStr]':\n pass\n \n @abstractmethod\n def __exit__(self,type,value,traceback)->None :\n pass\n \n \nclass BinaryIO(IO[bytes]):\n ''\n \n __slots__=()\n \n @abstractmethod\n def write(self,s:Union[bytes,bytearray])->int:\n pass\n \n @abstractmethod\n def __enter__(self)->'BinaryIO':\n pass\n \n \nclass TextIO(IO[str]):\n ''\n \n __slots__=()\n \n @abstractproperty\n def buffer(self)->BinaryIO:\n pass\n \n @abstractproperty\n def encoding(self)->str:\n pass\n \n @abstractproperty\n def errors(self)->Optional[str]:\n pass\n \n @abstractproperty\n def line_buffering(self)->bool:\n pass\n \n @abstractproperty\n def newlines(self)->Any:\n pass\n \n @abstractmethod\n def __enter__(self)->'TextIO':\n pass\n \n \nclass io:\n ''\n \n __all__=['IO','TextIO','BinaryIO']\n IO=IO\n TextIO=TextIO\n BinaryIO=BinaryIO\n \n \nio.__name__=__name__+'.io'\nsys.modules[io.__name__]=io\n\nPattern=_alias(stdlib_re.Pattern,AnyStr)\nMatch=_alias(stdlib_re.Match,AnyStr)\n\nclass re:\n ''\n \n __all__=['Pattern','Match']\n Pattern=Pattern\n Match=Match\n \n \nre.__name__=__name__+'.re'\nsys.modules[re.__name__]=re\n", ["abc", "collections", "collections.abc", "contextlib", "functools", "operator", "re", "sys", "types"]], "uuid": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nimport os\nimport sys\n\nfrom enum import Enum\n\n\n__author__='Ka-Ping Yee '\n\nRESERVED_NCS,RFC_4122,RESERVED_MICROSOFT,RESERVED_FUTURE=[\n'reserved for NCS compatibility','specified in RFC 4122',\n'reserved for Microsoft compatibility','reserved for future definition']\n\nint_=int\nbytes_=bytes\n\n\nclass SafeUUID(Enum):\n safe=0\n unsafe=-1\n unknown=None\n \n \nclass UUID:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def __init__(self,hex=None ,bytes=None ,bytes_le=None ,fields=None ,\n int=None ,version=None ,\n *,is_safe=SafeUUID.unknown):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n if [hex,bytes,bytes_le,fields,int].count(None )!=4:\n raise TypeError('one of the hex, bytes, bytes_le, fields, '\n 'or int arguments must be given')\n if hex is not None :\n hex=hex.replace('urn:','').replace('uuid:','')\n hex=hex.strip('{}').replace('-','')\n if len(hex)!=32:\n raise ValueError('badly formed hexadecimal UUID string')\n int=int_(hex,16)\n if 
bytes_le is not None :\n if len(bytes_le)!=16:\n raise ValueError('bytes_le is not a 16-char string')\n bytes=(bytes_le[4 -1::-1]+bytes_le[6 -1:4 -1:-1]+\n bytes_le[8 -1:6 -1:-1]+bytes_le[8:])\n if bytes is not None :\n if len(bytes)!=16:\n raise ValueError('bytes is not a 16-char string')\n assert isinstance(bytes,bytes_),repr(bytes)\n int=int_.from_bytes(bytes,byteorder='big')\n if fields is not None :\n if len(fields)!=6:\n raise ValueError('fields is not a 6-tuple')\n (time_low,time_mid,time_hi_version,\n clock_seq_hi_variant,clock_seq_low,node)=fields\n if not 0 <=time_low <1 <<32:\n raise ValueError('field 1 out of range (need a 32-bit value)')\n if not 0 <=time_mid <1 <<16:\n raise ValueError('field 2 out of range (need a 16-bit value)')\n if not 0 <=time_hi_version <1 <<16:\n raise ValueError('field 3 out of range (need a 16-bit value)')\n if not 0 <=clock_seq_hi_variant <1 <<8:\n raise ValueError('field 4 out of range (need an 8-bit value)')\n if not 0 <=clock_seq_low <1 <<8:\n raise ValueError('field 5 out of range (need an 8-bit value)')\n if not 0 <=node <1 <<48:\n raise ValueError('field 6 out of range (need a 48-bit value)')\n clock_seq=(clock_seq_hi_variant <<8)|clock_seq_low\n int=((time_low <<96)|(time_mid <<80)|\n (time_hi_version <<64)|(clock_seq <<48)|node)\n if int is not None :\n if not 0 <=int <1 <<128:\n raise ValueError('int is out of range (need a 128-bit value)')\n if version is not None :\n if not 1 <=version <=5:\n raise ValueError('illegal version number')\n \n int &=~(0xc000 <<48)\n int |=0x8000 <<48\n \n int &=~(0xf000 <<64)\n int |=version <<76\n self.__dict__['int']=int\n self.__dict__['is_safe']=is_safe\n \n def __eq__(self,other):\n if isinstance(other,UUID):\n return self.int ==other.int\n return NotImplemented\n \n \n \n \n def __lt__(self,other):\n if isinstance(other,UUID):\n return self.int other.int\n return NotImplemented\n \n def __le__(self,other):\n if isinstance(other,UUID):\n return self.int <=other.int\n return NotImplemented\n \n def __ge__(self,other):\n if isinstance(other,UUID):\n return self.int >=other.int\n return NotImplemented\n \n def __hash__(self):\n return hash(self.int)\n \n def __int__(self):\n return self.int\n \n def __repr__(self):\n return '%s(%r)'%(self.__class__.__name__,str(self))\n \n def __setattr__(self,name,value):\n raise TypeError('UUID objects are immutable')\n \n def __str__(self):\n hex='%032x'%self.int\n return '%s-%s-%s-%s-%s'%(\n hex[:8],hex[8:12],hex[12:16],hex[16:20],hex[20:])\n \n @property\n def bytes(self):\n return self.int.to_bytes(16,'big')\n \n @property\n def bytes_le(self):\n bytes=self.bytes\n return (bytes[4 -1::-1]+bytes[6 -1:4 -1:-1]+bytes[8 -1:6 -1:-1]+\n bytes[8:])\n \n @property\n def fields(self):\n return (self.time_low,self.time_mid,self.time_hi_version,\n self.clock_seq_hi_variant,self.clock_seq_low,self.node)\n \n @property\n def time_low(self):\n return self.int >>96\n \n @property\n def time_mid(self):\n return (self.int >>80)&0xffff\n \n @property\n def time_hi_version(self):\n return (self.int >>64)&0xffff\n \n @property\n def clock_seq_hi_variant(self):\n return (self.int >>56)&0xff\n \n @property\n def clock_seq_low(self):\n return (self.int >>48)&0xff\n \n @property\n def time(self):\n return (((self.time_hi_version&0x0fff)<<48)|\n (self.time_mid <<32)|self.time_low)\n \n @property\n def clock_seq(self):\n return (((self.clock_seq_hi_variant&0x3f)<<8)|\n self.clock_seq_low)\n \n @property\n def node(self):\n return self.int&0xffffffffffff\n \n @property\n def hex(self):\n return 
'%032x'%self.int\n \n @property\n def urn(self):\n return 'urn:uuid:'+str(self)\n \n @property\n def variant(self):\n if not self.int&(0x8000 <<48):\n return RESERVED_NCS\n elif not self.int&(0x4000 <<48):\n return RFC_4122\n elif not self.int&(0x2000 <<48):\n return RESERVED_MICROSOFT\n else :\n return RESERVED_FUTURE\n \n @property\n def version(self):\n \n if self.variant ==RFC_4122:\n return int((self.int >>76)&0xf)\n \ndef _popen(command,*args):\n import os,shutil,subprocess\n executable=shutil.which(command)\n if executable is None :\n path=os.pathsep.join(('/sbin','/usr/sbin'))\n executable=shutil.which(command,path=path)\n if executable is None :\n return None\n \n \n \n env=dict(os.environ)\n env['LC_ALL']='C'\n proc=subprocess.Popen((executable,)+args,\n stdout=subprocess.PIPE,\n stderr=subprocess.DEVNULL,\n env=env)\n return proc\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \ndef _is_universal(mac):\n return not (mac&(1 <<41))\n \ndef _find_mac(command,args,hw_identifiers,get_index):\n first_local_mac=None\n try :\n proc=_popen(command,*args.split())\n if not proc:\n return None\n with proc:\n for line in proc.stdout:\n words=line.lower().rstrip().split()\n for i in range(len(words)):\n if words[i]in hw_identifiers:\n try :\n word=words[get_index(i)]\n mac=int(word.replace(b':',b''),16)\n if _is_universal(mac):\n return mac\n first_local_mac=first_local_mac or mac\n except (ValueError,IndexError):\n \n \n \n \n \n pass\n except OSError:\n pass\n return first_local_mac or None\n \ndef _ifconfig_getnode():\n ''\n \n keywords=(b'hwaddr',b'ether',b'address:',b'lladdr')\n for args in ('','-a','-av'):\n mac=_find_mac('ifconfig',args,keywords,lambda i:i+1)\n if mac:\n return mac\n return None\n \ndef _ip_getnode():\n ''\n \n mac=_find_mac('ip','link',[b'link/ether'],lambda i:i+1)\n if mac:\n return mac\n return None\n \ndef _arp_getnode():\n ''\n import os,socket\n try :\n ip_addr=socket.gethostbyname(socket.gethostname())\n except OSError:\n return None\n \n \n mac=_find_mac('arp','-an',[os.fsencode(ip_addr)],lambda i:-1)\n if mac:\n return mac\n \n \n mac=_find_mac('arp','-an',[os.fsencode(ip_addr)],lambda i:i+1)\n if mac:\n return mac\n \n \n mac=_find_mac('arp','-an',[os.fsencode('(%s)'%ip_addr)],\n lambda i:i+2)\n \n if mac:\n return mac\n return None\n \ndef _lanscan_getnode():\n ''\n \n return _find_mac('lanscan','-ai',[b'lan0'],lambda i:0)\n \ndef _netstat_getnode():\n ''\n \n first_local_mac=None\n try :\n proc=_popen('netstat','-ia')\n if not proc:\n return None\n with proc:\n words=proc.stdout.readline().rstrip().split()\n try :\n i=words.index(b'Address')\n except ValueError:\n return None\n for line in proc.stdout:\n try :\n words=line.rstrip().split()\n word=words[i]\n if len(word)==17 and word.count(b':')==5:\n mac=int(word.replace(b':',b''),16)\n if _is_universal(mac):\n return mac\n first_local_mac=first_local_mac or mac\n except (ValueError,IndexError):\n pass\n except OSError:\n pass\n return first_local_mac or None\n \ndef _ipconfig_getnode():\n ''\n import os,re,subprocess\n first_local_mac=None\n dirs=['',r'c:\\windows\\system32',r'c:\\winnt\\system32']\n try :\n import ctypes\n buffer=ctypes.create_string_buffer(300)\n ctypes.windll.kernel32.GetSystemDirectoryA(buffer,300)\n dirs.insert(0,buffer.value.decode('mbcs'))\n except :\n pass\n for dir in dirs:\n try :\n proc=subprocess.Popen([os.path.join(dir,'ipconfig'),'/all'],\n stdout=subprocess.PIPE,\n encoding=\"oem\")\n except OSError:\n continue\n with proc:\n for line in proc.stdout:\n 
value=line.split(':')[-1].strip().lower()\n if re.fullmatch('(?:[0-9a-f][0-9a-f]-){5}[0-9a-f][0-9a-f]',value):\n mac=int(value.replace('-',''),16)\n if _is_universal(mac):\n return mac\n first_local_mac=first_local_mac or mac\n return first_local_mac or None\n \ndef _netbios_getnode():\n ''\n \n import win32wnet,netbios\n first_local_mac=None\n ncb=netbios.NCB()\n ncb.Command=netbios.NCBENUM\n ncb.Buffer=adapters=netbios.LANA_ENUM()\n adapters._pack()\n if win32wnet.Netbios(ncb)!=0:\n return None\n adapters._unpack()\n for i in range(adapters.length):\n ncb.Reset()\n ncb.Command=netbios.NCBRESET\n ncb.Lana_num=ord(adapters.lana[i])\n if win32wnet.Netbios(ncb)!=0:\n continue\n ncb.Reset()\n ncb.Command=netbios.NCBASTAT\n ncb.Lana_num=ord(adapters.lana[i])\n ncb.Callname='*'.ljust(16)\n ncb.Buffer=status=netbios.ADAPTER_STATUS()\n if win32wnet.Netbios(ncb)!=0:\n continue\n status._unpack()\n bytes=status.adapter_address[:6]\n if len(bytes)!=6:\n continue\n mac=int.from_bytes(bytes,'big')\n if _is_universal(mac):\n return mac\n first_local_mac=first_local_mac or mac\n return first_local_mac or None\n \n \n_generate_time_safe=_UuidCreate=None\n_has_uuid_generate_time_safe=None\n\n\ntry :\n import _uuid\nexcept ImportError:\n _uuid=None\n \n \ndef _load_system_functions():\n ''\n\n \n global _generate_time_safe,_UuidCreate,_has_uuid_generate_time_safe\n \n if _has_uuid_generate_time_safe is not None :\n return\n \n _has_uuid_generate_time_safe=False\n \n if sys.platform ==\"darwin\"and int(os.uname().release.split('.')[0])<9:\n \n \n \n \n \n \n \n pass\n elif _uuid is not None :\n _generate_time_safe=_uuid.generate_time_safe\n _has_uuid_generate_time_safe=_uuid.has_uuid_generate_time_safe\n return\n \n try :\n \n \n \n import ctypes\n import ctypes.util\n \n \n \n _libnames=['uuid']\n if not sys.platform.startswith('win'):\n _libnames.append('c')\n for libname in _libnames:\n try :\n lib=ctypes.CDLL(ctypes.util.find_library(libname))\n except Exception:\n continue\n \n if hasattr(lib,'uuid_generate_time_safe'):\n _uuid_generate_time_safe=lib.uuid_generate_time_safe\n \n def _generate_time_safe():\n _buffer=ctypes.create_string_buffer(16)\n res=_uuid_generate_time_safe(_buffer)\n return bytes(_buffer.raw),res\n _has_uuid_generate_time_safe=True\n break\n \n elif hasattr(lib,'uuid_generate_time'):\n _uuid_generate_time=lib.uuid_generate_time\n \n _uuid_generate_time.restype=None\n def _generate_time_safe():\n _buffer=ctypes.create_string_buffer(16)\n _uuid_generate_time(_buffer)\n return bytes(_buffer.raw),None\n break\n \n \n \n \n \n \n \n \n \n try :\n lib=ctypes.windll.rpcrt4\n except :\n lib=None\n _UuidCreate=getattr(lib,'UuidCreateSequential',\n getattr(lib,'UuidCreate',None ))\n \n except Exception as exc:\n import warnings\n warnings.warn(f\"Could not find fallback ctypes uuid functions: {exc}\",\n ImportWarning)\n \n \ndef _unix_getnode():\n ''\n \n _load_system_functions()\n uuid_time,_=_generate_time_safe()\n return UUID(bytes=uuid_time).node\n \ndef _windll_getnode():\n ''\n import ctypes\n _load_system_functions()\n _buffer=ctypes.create_string_buffer(16)\n if _UuidCreate(_buffer)==0:\n return UUID(bytes=bytes_(_buffer.raw)).node\n \ndef _random_getnode():\n ''\n \n \n \n \n \n \n \n \n \n \n import random\n return random.getrandbits(48)|(1 <<40)\n \n \n_node=None\n\n_NODE_GETTERS_WIN32=[_windll_getnode,_netbios_getnode,_ipconfig_getnode]\n\n_NODE_GETTERS_UNIX=[_unix_getnode,_ifconfig_getnode,_ip_getnode,\n_arp_getnode,_lanscan_getnode,_netstat_getnode]\n\ndef 
getnode(*,getters=None ):\n ''\n\n\n\n\n\n \n global _node\n if _node is not None :\n return _node\n \n if sys.platform =='win32':\n getters=_NODE_GETTERS_WIN32\n else :\n getters=_NODE_GETTERS_UNIX\n \n for getter in getters+[_random_getnode]:\n try :\n _node=getter()\n except :\n continue\n if (_node is not None )and (0 <=_node <(1 <<48)):\n return _node\n assert False ,'_random_getnode() returned invalid value: {}'.format(_node)\n \n \n_last_timestamp=None\n\ndef uuid1(node=None ,clock_seq=None ):\n ''\n\n\n \n \n \n \n _load_system_functions()\n if _generate_time_safe is not None and node is clock_seq is None :\n uuid_time,safely_generated=_generate_time_safe()\n try :\n is_safe=SafeUUID(safely_generated)\n except ValueError:\n is_safe=SafeUUID.unknown\n return UUID(bytes=uuid_time,is_safe=is_safe)\n \n global _last_timestamp\n import time\n nanoseconds=int(time.time()*1e9)\n \n \n timestamp=int(nanoseconds /100)+0x01b21dd213814000\n if _last_timestamp is not None and timestamp <=_last_timestamp:\n timestamp=_last_timestamp+1\n _last_timestamp=timestamp\n if clock_seq is None :\n import random\n clock_seq=random.getrandbits(14)\n time_low=timestamp&0xffffffff\n time_mid=(timestamp >>32)&0xffff\n time_hi_version=(timestamp >>48)&0x0fff\n clock_seq_low=clock_seq&0xff\n clock_seq_hi_variant=(clock_seq >>8)&0x3f\n if node is None :\n node=getnode()\n return UUID(fields=(time_low,time_mid,time_hi_version,\n clock_seq_hi_variant,clock_seq_low,node),version=1)\n \ndef uuid3(namespace,name):\n ''\n from hashlib import md5\n hash=md5(namespace.bytes+bytes(name,\"utf-8\")).digest()\n return UUID(bytes=hash[:16],version=3)\n \ndef uuid4():\n ''\n return UUID(bytes=os.urandom(16),version=4)\n \ndef uuid5(namespace,name):\n ''\n from hashlib import sha1\n hash=sha1(namespace.bytes+bytes(name,\"utf-8\")).digest()\n return UUID(bytes=hash[:16],version=5)\n \n \n \nNAMESPACE_DNS=UUID('6ba7b810-9dad-11d1-80b4-00c04fd430c8')\nNAMESPACE_URL=UUID('6ba7b811-9dad-11d1-80b4-00c04fd430c8')\nNAMESPACE_OID=UUID('6ba7b812-9dad-11d1-80b4-00c04fd430c8')\nNAMESPACE_X500=UUID('6ba7b814-9dad-11d1-80b4-00c04fd430c8')\n", ["_uuid", "ctypes", "ctypes.util", "enum", "hashlib", "netbios", "os", "random", "re", "shutil", "socket", "subprocess", "sys", "time", "warnings", "win32wnet"]], "VFS_import": [".py", "import os\nimport sys\nfrom browser import doc\n\n\n\n\n\n\nVFS=dict(JSObject(__BRYTHON__.py_VFS))\nclass VFSModuleFinder:\n def __init__(self,path_entry):\n print(\"in VFSModuleFinder\")\n if path_entry.startswith('/libs')or path_entry.startswith('/Lib'):\n self.path_entry=path_entry\n else :\n raise ImportError()\n \n def __str__(self):\n return '<%s for \"%s\">'%(self.__class__.__name__,self.path_entry)\n \n def find_module(self,fullname,path=None ):\n path=path or self.path_entry\n \n for _ext in ['js','pyj','py']:\n _filepath=os.path.join(self.path_entry,'%s.%s'%(fullname,_ext))\n if _filepath in VFS:\n print(\"module found at %s:%s\"%(_filepath,fullname))\n return VFSModuleLoader(_filepath,fullname)\n \n print('module %s not found'%fullname)\n raise ImportError()\n return None\n \nclass VFSModuleLoader:\n ''\n \n def __init__(self,filepath,name):\n self._filepath=filepath\n self._name=name\n \n def get_source(self):\n if self._filepath in VFS:\n return JSObject(readFromVFS(self._filepath))\n \n raise ImportError('could not find source for %s'%fullname)\n \n def is_package(self):\n return '.'in self._name\n \n def load_module(self):\n if self._name in sys.modules:\n \n mod=sys.modules[self._name]\n return mod\n \n 
_src=self.get_source()\n if self._filepath.endswith('.js'):\n mod=JSObject(import_js_module(_src,self._filepath,self._name))\n elif self._filepath.endswith('.py'):\n mod=JSObject(import_py_module(_src,self._filepath,self._name))\n elif self._filepath.endswith('.pyj'):\n mod=JSObject(import_pyj_module(_src,self._filepath,self._name))\n else :\n raise ImportError('Invalid Module: %s'%self._filepath)\n \n \n mod.__file__=self._filepath\n mod.__name__=self._name\n mod.__path__=os.path.abspath(self._filepath)\n mod.__loader__=self\n mod.__package__='.'.join(self._name.split('.')[:-1])\n \n if self.is_package():\n print('adding path for package')\n \n \n mod.__path__=[self.path_entry]\n else :\n print('imported as regular module')\n \n print('creating a new module object for \"%s\"'%self._name)\n sys.modules.setdefault(self._name,mod)\n JSObject(__BRYTHON__.imported)[self._name]=mod\n \n return mod\n \nJSObject(__BRYTHON__.path_hooks.insert(0,VFSModuleFinder))\n", ["browser", "os", "sys"]], "warnings": [".py", "''\n\nimport sys\n\n\n__all__=[\"warn\",\"warn_explicit\",\"showwarning\",\n\"formatwarning\",\"filterwarnings\",\"simplefilter\",\n\"resetwarnings\",\"catch_warnings\"]\n\ndef showwarning(message,category,filename,lineno,file=None ,line=None ):\n ''\n msg=WarningMessage(message,category,filename,lineno,file,line)\n _showwarnmsg_impl(msg)\n \ndef formatwarning(message,category,filename,lineno,line=None ):\n ''\n msg=WarningMessage(message,category,filename,lineno,None ,line)\n return _formatwarnmsg_impl(msg)\n \ndef _showwarnmsg_impl(msg):\n file=msg.file\n if file is None :\n file=sys.stderr\n if file is None :\n \n \n return\n text=_formatwarnmsg(msg)\n try :\n file.write(text)\n except OSError:\n \n pass\n \ndef _formatwarnmsg_impl(msg):\n s=(\"%s:%s: %s: %s\\n\"\n %(msg.filename,msg.lineno,msg.category.__name__,\n msg.message))\n \n if msg.line is None :\n try :\n import linecache\n line=linecache.getline(msg.filename,msg.lineno)\n except Exception:\n \n \n line=None\n linecache=None\n else :\n line=msg.line\n if line:\n line=line.strip()\n s +=\" %s\\n\"%line\n \n if msg.source is not None :\n try :\n import tracemalloc\n tb=tracemalloc.get_object_traceback(msg.source)\n except Exception:\n \n \n tb=None\n \n if tb is not None :\n s +='Object allocated at (most recent call last):\\n'\n for frame in tb:\n s +=(' File \"%s\", lineno %s\\n'\n %(frame.filename,frame.lineno))\n \n try :\n if linecache is not None :\n line=linecache.getline(frame.filename,frame.lineno)\n else :\n line=None\n except Exception:\n line=None\n if line:\n line=line.strip()\n s +=' %s\\n'%line\n return s\n \n \n_showwarning_orig=showwarning\n\ndef _showwarnmsg(msg):\n ''\n try :\n sw=showwarning\n except NameError:\n pass\n else :\n if sw is not _showwarning_orig:\n \n if not callable(sw):\n raise TypeError(\"warnings.showwarning() must be set to a \"\n \"function or method\")\n \n sw(msg.message,msg.category,msg.filename,msg.lineno,\n msg.file,msg.line)\n return\n _showwarnmsg_impl(msg)\n \n \n_formatwarning_orig=formatwarning\n\ndef _formatwarnmsg(msg):\n ''\n try :\n fw=formatwarning\n except NameError:\n pass\n else :\n if fw is not _formatwarning_orig:\n \n return fw(msg.message,msg.category,\n msg.filename,msg.lineno,line=msg.line)\n return _formatwarnmsg_impl(msg)\n \ndef filterwarnings(action,message=\"\",category=Warning,module=\"\",lineno=0,\nappend=False ):\n ''\n\n\n\n\n\n\n\n\n \n assert action in (\"error\",\"ignore\",\"always\",\"default\",\"module\",\n \"once\"),\"invalid action: %r\"%(action,)\n 
assert isinstance(message,str),\"message must be a string\"\n assert isinstance(category,type),\"category must be a class\"\n assert issubclass(category,Warning),\"category must be a Warning subclass\"\n assert isinstance(module,str),\"module must be a string\"\n assert isinstance(lineno,int)and lineno >=0,\\\n \"lineno must be an int >= 0\"\n \n if message or module:\n import re\n \n if message:\n message=re.compile(message,re.I)\n else :\n message=None\n if module:\n module=re.compile(module)\n else :\n module=None\n \n _add_filter(action,message,category,module,lineno,append=append)\n \ndef simplefilter(action,category=Warning,lineno=0,append=False ):\n ''\n\n\n\n\n\n\n\n \n assert action in (\"error\",\"ignore\",\"always\",\"default\",\"module\",\n \"once\"),\"invalid action: %r\"%(action,)\n assert isinstance(lineno,int)and lineno >=0,\\\n \"lineno must be an int >= 0\"\n _add_filter(action,None ,category,None ,lineno,append=append)\n \ndef _add_filter(*item,append):\n\n\n if not append:\n try :\n filters.remove(item)\n except ValueError:\n pass\n filters.insert(0,item)\n else :\n if item not in filters:\n filters.append(item)\n _filters_mutated()\n \ndef resetwarnings():\n ''\n filters[:]=[]\n _filters_mutated()\n \nclass _OptionError(Exception):\n ''\n pass\n \n \ndef _processoptions(args):\n for arg in args:\n try :\n _setoption(arg)\n except _OptionError as msg:\n print(\"Invalid -W option ignored:\",msg,file=sys.stderr)\n \n \ndef _setoption(arg):\n import re\n parts=arg.split(':')\n if len(parts)>5:\n raise _OptionError(\"too many fields (max 5): %r\"%(arg,))\n while len(parts)<5:\n parts.append('')\n action,message,category,module,lineno=[s.strip()\n for s in parts]\n action=_getaction(action)\n message=re.escape(message)\n category=_getcategory(category)\n module=re.escape(module)\n if module:\n module=module+'$'\n if lineno:\n try :\n lineno=int(lineno)\n if lineno <0:\n raise ValueError\n except (ValueError,OverflowError):\n raise _OptionError(\"invalid lineno %r\"%(lineno,))from None\n else :\n lineno=0\n filterwarnings(action,message,category,module,lineno)\n \n \ndef _getaction(action):\n if not action:\n return \"default\"\n if action ==\"all\":return \"always\"\n for a in ('default','always','ignore','module','once','error'):\n if a.startswith(action):\n return a\n raise _OptionError(\"invalid action: %r\"%(action,))\n \n \ndef _getcategory(category):\n import re\n if not category:\n return Warning\n if re.match(\"^[a-zA-Z0-9_]+$\",category):\n try :\n cat=eval(category)\n except NameError:\n raise _OptionError(\"unknown warning category: %r\"%(category,))from None\n else :\n i=category.rfind(\".\")\n module=category[:i]\n klass=category[i+1:]\n try :\n m=__import__(module,None ,None ,[klass])\n except ImportError:\n raise _OptionError(\"invalid module name: %r\"%(module,))from None\n try :\n cat=getattr(m,klass)\n except AttributeError:\n raise _OptionError(\"unknown warning category: %r\"%(category,))from None\n if not issubclass(cat,Warning):\n raise _OptionError(\"invalid warning category: %r\"%(category,))\n return cat\n \n \ndef _is_internal_frame(frame):\n ''\n filename=frame.f_code.co_filename\n return 'importlib'in filename and '_bootstrap'in filename\n \n \ndef _next_external_frame(frame):\n ''\n frame=frame.f_back\n while frame is not None and _is_internal_frame(frame):\n frame=frame.f_back\n return frame\n \n \n \ndef warn(message,category=None ,stacklevel=1,source=None ):\n ''\n \n if isinstance(message,Warning):\n category=message.__class__\n \n if 
category is None :\n category=UserWarning\n if not (isinstance(category,type)and issubclass(category,Warning)):\n raise TypeError(\"category must be a Warning subclass, \"\n \"not '{:s}'\".format(type(category).__name__))\n \n try :\n if stacklevel <=1 or _is_internal_frame(sys._getframe(1)):\n \n \n frame=sys._getframe(stacklevel)\n else :\n frame=sys._getframe(1)\n \n for x in range(stacklevel -1):\n frame=_next_external_frame(frame)\n if frame is None :\n raise ValueError\n except ValueError:\n globals=sys.__dict__\n lineno=1\n else :\n globals=frame.f_globals\n lineno=frame.f_lineno\n if '__name__'in globals:\n module=globals['__name__']\n else :\n module=\"\"\n filename=globals.get('__file__')\n if filename:\n fnl=filename.lower()\n if fnl.endswith(\".pyc\"):\n filename=filename[:-1]\n else :\n if module ==\"__main__\":\n try :\n filename=sys.argv[0]\n except AttributeError:\n \n filename='__main__'\n if not filename:\n filename=module\n registry=globals.setdefault(\"__warningregistry__\",{})\n warn_explicit(message,category,filename,lineno,module,registry,\n globals,source)\n \ndef warn_explicit(message,category,filename,lineno,\nmodule=None ,registry=None ,module_globals=None ,\nsource=None ):\n lineno=int(lineno)\n if module is None :\n module=filename or \"\"\n if module[-3:].lower()==\".py\":\n module=module[:-3]\n if registry is None :\n registry={}\n if registry.get('version',0)!=_filters_version:\n registry.clear()\n registry['version']=_filters_version\n if isinstance(message,Warning):\n text=str(message)\n category=message.__class__\n else :\n text=message\n message=category(message)\n key=(text,category,lineno)\n \n if registry.get(key):\n return\n \n for item in filters:\n action,msg,cat,mod,ln=item\n if ((msg is None or msg.match(text))and\n issubclass(category,cat)and\n (mod is None or mod.match(module))and\n (ln ==0 or lineno ==ln)):\n break\n else :\n action=defaultaction\n \n if action ==\"ignore\":\n return\n \n \n \n import linecache\n linecache.getlines(filename,module_globals)\n \n if action ==\"error\":\n raise message\n \n if action ==\"once\":\n registry[key]=1\n oncekey=(text,category)\n if onceregistry.get(oncekey):\n return\n onceregistry[oncekey]=1\n elif action ==\"always\":\n pass\n elif action ==\"module\":\n registry[key]=1\n altkey=(text,category,0)\n if registry.get(altkey):\n return\n registry[altkey]=1\n elif action ==\"default\":\n registry[key]=1\n else :\n \n raise RuntimeError(\n \"Unrecognized action (%r) in warnings.filters:\\n %s\"%\n (action,item))\n \n msg=WarningMessage(message,category,filename,lineno,source)\n _showwarnmsg(msg)\n \n \nclass WarningMessage(object):\n\n _WARNING_DETAILS=(\"message\",\"category\",\"filename\",\"lineno\",\"file\",\n \"line\",\"source\")\n \n def __init__(self,message,category,filename,lineno,file=None ,\n line=None ,source=None ):\n self.message=message\n self.category=category\n self.filename=filename\n self.lineno=lineno\n self.file=file\n self.line=line\n self.source=source\n self._category_name=category.__name__ if category else None\n \n def __str__(self):\n return (\"{message : %r, category : %r, filename : %r, lineno : %s, \"\n \"line : %r}\"%(self.message,self._category_name,\n self.filename,self.lineno,self.line))\n \n \nclass catch_warnings(object):\n\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def __init__(self,*,record=False ,module=None ):\n ''\n\n\n\n\n\n \n self._record=record\n self._module=sys.modules['warnings']if module is None else module\n self._entered=False\n \n def __repr__(self):\n 
args=[]\n if self._record:\n args.append(\"record=True\")\n if self._module is not sys.modules['warnings']:\n args.append(\"module=%r\"%self._module)\n name=type(self).__name__\n return \"%s(%s)\"%(name,\", \".join(args))\n \n def __enter__(self):\n if self._entered:\n raise RuntimeError(\"Cannot enter %r twice\"%self)\n self._entered=True\n self._filters=self._module.filters\n self._module.filters=self._filters[:]\n self._module._filters_mutated()\n self._showwarning=self._module.showwarning\n self._showwarnmsg_impl=self._module._showwarnmsg_impl\n if self._record:\n log=[]\n self._module._showwarnmsg_impl=log.append\n \n \n self._module.showwarning=self._module._showwarning_orig\n return log\n else :\n return None\n \n def __exit__(self,*exc_info):\n if not self._entered:\n raise RuntimeError(\"Cannot exit %r without entering first\"%self)\n self._module.filters=self._filters\n self._module._filters_mutated()\n self._module.showwarning=self._showwarning\n self._module._showwarnmsg_impl=self._showwarnmsg_impl\n \n \n \ndef _warn_unawaited_coroutine(coro):\n msg_lines=[\n f\"coroutine '{coro.__qualname__}' was never awaited\\n\"\n ]\n if coro.cr_origin is not None :\n import linecache,traceback\n def extract():\n for filename,lineno,funcname in reversed(coro.cr_origin):\n line=linecache.getline(filename,lineno)\n yield (filename,lineno,funcname,line)\n msg_lines.append(\"Coroutine created at (most recent call last)\\n\")\n msg_lines +=traceback.format_list(list(extract()))\n msg=\"\".join(msg_lines).rstrip(\"\\n\")\n \n \n \n \n \n \n warn(msg,category=RuntimeWarning,stacklevel=2,source=coro)\n \n \n \n \n \n \n \n \n \n \ntry :\n from _warnings import (filters,_defaultaction,_onceregistry,\n warn,warn_explicit,_filters_mutated)\n defaultaction=_defaultaction\n onceregistry=_onceregistry\n _warnings_defaults=True\nexcept ImportError:\n filters=[]\n defaultaction=\"default\"\n onceregistry={}\n \n _filters_version=1\n \n def _filters_mutated():\n global _filters_version\n _filters_version +=1\n \n _warnings_defaults=False\n \n \n \n_processoptions(sys.warnoptions)\nif not _warnings_defaults:\n\n if not hasattr(sys,'gettotalrefcount'):\n filterwarnings(\"default\",category=DeprecationWarning,\n module=\"__main__\",append=1)\n simplefilter(\"ignore\",category=DeprecationWarning,append=1)\n simplefilter(\"ignore\",category=PendingDeprecationWarning,append=1)\n simplefilter(\"ignore\",category=ImportWarning,append=1)\n simplefilter(\"ignore\",category=ResourceWarning,append=1)\n \ndel _warnings_defaults\n", ["_warnings", "linecache", "re", "sys", "traceback", "tracemalloc"]], "weakref": [".py", "''\n\n\n\n\n\n\n\n\n\n\nfrom _weakref import (\ngetweakrefcount,\ngetweakrefs,\nref,\nproxy,\nCallableProxyType,\nProxyType,\nReferenceType,\n_remove_dead_weakref)\n\nfrom _weakrefset import WeakSet,_IterationGuard\n\nimport _collections_abc\nimport sys\nimport itertools\n\nProxyTypes=(ProxyType,CallableProxyType)\n\n__all__=[\"ref\",\"proxy\",\"getweakrefcount\",\"getweakrefs\",\n\"WeakKeyDictionary\",\"ReferenceType\",\"ProxyType\",\n\"CallableProxyType\",\"ProxyTypes\",\"WeakValueDictionary\",\n\"WeakSet\",\"WeakMethod\",\"finalize\"]\n\n\nclass WeakMethod(ref):\n ''\n\n\n \n \n __slots__=\"_func_ref\",\"_meth_type\",\"_alive\",\"__weakref__\"\n \n def __new__(cls,meth,callback=None ):\n try :\n obj=meth.__self__\n func=meth.__func__\n except AttributeError:\n raise TypeError(\"argument should be a bound method, not {}\"\n .format(type(meth)))from None\n def _cb(arg):\n \n \n self=self_wr()\n if 
self._alive:\n self._alive=False\n if callback is not None :\n callback(self)\n self=ref.__new__(cls,obj,_cb)\n self._func_ref=ref(func,_cb)\n self._meth_type=type(meth)\n self._alive=True\n self_wr=ref(self)\n return self\n \n def __call__(self):\n obj=super().__call__()\n func=self._func_ref()\n if obj is None or func is None :\n return None\n return self._meth_type(func,obj)\n \n def __eq__(self,other):\n if isinstance(other,WeakMethod):\n if not self._alive or not other._alive:\n return self is other\n return ref.__eq__(self,other)and self._func_ref ==other._func_ref\n return False\n \n def __ne__(self,other):\n if isinstance(other,WeakMethod):\n if not self._alive or not other._alive:\n return self is not other\n return ref.__ne__(self,other)or self._func_ref !=other._func_ref\n return True\n \n __hash__=ref.__hash__\n \n \nclass WeakValueDictionary(_collections_abc.MutableMapping):\n ''\n\n\n\n \n \n \n \n \n \n \n def __init__(*args,**kw):\n if not args:\n raise TypeError(\"descriptor '__init__' of 'WeakValueDictionary' \"\n \"object needs an argument\")\n self,*args=args\n if len(args)>1:\n raise TypeError('expected at most 1 arguments, got %d'%len(args))\n def remove(wr,selfref=ref(self),_atomic_removal=_remove_dead_weakref):\n self=selfref()\n if self is not None :\n if self._iterating:\n self._pending_removals.append(wr.key)\n else :\n \n \n _atomic_removal(d,wr.key)\n self._remove=remove\n \n self._pending_removals=[]\n self._iterating=set()\n self.data=d={}\n self.update(*args,**kw)\n \n def _commit_removals(self):\n l=self._pending_removals\n d=self.data\n \n \n while l:\n key=l.pop()\n _remove_dead_weakref(d,key)\n \n def __getitem__(self,key):\n if self._pending_removals:\n self._commit_removals()\n o=self.data[key]()\n if o is None :\n raise KeyError(key)\n else :\n return o\n \n def __delitem__(self,key):\n if self._pending_removals:\n self._commit_removals()\n del self.data[key]\n \n def __len__(self):\n if self._pending_removals:\n self._commit_removals()\n return len(self.data)\n \n def __contains__(self,key):\n if self._pending_removals:\n self._commit_removals()\n try :\n o=self.data[key]()\n except KeyError:\n return False\n return o is not None\n \n def __repr__(self):\n return \"<%s at %#x>\"%(self.__class__.__name__,id(self))\n \n def __setitem__(self,key,value):\n if self._pending_removals:\n self._commit_removals()\n self.data[key]=KeyedRef(value,self._remove,key)\n \n def copy(self):\n if self._pending_removals:\n self._commit_removals()\n new=WeakValueDictionary()\n for key,wr in self.data.items():\n o=wr()\n if o is not None :\n new[key]=o\n return new\n \n __copy__=copy\n \n def __deepcopy__(self,memo):\n from copy import deepcopy\n if self._pending_removals:\n self._commit_removals()\n new=self.__class__()\n for key,wr in self.data.items():\n o=wr()\n if o is not None :\n new[deepcopy(key,memo)]=o\n return new\n \n def get(self,key,default=None ):\n if self._pending_removals:\n self._commit_removals()\n try :\n wr=self.data[key]\n except KeyError:\n return default\n else :\n o=wr()\n if o is None :\n \n return default\n else :\n return o\n \n def items(self):\n if self._pending_removals:\n self._commit_removals()\n with _IterationGuard(self):\n for k,wr in self.data.items():\n v=wr()\n if v is not None :\n yield k,v\n \n def keys(self):\n if self._pending_removals:\n self._commit_removals()\n with _IterationGuard(self):\n for k,wr in self.data.items():\n if wr()is not None :\n yield k\n \n __iter__=keys\n \n def itervaluerefs(self):\n ''\n\n\n\n\n\n\n\n \n 
if self._pending_removals:\n self._commit_removals()\n with _IterationGuard(self):\n yield from self.data.values()\n \n def values(self):\n if self._pending_removals:\n self._commit_removals()\n with _IterationGuard(self):\n for wr in self.data.values():\n obj=wr()\n if obj is not None :\n yield obj\n \n def popitem(self):\n if self._pending_removals:\n self._commit_removals()\n while True :\n key,wr=self.data.popitem()\n o=wr()\n if o is not None :\n return key,o\n \n def pop(self,key,*args):\n if self._pending_removals:\n self._commit_removals()\n try :\n o=self.data.pop(key)()\n except KeyError:\n o=None\n if o is None :\n if args:\n return args[0]\n else :\n raise KeyError(key)\n else :\n return o\n \n def setdefault(self,key,default=None ):\n try :\n o=self.data[key]()\n except KeyError:\n o=None\n if o is None :\n if self._pending_removals:\n self._commit_removals()\n self.data[key]=KeyedRef(default,self._remove,key)\n return default\n else :\n return o\n \n def update(*args,**kwargs):\n if not args:\n raise TypeError(\"descriptor 'update' of 'WeakValueDictionary' \"\n \"object needs an argument\")\n self,*args=args\n if len(args)>1:\n raise TypeError('expected at most 1 arguments, got %d'%len(args))\n dict=args[0]if args else None\n if self._pending_removals:\n self._commit_removals()\n d=self.data\n if dict is not None :\n if not hasattr(dict,\"items\"):\n dict=type({})(dict)\n for key,o in dict.items():\n d[key]=KeyedRef(o,self._remove,key)\n if len(kwargs):\n self.update(kwargs)\n \n def valuerefs(self):\n ''\n\n\n\n\n\n\n\n \n if self._pending_removals:\n self._commit_removals()\n return list(self.data.values())\n \n \nclass KeyedRef(ref):\n ''\n\n\n\n\n\n\n \n \n __slots__=\"key\",\n \n def __new__(type,ob,callback,key):\n self=ref.__new__(type,ob,callback)\n self.key=key\n return self\n \n def __init__(self,ob,callback,key):\n super().__init__(ob,callback)\n \n \nclass WeakKeyDictionary(_collections_abc.MutableMapping):\n ''\n\n\n\n\n\n\n\n \n \n def __init__(self,dict=None ):\n self.data={}\n def remove(k,selfref=ref(self)):\n self=selfref()\n if self is not None :\n if self._iterating:\n self._pending_removals.append(k)\n else :\n del self.data[k]\n self._remove=remove\n \n self._pending_removals=[]\n self._iterating=set()\n self._dirty_len=False\n if dict is not None :\n self.update(dict)\n \n def _commit_removals(self):\n \n \n \n \n l=self._pending_removals\n d=self.data\n while l:\n try :\n del d[l.pop()]\n except KeyError:\n pass\n \n def _scrub_removals(self):\n d=self.data\n self._pending_removals=[k for k in self._pending_removals if k in d]\n self._dirty_len=False\n \n def __delitem__(self,key):\n self._dirty_len=True\n del self.data[ref(key)]\n \n def __getitem__(self,key):\n return self.data[ref(key)]\n \n def __len__(self):\n if self._dirty_len and self._pending_removals:\n \n \n self._scrub_removals()\n return len(self.data)-len(self._pending_removals)\n \n def __repr__(self):\n return \"<%s at %#x>\"%(self.__class__.__name__,id(self))\n \n def __setitem__(self,key,value):\n self.data[ref(key,self._remove)]=value\n \n def copy(self):\n new=WeakKeyDictionary()\n for key,value in self.data.items():\n o=key()\n if o is not None :\n new[o]=value\n return new\n \n __copy__=copy\n \n def __deepcopy__(self,memo):\n from copy import deepcopy\n new=self.__class__()\n for key,value in self.data.items():\n o=key()\n if o is not None :\n new[o]=deepcopy(value,memo)\n return new\n \n def get(self,key,default=None ):\n return self.data.get(ref(key),default)\n \n def 
__contains__(self,key):\n try :\n wr=ref(key)\n except TypeError:\n return False\n return wr in self.data\n \n def items(self):\n with _IterationGuard(self):\n for wr,value in self.data.items():\n key=wr()\n if key is not None :\n yield key,value\n \n def keys(self):\n with _IterationGuard(self):\n for wr in self.data:\n obj=wr()\n if obj is not None :\n yield obj\n \n __iter__=keys\n \n def values(self):\n with _IterationGuard(self):\n for wr,value in self.data.items():\n if wr()is not None :\n yield value\n \n def keyrefs(self):\n ''\n\n\n\n\n\n\n\n \n return list(self.data)\n \n def popitem(self):\n self._dirty_len=True\n while True :\n key,value=self.data.popitem()\n o=key()\n if o is not None :\n return o,value\n \n def pop(self,key,*args):\n self._dirty_len=True\n return self.data.pop(ref(key),*args)\n \n def setdefault(self,key,default=None ):\n return self.data.setdefault(ref(key,self._remove),default)\n \n def update(self,dict=None ,**kwargs):\n d=self.data\n if dict is not None :\n if not hasattr(dict,\"items\"):\n dict=type({})(dict)\n for key,value in dict.items():\n d[ref(key,self._remove)]=value\n if len(kwargs):\n self.update(kwargs)\n \n \nclass finalize:\n ''\n\n\n\n\n\n\n\n\n\n\n \n \n \n \n \n \n __slots__=()\n _registry={}\n _shutdown=False\n _index_iter=itertools.count()\n _dirty=False\n _registered_with_atexit=False\n \n class _Info:\n __slots__=(\"weakref\",\"func\",\"args\",\"kwargs\",\"atexit\",\"index\")\n \n def __init__(self,obj,func,*args,**kwargs):\n if not self._registered_with_atexit:\n \n \n import atexit\n atexit.register(self._exitfunc)\n finalize._registered_with_atexit=True\n info=self._Info()\n info.weakref=ref(obj,self)\n info.func=func\n info.args=args\n info.kwargs=kwargs or None\n info.atexit=True\n info.index=next(self._index_iter)\n self._registry[self]=info\n finalize._dirty=True\n \n def __call__(self,_=None ):\n ''\n \n info=self._registry.pop(self,None )\n if info and not self._shutdown:\n return info.func(*info.args,**(info.kwargs or {}))\n \n def detach(self):\n ''\n \n info=self._registry.get(self)\n obj=info and info.weakref()\n if obj is not None and self._registry.pop(self,None ):\n return (obj,info.func,info.args,info.kwargs or {})\n \n def peek(self):\n ''\n \n info=self._registry.get(self)\n obj=info and info.weakref()\n if obj is not None :\n return (obj,info.func,info.args,info.kwargs or {})\n \n @property\n def alive(self):\n ''\n return self in self._registry\n \n @property\n def atexit(self):\n ''\n info=self._registry.get(self)\n return bool(info)and info.atexit\n \n @atexit.setter\n def atexit(self,value):\n info=self._registry.get(self)\n if info:\n info.atexit=bool(value)\n \n def __repr__(self):\n info=self._registry.get(self)\n obj=info and info.weakref()\n if obj is None :\n return '<%s object at %#x; dead>'%(type(self).__name__,id(self))\n else :\n return '<%s object at %#x; for %r at %#x>'%\\\n (type(self).__name__,id(self),type(obj).__name__,id(obj))\n \n @classmethod\n def _select_for_exit(cls):\n \n L=[(f,i)for (f,i)in cls._registry.items()if i.atexit]\n L.sort(key=lambda item:item[1].index)\n return [f for (f,i)in L]\n \n @classmethod\n def _exitfunc(cls):\n \n \n \n reenable_gc=False\n try :\n if cls._registry:\n import gc\n if gc.isenabled():\n reenable_gc=True\n gc.disable()\n pending=None\n while True :\n if pending is None or finalize._dirty:\n pending=cls._select_for_exit()\n finalize._dirty=False\n if not pending:\n break\n f=pending.pop()\n try :\n \n \n \n \n f()\n except Exception:\n 
sys.excepthook(*sys.exc_info())\n assert f not in cls._registry\n finally :\n \n finalize._shutdown=True\n if reenable_gc:\n gc.enable()\n", ["_collections_abc", "_weakref", "_weakrefset", "atexit", "copy", "gc", "itertools", "sys"]], "webbrowser": [".py", "\n\nfrom browser import window\n\ndef open(url,new=0,autoraise=True ):\n window.open(url)\n \ndef open_new(url):\n return window.open(url,\"_blank\")\n \ndef open_new_tab(url):\n return open(url)\n \n", ["browser"]], "zipfile": [".py", "''\n\n\n\n\nimport io\nimport os\nimport importlib.util\nimport sys\nimport time\nimport stat\nimport shutil\nimport struct\nimport binascii\nimport threading\n\ntry :\n import zlib\n crc32=zlib.crc32\nexcept ImportError:\n zlib=None\n crc32=binascii.crc32\n \ntry :\n import bz2\nexcept ImportError:\n bz2=None\n \ntry :\n import lzma\nexcept ImportError:\n lzma=None\n \n__all__=[\"BadZipFile\",\"BadZipfile\",\"error\",\n\"ZIP_STORED\",\"ZIP_DEFLATED\",\"ZIP_BZIP2\",\"ZIP_LZMA\",\n\"is_zipfile\",\"ZipInfo\",\"ZipFile\",\"PyZipFile\",\"LargeZipFile\"]\n\nclass BadZipFile(Exception):\n pass\n \n \nclass LargeZipFile(Exception):\n ''\n\n\n \n \nerror=BadZipfile=BadZipFile\n\n\nZIP64_LIMIT=(1 <<31)-1\nZIP_FILECOUNT_LIMIT=(1 <<16)-1\nZIP_MAX_COMMENT=(1 <<16)-1\n\n\nZIP_STORED=0\nZIP_DEFLATED=8\nZIP_BZIP2=12\nZIP_LZMA=14\n\n\nDEFAULT_VERSION=20\nZIP64_VERSION=45\nBZIP2_VERSION=46\nLZMA_VERSION=63\n\nMAX_EXTRACT_VERSION=63\n\n\n\n\n\n\n\n\n\nstructEndArchive=b\"<4s4H2LH\"\nstringEndArchive=b\"PK\\005\\006\"\nsizeEndCentDir=struct.calcsize(structEndArchive)\n\n_ECD_SIGNATURE=0\n_ECD_DISK_NUMBER=1\n_ECD_DISK_START=2\n_ECD_ENTRIES_THIS_DISK=3\n_ECD_ENTRIES_TOTAL=4\n_ECD_SIZE=5\n_ECD_OFFSET=6\n_ECD_COMMENT_SIZE=7\n\n\n_ECD_COMMENT=8\n_ECD_LOCATION=9\n\n\n\nstructCentralDir=\"<4s4B4HL2L5H2L\"\nstringCentralDir=b\"PK\\001\\002\"\nsizeCentralDir=struct.calcsize(structCentralDir)\n\n\n_CD_SIGNATURE=0\n_CD_CREATE_VERSION=1\n_CD_CREATE_SYSTEM=2\n_CD_EXTRACT_VERSION=3\n_CD_EXTRACT_SYSTEM=4\n_CD_FLAG_BITS=5\n_CD_COMPRESS_TYPE=6\n_CD_TIME=7\n_CD_DATE=8\n_CD_CRC=9\n_CD_COMPRESSED_SIZE=10\n_CD_UNCOMPRESSED_SIZE=11\n_CD_FILENAME_LENGTH=12\n_CD_EXTRA_FIELD_LENGTH=13\n_CD_COMMENT_LENGTH=14\n_CD_DISK_NUMBER_START=15\n_CD_INTERNAL_FILE_ATTRIBUTES=16\n_CD_EXTERNAL_FILE_ATTRIBUTES=17\n_CD_LOCAL_HEADER_OFFSET=18\n\n\n\nstructFileHeader=\"<4s2B4HL2L2H\"\nstringFileHeader=b\"PK\\003\\004\"\nsizeFileHeader=struct.calcsize(structFileHeader)\n\n_FH_SIGNATURE=0\n_FH_EXTRACT_VERSION=1\n_FH_EXTRACT_SYSTEM=2\n_FH_GENERAL_PURPOSE_FLAG_BITS=3\n_FH_COMPRESSION_METHOD=4\n_FH_LAST_MOD_TIME=5\n_FH_LAST_MOD_DATE=6\n_FH_CRC=7\n_FH_COMPRESSED_SIZE=8\n_FH_UNCOMPRESSED_SIZE=9\n_FH_FILENAME_LENGTH=10\n_FH_EXTRA_FIELD_LENGTH=11\n\n\nstructEndArchive64Locator=\"<4sLQL\"\nstringEndArchive64Locator=b\"PK\\x06\\x07\"\nsizeEndCentDir64Locator=struct.calcsize(structEndArchive64Locator)\n\n\n\nstructEndArchive64=\"<4sQ2H2L4Q\"\nstringEndArchive64=b\"PK\\x06\\x06\"\nsizeEndCentDir64=struct.calcsize(structEndArchive64)\n\n_CD64_SIGNATURE=0\n_CD64_DIRECTORY_RECSIZE=1\n_CD64_CREATE_VERSION=2\n_CD64_EXTRACT_VERSION=3\n_CD64_DISK_NUMBER=4\n_CD64_DISK_NUMBER_START=5\n_CD64_NUMBER_ENTRIES_THIS_DISK=6\n_CD64_NUMBER_ENTRIES_TOTAL=7\n_CD64_DIRECTORY_SIZE=8\n_CD64_OFFSET_START_CENTDIR=9\n\ndef _check_zipfile(fp):\n try :\n if _EndRecData(fp):\n return True\n except OSError:\n pass\n return False\n \ndef is_zipfile(filename):\n ''\n\n\n \n result=False\n try :\n if hasattr(filename,\"read\"):\n result=_check_zipfile(fp=filename)\n else :\n with open(filename,\"rb\")as fp:\n 
result=_check_zipfile(fp)\n except OSError:\n pass\n return result\n \ndef _EndRecData64(fpin,offset,endrec):\n ''\n\n \n try :\n fpin.seek(offset -sizeEndCentDir64Locator,2)\n except OSError:\n \n \n return endrec\n \n data=fpin.read(sizeEndCentDir64Locator)\n if len(data)!=sizeEndCentDir64Locator:\n return endrec\n sig,diskno,reloff,disks=struct.unpack(structEndArchive64Locator,data)\n if sig !=stringEndArchive64Locator:\n return endrec\n \n if diskno !=0 or disks !=1:\n raise BadZipFile(\"zipfiles that span multiple disks are not supported\")\n \n \n fpin.seek(offset -sizeEndCentDir64Locator -sizeEndCentDir64,2)\n data=fpin.read(sizeEndCentDir64)\n if len(data)!=sizeEndCentDir64:\n return endrec\n sig,sz,create_version,read_version,disk_num,disk_dir,\\\n dircount,dircount2,dirsize,diroffset=\\\n struct.unpack(structEndArchive64,data)\n if sig !=stringEndArchive64:\n return endrec\n \n \n endrec[_ECD_SIGNATURE]=sig\n endrec[_ECD_DISK_NUMBER]=disk_num\n endrec[_ECD_DISK_START]=disk_dir\n endrec[_ECD_ENTRIES_THIS_DISK]=dircount\n endrec[_ECD_ENTRIES_TOTAL]=dircount2\n endrec[_ECD_SIZE]=dirsize\n endrec[_ECD_OFFSET]=diroffset\n return endrec\n \n \ndef _EndRecData(fpin):\n ''\n\n\n \n \n \n fpin.seek(0,2)\n filesize=fpin.tell()\n \n \n \n \n try :\n fpin.seek(-sizeEndCentDir,2)\n except OSError:\n return None\n data=fpin.read()\n if (len(data)==sizeEndCentDir and\n data[0:4]==stringEndArchive and\n data[-2:]==b\"\\000\\000\"):\n \n endrec=struct.unpack(structEndArchive,data)\n endrec=list(endrec)\n \n \n endrec.append(b\"\")\n endrec.append(filesize -sizeEndCentDir)\n \n \n return _EndRecData64(fpin,-sizeEndCentDir,endrec)\n \n \n \n \n \n \n maxCommentStart=max(filesize -(1 <<16)-sizeEndCentDir,0)\n fpin.seek(maxCommentStart,0)\n data=fpin.read()\n start=data.rfind(stringEndArchive)\n if start >=0:\n \n recData=data[start:start+sizeEndCentDir]\n if len(recData)!=sizeEndCentDir:\n \n return None\n endrec=list(struct.unpack(structEndArchive,recData))\n commentSize=endrec[_ECD_COMMENT_SIZE]\n comment=data[start+sizeEndCentDir:start+sizeEndCentDir+commentSize]\n endrec.append(comment)\n endrec.append(maxCommentStart+start)\n \n \n return _EndRecData64(fpin,maxCommentStart+start -filesize,\n endrec)\n \n \n return None\n \n \nclass ZipInfo(object):\n ''\n \n __slots__=(\n 'orig_filename',\n 'filename',\n 'date_time',\n 'compress_type',\n '_compresslevel',\n 'comment',\n 'extra',\n 'create_system',\n 'create_version',\n 'extract_version',\n 'reserved',\n 'flag_bits',\n 'volume',\n 'internal_attr',\n 'external_attr',\n 'header_offset',\n 'CRC',\n 'compress_size',\n 'file_size',\n '_raw_time',\n )\n \n def __init__(self,filename=\"NoName\",date_time=(1980,1,1,0,0,0)):\n self.orig_filename=filename\n \n \n \n null_byte=filename.find(chr(0))\n if null_byte >=0:\n filename=filename[0:null_byte]\n \n \n \n if os.sep !=\"/\"and os.sep in filename:\n filename=filename.replace(os.sep,\"/\")\n \n self.filename=filename\n self.date_time=date_time\n \n if date_time[0]<1980:\n raise ValueError('ZIP does not support timestamps before 1980')\n \n \n self.compress_type=ZIP_STORED\n self._compresslevel=None\n self.comment=b\"\"\n self.extra=b\"\"\n if sys.platform =='win32':\n self.create_system=0\n else :\n \n self.create_system=3\n self.create_version=DEFAULT_VERSION\n self.extract_version=DEFAULT_VERSION\n self.reserved=0\n self.flag_bits=0\n self.volume=0\n self.internal_attr=0\n self.external_attr=0\n \n \n \n \n \n \n def __repr__(self):\n result=['<%s 
filename=%r'%(self.__class__.__name__,self.filename)]\n if self.compress_type !=ZIP_STORED:\n result.append(' compress_type=%s'%\n compressor_names.get(self.compress_type,\n self.compress_type))\n hi=self.external_attr >>16\n lo=self.external_attr&0xFFFF\n if hi:\n result.append(' filemode=%r'%stat.filemode(hi))\n if lo:\n result.append(' external_attr=%#x'%lo)\n isdir=self.is_dir()\n if not isdir or self.file_size:\n result.append(' file_size=%r'%self.file_size)\n if ((not isdir or self.compress_size)and\n (self.compress_type !=ZIP_STORED or\n self.file_size !=self.compress_size)):\n result.append(' compress_size=%r'%self.compress_size)\n result.append('>')\n return ''.join(result)\n \n def FileHeader(self,zip64=None ):\n ''\n dt=self.date_time\n dosdate=(dt[0]-1980)<<9 |dt[1]<<5 |dt[2]\n dostime=dt[3]<<11 |dt[4]<<5 |(dt[5]//2)\n if self.flag_bits&0x08:\n \n CRC=compress_size=file_size=0\n else :\n CRC=self.CRC\n compress_size=self.compress_size\n file_size=self.file_size\n \n extra=self.extra\n \n min_version=0\n if zip64 is None :\n zip64=file_size >ZIP64_LIMIT or compress_size >ZIP64_LIMIT\n if zip64:\n fmt='ZIP64_LIMIT or compress_size >ZIP64_LIMIT:\n if not zip64:\n raise LargeZipFile(\"Filesize would require ZIP64 extensions\")\n \n \n file_size=0xffffffff\n compress_size=0xffffffff\n min_version=ZIP64_VERSION\n \n if self.compress_type ==ZIP_BZIP2:\n min_version=max(BZIP2_VERSION,min_version)\n elif self.compress_type ==ZIP_LZMA:\n min_version=max(LZMA_VERSION,min_version)\n \n self.extract_version=max(min_version,self.extract_version)\n self.create_version=max(min_version,self.create_version)\n filename,flag_bits=self._encodeFilenameFlags()\n header=struct.pack(structFileHeader,stringFileHeader,\n self.extract_version,self.reserved,flag_bits,\n self.compress_type,dostime,dosdate,CRC,\n compress_size,file_size,\n len(filename),len(extra))\n return header+filename+extra\n \n def _encodeFilenameFlags(self):\n try :\n return self.filename.encode('ascii'),self.flag_bits\n except UnicodeEncodeError:\n return self.filename.encode('utf-8'),self.flag_bits |0x800\n \n def _decodeExtra(self):\n \n extra=self.extra\n unpack=struct.unpack\n while len(extra)>=4:\n tp,ln=unpack('len(extra):\n raise BadZipFile(\"Corrupt extra field %04x (size=%d)\"%(tp,ln))\n if tp ==0x0001:\n if ln >=24:\n counts=unpack('>1)^0xEDB88320\n else :\n crc >>=1\n return crc\n \n \n \n \n \n \n \n \n \ndef _ZipDecrypter(pwd):\n key0=305419896\n key1=591751049\n key2=878082192\n \n global _crctable\n if _crctable is None :\n _crctable=list(map(_gen_crc,range(256)))\n crctable=_crctable\n \n def crc32(ch,crc):\n ''\n return (crc >>8)^crctable[(crc ^ch)&0xFF]\n \n def update_keys(c):\n nonlocal key0,key1,key2\n key0=crc32(c,key0)\n key1=(key1+(key0&0xFF))&0xFFFFFFFF\n key1=(key1 *134775813+1)&0xFFFFFFFF\n key2=crc32(key1 >>24,key2)\n \n for p in pwd:\n update_keys(p)\n \n def decrypter(data):\n ''\n result=bytearray()\n append=result.append\n for c in data:\n k=key2 |2\n c ^=((k *(k ^1))>>8)&0xFF\n update_keys(c)\n append(c)\n return bytes(result)\n \n return decrypter\n \n \nclass LZMACompressor:\n\n def __init__(self):\n self._comp=None\n \n def _init(self):\n props=lzma._encode_filter_properties({'id':lzma.FILTER_LZMA1})\n self._comp=lzma.LZMACompressor(lzma.FORMAT_RAW,filters=[\n lzma._decode_filter_properties(lzma.FILTER_LZMA1,props)\n ])\n return struct.pack('')\n return ''.join(result)\n \n def readline(self,limit=-1):\n ''\n\n\n \n \n if limit <0:\n \n i=self._readbuffer.find(b'\\n',self._offset)+1\n if i >0:\n 
line=self._readbuffer[self._offset:i]\n self._offset=i\n return line\n \n return io.BufferedIOBase.readline(self,limit)\n \n def peek(self,n=1):\n ''\n if n >len(self._readbuffer)-self._offset:\n chunk=self.read(n)\n if len(chunk)>self._offset:\n self._readbuffer=chunk+self._readbuffer[self._offset:]\n self._offset=0\n else :\n self._offset -=len(chunk)\n \n \n return self._readbuffer[self._offset:self._offset+512]\n \n def readable(self):\n return True\n \n def read(self,n=-1):\n ''\n\n \n if n is None or n <0:\n buf=self._readbuffer[self._offset:]\n self._readbuffer=b''\n self._offset=0\n while not self._eof:\n buf +=self._read1(self.MAX_N)\n return buf\n \n end=n+self._offset\n if end 0 and not self._eof:\n data=self._read1(n)\n if n 0:\n while not self._eof:\n data=self._read1(n)\n if n len(data):\n data +=self._read2(n -len(data))\n else :\n data=self._read2(n)\n \n if self._compress_type ==ZIP_STORED:\n self._eof=self._compress_left <=0\n elif self._compress_type ==ZIP_DEFLATED:\n n=max(n,self.MIN_READ_SIZE)\n data=self._decompressor.decompress(data,n)\n self._eof=(self._decompressor.eof or\n self._compress_left <=0 and\n not self._decompressor.unconsumed_tail)\n if self._eof:\n data +=self._decompressor.flush()\n else :\n data=self._decompressor.decompress(data)\n self._eof=self._decompressor.eof or self._compress_left <=0\n \n data=data[:self._left]\n self._left -=len(data)\n if self._left <=0:\n self._eof=True\n self._update_crc(data)\n return data\n \n def _read2(self,n):\n if self._compress_left <=0:\n return b''\n \n n=max(n,self.MIN_READ_SIZE)\n n=min(n,self._compress_left)\n \n data=self._fileobj.read(n)\n self._compress_left -=len(data)\n if not data:\n raise EOFError\n \n if self._decrypter is not None :\n data=self._decrypter(data)\n return data\n \n def close(self):\n try :\n if self._close_fileobj:\n self._fileobj.close()\n finally :\n super().close()\n \n def seekable(self):\n return self._seekable\n \n def seek(self,offset,whence=0):\n if not self._seekable:\n raise io.UnsupportedOperation(\"underlying stream is not seekable\")\n curr_pos=self.tell()\n if whence ==0:\n new_pos=offset\n elif whence ==1:\n new_pos=curr_pos+offset\n elif whence ==2:\n new_pos=self._orig_file_size+offset\n else :\n raise ValueError(\"whence must be os.SEEK_SET (0), \"\n \"os.SEEK_CUR (1), or os.SEEK_END (2)\")\n \n if new_pos >self._orig_file_size:\n new_pos=self._orig_file_size\n \n if new_pos <0:\n new_pos=0\n \n read_offset=new_pos -curr_pos\n buff_offset=read_offset+self._offset\n \n if buff_offset >=0 and buff_offset 0:\n read_len=min(self.MAX_SEEK_READ,read_offset)\n self.read(read_len)\n read_offset -=read_len\n \n return self.tell()\n \n def tell(self):\n if not self._seekable:\n raise io.UnsupportedOperation(\"underlying stream is not seekable\")\n filepos=self._orig_file_size -self._left -len(self._readbuffer)+self._offset\n return filepos\n \n \nclass _ZipWriteFile(io.BufferedIOBase):\n def __init__(self,zf,zinfo,zip64):\n self._zinfo=zinfo\n self._zip64=zip64\n self._zipfile=zf\n self._compressor=_get_compressor(zinfo.compress_type,\n zinfo._compresslevel)\n self._file_size=0\n self._compress_size=0\n self._crc=0\n \n @property\n def _fileobj(self):\n return self._zipfile.fp\n \n def writable(self):\n return True\n \n def write(self,data):\n if self.closed:\n raise ValueError('I/O operation on closed file.')\n nbytes=len(data)\n self._file_size +=nbytes\n self._crc=crc32(data,self._crc)\n if self._compressor:\n data=self._compressor.compress(data)\n self._compress_size 
+=len(data)\n self._fileobj.write(data)\n return nbytes\n \n def close(self):\n if self.closed:\n return\n super().close()\n \n if self._compressor:\n buf=self._compressor.flush()\n self._compress_size +=len(buf)\n self._fileobj.write(buf)\n self._zinfo.compress_size=self._compress_size\n else :\n self._zinfo.compress_size=self._file_size\n self._zinfo.CRC=self._crc\n self._zinfo.file_size=self._file_size\n \n \n if self._zinfo.flag_bits&0x08:\n \n fmt='ZIP64_LIMIT:\n raise RuntimeError('File size unexpectedly exceeded ZIP64 '\n 'limit')\n if self._compress_size >ZIP64_LIMIT:\n raise RuntimeError('Compressed size unexpectedly exceeded '\n 'ZIP64 limit')\n \n \n \n \n self._zipfile.start_dir=self._fileobj.tell()\n self._fileobj.seek(self._zinfo.header_offset)\n self._fileobj.write(self._zinfo.FileHeader(self._zip64))\n self._fileobj.seek(self._zipfile.start_dir)\n \n self._zipfile._writing=False\n \n \n self._zipfile.filelist.append(self._zinfo)\n self._zipfile.NameToInfo[self._zinfo.filename]=self._zinfo\n \nclass ZipFile:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n fp=None\n _windows_illegal_name_trans_table=None\n \n def __init__(self,file,mode=\"r\",compression=ZIP_STORED,allowZip64=True ,\n compresslevel=None ):\n ''\n \n if mode not in ('r','w','x','a'):\n raise ValueError(\"ZipFile requires mode 'r', 'w', 'x', or 'a'\")\n \n _check_compression(compression)\n \n self._allowZip64=allowZip64\n self._didModify=False\n self.debug=0\n self.NameToInfo={}\n self.filelist=[]\n self.compression=compression\n self.compresslevel=compresslevel\n self.mode=mode\n self.pwd=None\n self._comment=b''\n \n \n if isinstance(file,os.PathLike):\n file=os.fspath(file)\n if isinstance(file,str):\n \n self._filePassed=0\n self.filename=file\n modeDict={'r':'rb','w':'w+b','x':'x+b','a':'r+b',\n 'r+b':'w+b','w+b':'wb','x+b':'xb'}\n filemode=modeDict[mode]\n while True :\n try :\n self.fp=io.open(file,filemode)\n except OSError:\n if filemode in modeDict:\n filemode=modeDict[filemode]\n continue\n raise\n break\n else :\n self._filePassed=1\n self.fp=file\n self.filename=getattr(file,'name',None )\n self._fileRefCnt=1\n self._lock=threading.RLock()\n self._seekable=True\n self._writing=False\n \n try :\n if mode =='r':\n self._RealGetContents()\n elif mode in ('w','x'):\n \n \n self._didModify=True\n try :\n self.start_dir=self.fp.tell()\n except (AttributeError,OSError):\n self.fp=_Tellable(self.fp)\n self.start_dir=0\n self._seekable=False\n else :\n \n try :\n self.fp.seek(self.start_dir)\n except (AttributeError,OSError):\n self._seekable=False\n elif mode =='a':\n try :\n \n self._RealGetContents()\n \n self.fp.seek(self.start_dir)\n except BadZipFile:\n \n self.fp.seek(0,2)\n \n \n \n self._didModify=True\n self.start_dir=self.fp.tell()\n else :\n raise ValueError(\"Mode must be 'r', 'w', 'x', or 'a'\")\n except :\n fp=self.fp\n self.fp=None\n self._fpclose(fp)\n raise\n \n def __enter__(self):\n return self\n \n def __exit__(self,type,value,traceback):\n self.close()\n \n def __repr__(self):\n result=['<%s.%s'%(self.__class__.__module__,\n self.__class__.__qualname__)]\n if self.fp is not None :\n if self._filePassed:\n result.append(' file=%r'%self.fp)\n elif self.filename is not None :\n result.append(' filename=%r'%self.filename)\n result.append(' mode=%r'%self.mode)\n else :\n result.append(' [closed]')\n result.append('>')\n return ''.join(result)\n \n def _RealGetContents(self):\n ''\n fp=self.fp\n try :\n endrec=_EndRecData(fp)\n except OSError:\n raise BadZipFile(\"File is not a zip 
file\")\n if not endrec:\n raise BadZipFile(\"File is not a zip file\")\n if self.debug >1:\n print(endrec)\n size_cd=endrec[_ECD_SIZE]\n offset_cd=endrec[_ECD_OFFSET]\n self._comment=endrec[_ECD_COMMENT]\n \n \n concat=endrec[_ECD_LOCATION]-size_cd -offset_cd\n if endrec[_ECD_SIGNATURE]==stringEndArchive64:\n \n concat -=(sizeEndCentDir64+sizeEndCentDir64Locator)\n \n if self.debug >2:\n inferred=concat+offset_cd\n print(\"given, inferred, offset\",offset_cd,inferred,concat)\n \n self.start_dir=offset_cd+concat\n fp.seek(self.start_dir,0)\n data=fp.read(size_cd)\n fp=io.BytesIO(data)\n total=0\n while total 2:\n print(centdir)\n filename=fp.read(centdir[_CD_FILENAME_LENGTH])\n flags=centdir[5]\n if flags&0x800:\n \n filename=filename.decode('utf-8')\n else :\n \n filename=filename.decode('cp437')\n \n x=ZipInfo(filename)\n x.extra=fp.read(centdir[_CD_EXTRA_FIELD_LENGTH])\n x.comment=fp.read(centdir[_CD_COMMENT_LENGTH])\n x.header_offset=centdir[_CD_LOCAL_HEADER_OFFSET]\n (x.create_version,x.create_system,x.extract_version,x.reserved,\n x.flag_bits,x.compress_type,t,d,\n x.CRC,x.compress_size,x.file_size)=centdir[1:12]\n if x.extract_version >MAX_EXTRACT_VERSION:\n raise NotImplementedError(\"zip file version %.1f\"%\n (x.extract_version /10))\n x.volume,x.internal_attr,x.external_attr=centdir[15:18]\n \n x._raw_time=t\n x.date_time=((d >>9)+1980,(d >>5)&0xF,d&0x1F,\n t >>11,(t >>5)&0x3F,(t&0x1F)*2)\n \n x._decodeExtra()\n x.header_offset=x.header_offset+concat\n self.filelist.append(x)\n self.NameToInfo[x.filename]=x\n \n \n total=(total+sizeCentralDir+centdir[_CD_FILENAME_LENGTH]\n +centdir[_CD_EXTRA_FIELD_LENGTH]\n +centdir[_CD_COMMENT_LENGTH])\n \n if self.debug >2:\n print(\"total\",total)\n \n \n def namelist(self):\n ''\n return [data.filename for data in self.filelist]\n \n def infolist(self):\n ''\n \n return self.filelist\n \n def printdir(self,file=None ):\n ''\n print(\"%-46s %19s %12s\"%(\"File Name\",\"Modified \",\"Size\"),\n file=file)\n for zinfo in self.filelist:\n date=\"%d-%02d-%02d %02d:%02d:%02d\"%zinfo.date_time[:6]\n print(\"%-46s %s %12d\"%(zinfo.filename,date,zinfo.file_size),\n file=file)\n \n def testzip(self):\n ''\n chunk_size=2 **20\n for zinfo in self.filelist:\n try :\n \n \n with self.open(zinfo.filename,\"r\")as f:\n while f.read(chunk_size):\n pass\n except BadZipFile:\n return zinfo.filename\n \n def getinfo(self,name):\n ''\n info=self.NameToInfo.get(name)\n if info is None :\n raise KeyError(\n 'There is no item named %r in the archive'%name)\n \n return info\n \n def setpassword(self,pwd):\n ''\n if pwd and not isinstance(pwd,bytes):\n raise TypeError(\"pwd: expected bytes, got %s\"%type(pwd).__name__)\n if pwd:\n self.pwd=pwd\n else :\n self.pwd=None\n \n @property\n def comment(self):\n ''\n return self._comment\n \n @comment.setter\n def comment(self,comment):\n if not isinstance(comment,bytes):\n raise TypeError(\"comment: expected bytes, got %s\"%type(comment).__name__)\n \n if len(comment)>ZIP_MAX_COMMENT:\n import warnings\n warnings.warn('Archive comment is too long; truncating to %d bytes'\n %ZIP_MAX_COMMENT,stacklevel=2)\n comment=comment[:ZIP_MAX_COMMENT]\n self._comment=comment\n self._didModify=True\n \n def read(self,name,pwd=None ):\n ''\n with self.open(name,\"r\",pwd)as fp:\n return fp.read()\n \n def open(self,name,mode=\"r\",pwd=None ,*,force_zip64=False ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if mode not in {\"r\",\"w\"}:\n raise ValueError('open() requires mode \"r\" or \"w\"')\n if pwd and not isinstance(pwd,bytes):\n raise 
TypeError(\"pwd: expected bytes, got %s\"%type(pwd).__name__)\n if pwd and (mode ==\"w\"):\n raise ValueError(\"pwd is only supported for reading files\")\n if not self.fp:\n raise ValueError(\n \"Attempt to use ZIP archive that was already closed\")\n \n \n if isinstance(name,ZipInfo):\n \n zinfo=name\n elif mode =='w':\n zinfo=ZipInfo(name)\n zinfo.compress_type=self.compression\n zinfo._compresslevel=self.compresslevel\n else :\n \n zinfo=self.getinfo(name)\n \n if mode =='w':\n return self._open_to_write(zinfo,force_zip64=force_zip64)\n \n if self._writing:\n raise ValueError(\"Can't read from the ZIP file while there \"\n \"is an open writing handle on it. \"\n \"Close the writing handle before trying to read.\")\n \n \n self._fileRefCnt +=1\n zef_file=_SharedFile(self.fp,zinfo.header_offset,\n self._fpclose,self._lock,lambda :self._writing)\n try :\n \n fheader=zef_file.read(sizeFileHeader)\n if len(fheader)!=sizeFileHeader:\n raise BadZipFile(\"Truncated file header\")\n fheader=struct.unpack(structFileHeader,fheader)\n if fheader[_FH_SIGNATURE]!=stringFileHeader:\n raise BadZipFile(\"Bad magic number for file header\")\n \n fname=zef_file.read(fheader[_FH_FILENAME_LENGTH])\n if fheader[_FH_EXTRA_FIELD_LENGTH]:\n zef_file.read(fheader[_FH_EXTRA_FIELD_LENGTH])\n \n if zinfo.flag_bits&0x20:\n \n raise NotImplementedError(\"compressed patched data (flag bit 5)\")\n \n if zinfo.flag_bits&0x40:\n \n raise NotImplementedError(\"strong encryption (flag bit 6)\")\n \n if zinfo.flag_bits&0x800:\n \n fname_str=fname.decode(\"utf-8\")\n else :\n fname_str=fname.decode(\"cp437\")\n \n if fname_str !=zinfo.orig_filename:\n raise BadZipFile(\n 'File name in directory %r and header %r differ.'\n %(zinfo.orig_filename,fname))\n \n \n is_encrypted=zinfo.flag_bits&0x1\n zd=None\n if is_encrypted:\n if not pwd:\n pwd=self.pwd\n if not pwd:\n raise RuntimeError(\"File %r is encrypted, password \"\n \"required for extraction\"%name)\n \n zd=_ZipDecrypter(pwd)\n \n \n \n \n \n header=zef_file.read(12)\n h=zd(header[0:12])\n if zinfo.flag_bits&0x8:\n \n check_byte=(zinfo._raw_time >>8)&0xff\n else :\n \n check_byte=(zinfo.CRC >>24)&0xff\n if h[11]!=check_byte:\n raise RuntimeError(\"Bad password for file %r\"%name)\n \n return ZipExtFile(zef_file,mode,zinfo,zd,True )\n except :\n zef_file.close()\n raise\n \n def _open_to_write(self,zinfo,force_zip64=False ):\n if force_zip64 and not self._allowZip64:\n raise ValueError(\n \"force_zip64 is True, but allowZip64 was False when opening \"\n \"the ZIP file.\"\n )\n if self._writing:\n raise ValueError(\"Can't write to the ZIP file while there is \"\n \"another write handle open on it. 
\"\n \"Close the first handle before opening another.\")\n \n \n if not hasattr(zinfo,'file_size'):\n zinfo.file_size=0\n zinfo.compress_size=0\n zinfo.CRC=0\n \n zinfo.flag_bits=0x00\n if zinfo.compress_type ==ZIP_LZMA:\n \n zinfo.flag_bits |=0x02\n if not self._seekable:\n zinfo.flag_bits |=0x08\n \n if not zinfo.external_attr:\n zinfo.external_attr=0o600 <<16\n \n \n zip64=self._allowZip64 and\\\n (force_zip64 or zinfo.file_size *1.05 >ZIP64_LIMIT)\n \n if self._seekable:\n self.fp.seek(self.start_dir)\n zinfo.header_offset=self.fp.tell()\n \n self._writecheck(zinfo)\n self._didModify=True\n \n self.fp.write(zinfo.FileHeader(zip64))\n \n self._writing=True\n return _ZipWriteFile(self,zinfo,zip64)\n \n def extract(self,member,path=None ,pwd=None ):\n ''\n\n\n\n \n if path is None :\n path=os.getcwd()\n else :\n path=os.fspath(path)\n \n return self._extract_member(member,path,pwd)\n \n def extractall(self,path=None ,members=None ,pwd=None ):\n ''\n\n\n\n \n if members is None :\n members=self.namelist()\n \n if path is None :\n path=os.getcwd()\n else :\n path=os.fspath(path)\n \n for zipinfo in members:\n self._extract_member(zipinfo,path,pwd)\n \n @classmethod\n def _sanitize_windows_name(cls,arcname,pathsep):\n ''\n table=cls._windows_illegal_name_trans_table\n if not table:\n illegal=':<>|\"?*'\n table=str.maketrans(illegal,'_'*len(illegal))\n cls._windows_illegal_name_trans_table=table\n arcname=arcname.translate(table)\n \n arcname=(x.rstrip('.')for x in arcname.split(pathsep))\n \n arcname=pathsep.join(x for x in arcname if x)\n return arcname\n \n def _extract_member(self,member,targetpath,pwd):\n ''\n\n \n if not isinstance(member,ZipInfo):\n member=self.getinfo(member)\n \n \n \n arcname=member.filename.replace('/',os.path.sep)\n \n if os.path.altsep:\n arcname=arcname.replace(os.path.altsep,os.path.sep)\n \n \n arcname=os.path.splitdrive(arcname)[1]\n invalid_path_parts=('',os.path.curdir,os.path.pardir)\n arcname=os.path.sep.join(x for x in arcname.split(os.path.sep)\n if x not in invalid_path_parts)\n if os.path.sep =='\\\\':\n \n arcname=self._sanitize_windows_name(arcname,os.path.sep)\n \n targetpath=os.path.join(targetpath,arcname)\n targetpath=os.path.normpath(targetpath)\n \n \n upperdirs=os.path.dirname(targetpath)\n if upperdirs and not os.path.exists(upperdirs):\n os.makedirs(upperdirs)\n \n if member.is_dir():\n if not os.path.isdir(targetpath):\n os.mkdir(targetpath)\n return targetpath\n \n with self.open(member,pwd=pwd)as source,\\\n open(targetpath,\"wb\")as target:\n shutil.copyfileobj(source,target)\n \n return targetpath\n \n def _writecheck(self,zinfo):\n ''\n if zinfo.filename in self.NameToInfo:\n import warnings\n warnings.warn('Duplicate name: %r'%zinfo.filename,stacklevel=3)\n if self.mode not in ('w','x','a'):\n raise ValueError(\"write() requires mode 'w', 'x', or 'a'\")\n if not self.fp:\n raise ValueError(\n \"Attempt to write ZIP archive that was already closed\")\n _check_compression(zinfo.compress_type)\n if not self._allowZip64:\n requires_zip64=None\n if len(self.filelist)>=ZIP_FILECOUNT_LIMIT:\n requires_zip64=\"Files count\"\n elif zinfo.file_size >ZIP64_LIMIT:\n requires_zip64=\"Filesize\"\n elif zinfo.header_offset >ZIP64_LIMIT:\n requires_zip64=\"Zipfile size\"\n if requires_zip64:\n raise LargeZipFile(requires_zip64+\n \" would require ZIP64 extensions\")\n \n def write(self,filename,arcname=None ,\n compress_type=None ,compresslevel=None ):\n ''\n \n if not self.fp:\n raise ValueError(\n \"Attempt to write to ZIP archive that was already 
closed\")\n if self._writing:\n raise ValueError(\n \"Can't write to ZIP archive while an open writing handle exists\"\n )\n \n zinfo=ZipInfo.from_file(filename,arcname)\n \n if zinfo.is_dir():\n zinfo.compress_size=0\n zinfo.CRC=0\n else :\n if compress_type is not None :\n zinfo.compress_type=compress_type\n else :\n zinfo.compress_type=self.compression\n \n if compresslevel is not None :\n zinfo._compresslevel=compresslevel\n else :\n zinfo._compresslevel=self.compresslevel\n \n if zinfo.is_dir():\n with self._lock:\n if self._seekable:\n self.fp.seek(self.start_dir)\n zinfo.header_offset=self.fp.tell()\n if zinfo.compress_type ==ZIP_LZMA:\n \n zinfo.flag_bits |=0x02\n \n self._writecheck(zinfo)\n self._didModify=True\n \n self.filelist.append(zinfo)\n self.NameToInfo[zinfo.filename]=zinfo\n self.fp.write(zinfo.FileHeader(False ))\n self.start_dir=self.fp.tell()\n else :\n with open(filename,\"rb\")as src,self.open(zinfo,'w')as dest:\n shutil.copyfileobj(src,dest,1024 *8)\n \n def writestr(self,zinfo_or_arcname,data,\n compress_type=None ,compresslevel=None ):\n ''\n\n\n\n \n if isinstance(data,str):\n data=data.encode(\"utf-8\")\n if not isinstance(zinfo_or_arcname,ZipInfo):\n zinfo=ZipInfo(filename=zinfo_or_arcname,\n date_time=time.localtime(time.time())[:6])\n zinfo.compress_type=self.compression\n zinfo._compresslevel=self.compresslevel\n if zinfo.filename[-1]=='/':\n zinfo.external_attr=0o40775 <<16\n zinfo.external_attr |=0x10\n else :\n zinfo.external_attr=0o600 <<16\n else :\n zinfo=zinfo_or_arcname\n \n if not self.fp:\n raise ValueError(\n \"Attempt to write to ZIP archive that was already closed\")\n if self._writing:\n raise ValueError(\n \"Can't write to ZIP archive while an open writing handle exists.\"\n )\n \n if compress_type is not None :\n zinfo.compress_type=compress_type\n \n if compresslevel is not None :\n zinfo._compresslevel=compresslevel\n \n zinfo.file_size=len(data)\n with self._lock:\n with self.open(zinfo,mode='w')as dest:\n dest.write(data)\n \n def __del__(self):\n ''\n self.close()\n \n def close(self):\n ''\n \n if self.fp is None :\n return\n \n if self._writing:\n raise ValueError(\"Can't close the ZIP file while there is \"\n \"an open writing handle on it. 
\"\n \"Close the writing handle before closing the zip.\")\n \n try :\n if self.mode in ('w','x','a')and self._didModify:\n with self._lock:\n if self._seekable:\n self.fp.seek(self.start_dir)\n self._write_end_record()\n finally :\n fp=self.fp\n self.fp=None\n self._fpclose(fp)\n \n def _write_end_record(self):\n for zinfo in self.filelist:\n dt=zinfo.date_time\n dosdate=(dt[0]-1980)<<9 |dt[1]<<5 |dt[2]\n dostime=dt[3]<<11 |dt[4]<<5 |(dt[5]//2)\n extra=[]\n if zinfo.file_size >ZIP64_LIMIT\\\n or zinfo.compress_size >ZIP64_LIMIT:\n extra.append(zinfo.file_size)\n extra.append(zinfo.compress_size)\n file_size=0xffffffff\n compress_size=0xffffffff\n else :\n file_size=zinfo.file_size\n compress_size=zinfo.compress_size\n \n if zinfo.header_offset >ZIP64_LIMIT:\n extra.append(zinfo.header_offset)\n header_offset=0xffffffff\n else :\n header_offset=zinfo.header_offset\n \n extra_data=zinfo.extra\n min_version=0\n if extra:\n \n extra_data=struct.pack(\n 'ZIP_FILECOUNT_LIMIT:\n requires_zip64=\"Files count\"\n elif centDirOffset >ZIP64_LIMIT:\n requires_zip64=\"Central directory offset\"\n elif centDirSize >ZIP64_LIMIT:\n requires_zip64=\"Central directory size\"\n if requires_zip64:\n \n if not self._allowZip64:\n raise LargeZipFile(requires_zip64+\n \" would require ZIP64 extensions\")\n zip64endrec=struct.pack(\n structEndArchive64,stringEndArchive64,\n 44,45,45,0,0,centDirCount,centDirCount,\n centDirSize,centDirOffset)\n self.fp.write(zip64endrec)\n \n zip64locrec=struct.pack(\n structEndArchive64Locator,\n stringEndArchive64Locator,0,pos2,1)\n self.fp.write(zip64locrec)\n centDirCount=min(centDirCount,0xFFFF)\n centDirSize=min(centDirSize,0xFFFFFFFF)\n centDirOffset=min(centDirOffset,0xFFFFFFFF)\n \n endrec=struct.pack(structEndArchive,stringEndArchive,\n 0,0,centDirCount,centDirCount,\n centDirSize,centDirOffset,len(self._comment))\n self.fp.write(endrec)\n self.fp.write(self._comment)\n self.fp.flush()\n \n def _fpclose(self,fp):\n assert self._fileRefCnt >0\n self._fileRefCnt -=1\n if not self._fileRefCnt and not self._filePassed:\n fp.close()\n \n \nclass PyZipFile(ZipFile):\n ''\n \n def __init__(self,file,mode=\"r\",compression=ZIP_STORED,\n allowZip64=True ,optimize=-1):\n ZipFile.__init__(self,file,mode=mode,compression=compression,\n allowZip64=allowZip64)\n self._optimize=optimize\n \n def writepy(self,pathname,basename=\"\",filterfunc=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n pathname=os.fspath(pathname)\n if filterfunc and not filterfunc(pathname):\n if self.debug:\n label='path'if os.path.isdir(pathname)else 'file'\n print('%s %r skipped by filterfunc'%(label,pathname))\n return\n dir,name=os.path.split(pathname)\n if os.path.isdir(pathname):\n initname=os.path.join(pathname,\"__init__.py\")\n if os.path.isfile(initname):\n \n if basename:\n basename=\"%s/%s\"%(basename,name)\n else :\n basename=name\n if self.debug:\n print(\"Adding package in\",pathname,\"as\",basename)\n fname,arcname=self._get_codename(initname[0:-3],basename)\n if self.debug:\n print(\"Adding\",arcname)\n self.write(fname,arcname)\n dirlist=sorted(os.listdir(pathname))\n dirlist.remove(\"__init__.py\")\n \n for filename in dirlist:\n path=os.path.join(pathname,filename)\n root,ext=os.path.splitext(filename)\n if os.path.isdir(path):\n if os.path.isfile(os.path.join(path,\"__init__.py\")):\n \n self.writepy(path,basename,\n filterfunc=filterfunc)\n elif ext ==\".py\":\n if filterfunc and not filterfunc(path):\n if self.debug:\n print('file %r skipped by filterfunc'%path)\n continue\n 
fname,arcname=self._get_codename(path[0:-3],\n basename)\n if self.debug:\n print(\"Adding\",arcname)\n self.write(fname,arcname)\n else :\n \n if self.debug:\n print(\"Adding files from directory\",pathname)\n for filename in sorted(os.listdir(pathname)):\n path=os.path.join(pathname,filename)\n root,ext=os.path.splitext(filename)\n if ext ==\".py\":\n if filterfunc and not filterfunc(path):\n if self.debug:\n print('file %r skipped by filterfunc'%path)\n continue\n fname,arcname=self._get_codename(path[0:-3],\n basename)\n if self.debug:\n print(\"Adding\",arcname)\n self.write(fname,arcname)\n else :\n if pathname[-3:]!=\".py\":\n raise RuntimeError(\n 'Files added with writepy() must end with \".py\"')\n fname,arcname=self._get_codename(pathname[0:-3],basename)\n if self.debug:\n print(\"Adding file\",arcname)\n self.write(fname,arcname)\n \n def _get_codename(self,pathname,basename):\n ''\n\n\n\n\n \n def _compile(file,optimize=-1):\n import py_compile\n if self.debug:\n print(\"Compiling\",file)\n try :\n py_compile.compile(file,doraise=True ,optimize=optimize)\n except py_compile.PyCompileError as err:\n print(err.msg)\n return False\n return True\n \n file_py=pathname+\".py\"\n file_pyc=pathname+\".pyc\"\n pycache_opt0=importlib.util.cache_from_source(file_py,optimization='')\n pycache_opt1=importlib.util.cache_from_source(file_py,optimization=1)\n pycache_opt2=importlib.util.cache_from_source(file_py,optimization=2)\n if self._optimize ==-1:\n \n if (os.path.isfile(file_pyc)and\n os.stat(file_pyc).st_mtime >=os.stat(file_py).st_mtime):\n \n arcname=fname=file_pyc\n elif (os.path.isfile(pycache_opt0)and\n os.stat(pycache_opt0).st_mtime >=os.stat(file_py).st_mtime):\n \n \n fname=pycache_opt0\n arcname=file_pyc\n elif (os.path.isfile(pycache_opt1)and\n os.stat(pycache_opt1).st_mtime >=os.stat(file_py).st_mtime):\n \n \n fname=pycache_opt1\n arcname=file_pyc\n elif (os.path.isfile(pycache_opt2)and\n os.stat(pycache_opt2).st_mtime >=os.stat(file_py).st_mtime):\n \n \n fname=pycache_opt2\n arcname=file_pyc\n else :\n \n if _compile(file_py):\n if sys.flags.optimize ==0:\n fname=pycache_opt0\n elif sys.flags.optimize ==1:\n fname=pycache_opt1\n else :\n fname=pycache_opt2\n arcname=file_pyc\n else :\n fname=arcname=file_py\n else :\n \n if self._optimize ==0:\n fname=pycache_opt0\n arcname=file_pyc\n else :\n arcname=file_pyc\n if self._optimize ==1:\n fname=pycache_opt1\n elif self._optimize ==2:\n fname=pycache_opt2\n else :\n msg=\"invalid value for 'optimize': {!r}\".format(self._optimize)\n raise ValueError(msg)\n if not (os.path.isfile(fname)and\n os.stat(fname).st_mtime >=os.stat(file_py).st_mtime):\n if not _compile(file_py,optimize=self._optimize):\n fname=arcname=file_py\n archivename=os.path.split(arcname)[1]\n if basename:\n archivename=\"%s/%s\"%(basename,archivename)\n return (fname,archivename)\n \n \ndef main(args=None ):\n import argparse\n \n description='A simple command-line interface for zipfile module.'\n parser=argparse.ArgumentParser(description=description)\n group=parser.add_mutually_exclusive_group(required=True )\n group.add_argument('-l','--list',metavar='',\n help='Show listing of a zipfile')\n group.add_argument('-e','--extract',nargs=2,\n metavar=('',''),\n help='Extract zipfile into target dir')\n group.add_argument('-c','--create',nargs='+',\n metavar=('',''),\n help='Create zipfile from sources')\n group.add_argument('-t','--test',metavar='',\n help='Test if a zipfile is valid')\n args=parser.parse_args(args)\n \n if args.test is not None :\n 
src=args.test\n with ZipFile(src,'r')as zf:\n badfile=zf.testzip()\n if badfile:\n print(\"The following enclosed file is corrupted: {!r}\".format(badfile))\n print(\"Done testing\")\n \n elif args.list is not None :\n src=args.list\n with ZipFile(src,'r')as zf:\n zf.printdir()\n \n elif args.extract is not None :\n src,curdir=args.extract\n with ZipFile(src,'r')as zf:\n zf.extractall(curdir)\n \n elif args.create is not None :\n zip_name=args.create.pop(0)\n files=args.create\n \n def addToZip(zf,path,zippath):\n if os.path.isfile(path):\n zf.write(path,zippath,ZIP_DEFLATED)\n elif os.path.isdir(path):\n if zippath:\n zf.write(path,zippath)\n for nm in sorted(os.listdir(path)):\n addToZip(zf,\n os.path.join(path,nm),os.path.join(zippath,nm))\n \n \n with ZipFile(zip_name,'w')as zf:\n for path in files:\n zippath=os.path.basename(path)\n if not zippath:\n zippath=os.path.basename(os.path.dirname(path))\n if zippath in ('',os.curdir,os.pardir):\n zippath=''\n addToZip(zf,path,zippath)\n \nif __name__ ==\"__main__\":\n main()\n", ["argparse", "binascii", "bz2", "importlib.util", "io", "lzma", "os", "py_compile", "shutil", "stat", "struct", "sys", "threading", "time", "warnings", "zlib"]], "zlib": [".py", "from _zlib_utils import lz_generator\n\nclass BitIO:\n\n def __init__(self,bytestream=b''):\n self.bytestream=bytearray(bytestream)\n self.bytenum=0\n self.bitnum=0\n \n @property\n def pos(self):\n return self.bytenum *8+self.bitnum\n \n def read(self,nb,order=\"lsf\",trace=False ):\n result=0\n coef=1 if order ==\"lsf\"else 2 **(nb -1)\n for _ in range(nb):\n if self.bitnum ==8:\n if self.bytenum ==len(self.bytestream)-1:\n return None\n self.bytenum +=1\n self.bitnum=0\n mask=2 **self.bitnum\n if trace:\n print(\"bit\",int(bool(mask&self.bytestream[self.bytenum])))\n result +=coef *bool(mask&self.bytestream[self.bytenum])\n self.bitnum +=1\n if order ==\"lsf\":\n coef *=2\n else :\n coef //=2\n return result\n \n def move(self,nb):\n if nb ==0:\n return\n elif nb >0:\n bitpos=self.bitnum+nb\n while bitpos >7:\n self.bytenum +=1\n if self.bytenum ==len(self.bytestream):\n raise Exception(\"can't move {} bits\".format(nb))\n bitpos -=8\n self.bitnum=bitpos\n else :\n bitpos=self.bitnum+nb\n while bitpos <0:\n self.bytenum -=1\n if self.bytenum ==-1:\n raise Exception(\"can't move {} bits\".format(nb))\n bitpos +=8\n self.bitnum=bitpos\n \n def show(self):\n res=\"\"\n for x in self.bytestream:\n s=str(bin(x))[2:]\n s=\"0\"*(8 -len(s))+s\n res +=s+\" \"\n return res\n \n def write(self,*bits):\n for bit in bits:\n if not self.bytestream:\n self.bytestream.append(0)\n byte=self.bytestream[self.bytenum]\n if self.bitnum ==8:\n if self.bytenum ==len(self.bytestream)-1:\n byte=0\n self.bytestream +=bytes([byte])\n self.bytenum +=1\n self.bitnum=0\n mask=2 **self.bitnum\n if bit:\n byte |=mask\n else :\n byte &=~mask\n self.bytestream[self.bytenum]=byte\n self.bitnum +=1\n \n def write_int(self,value,nb,order=\"lsf\"):\n ''\n if value >=2 **nb:\n raise ValueError(\"can't write value on {} bits\".format(nb))\n bits=[]\n while value:\n bits.append(value&1)\n value >>=1\n \n bits=bits+[0]*(nb -len(bits))\n if order !=\"lsf\":\n bits.reverse()\n assert len(bits)==nb\n self.write(*bits)\n \nclass ResizeError(Exception):\n pass\n \n \nclass Node:\n\n def __init__(self,char=None ,weight=0,level=0):\n self.char=char\n self.is_leaf=char is not None\n self.level=level\n self.weight=weight\n \n def add(self,children):\n self.children=children\n for child in self.children:\n child.parent=self\n 
child.level=self.level+1\n \n \nclass Tree:\n\n def __init__(self,root):\n self.root=root\n \n def length(self):\n self.root.level=0\n node=self.root\n nb_levels=0\n def set_level(node):\n nonlocal nb_levels\n for child in node.children:\n child.level=node.level+1\n nb_levels=max(nb_levels,child.level)\n if not child.is_leaf:\n set_level(child)\n set_level(self.root)\n return nb_levels\n \n def reduce_tree(self):\n ''\n\n\n \n currentlen=self.length()\n deepest=self.nodes_at(currentlen)\n deepest_leaves=[node for node in deepest if node.is_leaf]\n rightmost_leaf=deepest_leaves[-1]\n sibling=rightmost_leaf.parent.children[0]\n \n \n parent=rightmost_leaf.parent\n grand_parent=parent.parent\n rank=grand_parent.children.index(parent)\n children=grand_parent.children\n children[rank]=rightmost_leaf\n grand_parent.add(children)\n \n \n up_level=rightmost_leaf.level -2\n while up_level >0:\n nodes=self.nodes_at(up_level)\n leaf_nodes=[node for node in nodes if node.is_leaf]\n if leaf_nodes:\n leftmost_leaf=leaf_nodes[0]\n \n parent=leftmost_leaf.parent\n rank=parent.children.index(leftmost_leaf)\n new_node=Node()\n new_node.level=leftmost_leaf.level\n children=[sibling,leftmost_leaf]\n new_node.add(children)\n parent.children[rank]=new_node\n new_node.parent=parent\n break\n else :\n up_level -=1\n if up_level ==0:\n raise ResizeError\n \n def nodes_at(self,level,top=None ):\n ''\n res=[]\n if top is None :\n top=self.root\n if top.level ==level:\n res=[top]\n elif not top.is_leaf:\n for child in top.children:\n res +=self.nodes_at(level,child)\n return res\n \n def reduce(self,maxlevels):\n ''\n while self.length()>maxlevels:\n self.reduce_tree()\n \n def codes(self,node=None ,code=''):\n ''\n \n if node is None :\n self.dic={}\n node=self.root\n if node.is_leaf:\n self.dic[node.char]=code\n else :\n for i,child in enumerate(node.children):\n self.codes(child,code+str(i))\n return self.dic\n \n \ndef codelengths_from_frequencies(freqs):\n ''\n\n\n\n \n freqs=sorted(freqs.items(),\n key=lambda item:(item[1],-item[0]),reverse=True )\n nodes=[Node(char=key,weight=value)for (key,value)in freqs]\n while len(nodes)>1:\n right,left=nodes.pop(),nodes.pop()\n node=Node(weight=right.weight+left.weight)\n node.add([left,right])\n if not nodes:\n nodes.append(node)\n else :\n pos=0\n while pos node.weight:\n pos +=1\n nodes.insert(pos,node)\n \n top=nodes[0]\n tree=Tree(top)\n tree.reduce(15)\n \n codes=tree.codes()\n \n code_items=list(codes.items())\n code_items.sort(key=lambda item:(len(item[1]),item[0]))\n return [(car,len(value))for car,value in code_items]\n \ndef normalized(codelengths):\n car,codelength=codelengths[0]\n value=0\n codes={car:\"0\"*codelength}\n \n for (newcar,nbits)in codelengths[1:]:\n value +=1\n bvalue=str(bin(value))[2:]\n bvalue=\"0\"*(codelength -len(bvalue))+bvalue\n if nbits >codelength:\n codelength=nbits\n bvalue +=\"0\"*(codelength -len(bvalue))\n value=int(bvalue,2)\n assert len(bvalue)==nbits\n codes[newcar]=bvalue\n \n return codes\n \ndef make_tree(node,codes):\n if not hasattr(node,\"parent\"):\n node.code=''\n children=[]\n for bit in '01':\n next_code=node.code+bit\n if next_code in codes:\n child=Node(char=codes[next_code])\n else :\n child=Node()\n child.code=next_code\n children.append(child)\n node.add(children)\n for child in children:\n if not child.is_leaf:\n make_tree(child,codes)\n \ndef decompresser(codelengths):\n lengths=list(codelengths.items())\n \n lengths=[x for x in lengths if x[1]>0]\n lengths.sort(key=lambda item:(item[1],item[0]))\n 
codes=normalized(lengths)\n codes={value:key for key,value in codes.items()}\n root=Node()\n make_tree(root,codes)\n return {\"root\":root,\"codes\":codes}\n \ndef tree_from_codelengths(codelengths):\n return decompresser(codelengths)[\"root\"]\n \nclass Error(Exception):\n pass\n \n \nfixed_codelengths={}\nfor car in range(144):\n fixed_codelengths[car]=8\nfor car in range(144,256):\n fixed_codelengths[car]=9\nfor car in range(256,280):\n fixed_codelengths[car]=7\nfor car in range(280,288):\n fixed_codelengths[car]=8\n \nfixed_decomp=decompresser(fixed_codelengths)\nfixed_lit_len_tree=fixed_decomp[\"root\"]\nfixed_lit_len_codes={value:key\nfor (key,value)in fixed_decomp[\"codes\"].items()}\n\ndef cl_encode(lengths):\n ''\n \n dic={char:len(code)for (char,code)in lengths.items()}\n items=[dic.get(i,0)for i in range(max(dic)+1)]\n pos=0\n while pos 3:\n yield (16,3)\n nb -=3\n yield (16,nb)\n pos +=repeat+1\n \ndef read_codelengths(reader,root,num):\n ''\n\n \n node=root\n lengths=[]\n nb=0\n while len(lengths)256:\n \n if child.char <265:\n length=child.char -254\n elif child.char <269:\n length=11+2 *(child.char -265)+reader.read(1)\n elif child.char <273:\n length=19+4 *(child.char -269)+reader.read(2)\n elif child.char <277:\n length=35+8 *(child.char -273)+reader.read(3)\n elif child.char <281:\n length=67+16 *(child.char -277)+reader.read(4)\n elif child.char <285:\n length=131+31 *(child.char -281)+reader.read(5)\n elif child.char ==285:\n length=258\n return (\"length\",length)\n else :\n node=child\n \ndef adler32(source):\n a=1\n b=0\n for byte in source:\n a +=byte\n a %=65521\n b +=a\n b %=65521\n return a,b\n \n \ndef compress_dynamic(out,source,store,lit_len_count,distance_count):\n\n\n lit_len_count[256]=1\n \n \n \n \n \n \n lit_len_codes=normalized(codelengths_from_frequencies(lit_len_count))\n HLIT=1+max(lit_len_codes)-257\n \n \n \n \n \n \n coded_lit_len=list(cl_encode(lit_len_codes))\n \n \n distance_codes=normalized(codelengths_from_frequencies(distance_count))\n HDIST=max(distance_codes)\n coded_distance=list(cl_encode(distance_codes))\n \n \n codelengths_count={}\n for coded in coded_lit_len,coded_distance:\n for item in coded:\n length=item[0]if isinstance(item,tuple)else item\n codelengths_count[length]=codelengths_count.get(length,0)+1\n \n \n codelengths_codes=normalized(\n codelengths_from_frequencies(codelengths_count))\n codelengths_dict={char:len(value)\n for (char,value)in codelengths_codes.items()}\n \n alphabet=(16,17,18,0,8,7,9,6,10,5,11,4,12,3,13,2,14,1,\n 15)\n \n \n codelengths_list=[codelengths_dict.get(car,0)for car in alphabet]\n \n while codelengths_list[-1]==0:\n codelengths_list.pop()\n HCLEN=len(codelengths_list)-4\n \n out.write(0,1)\n \n out.write_int(HLIT,5)\n out.write_int(HDIST,5)\n out.write_int(HCLEN,4)\n \n \n for length,car in zip(codelengths_list,alphabet):\n out.write_int(length,3)\n \n \n for item in coded_lit_len+coded_distance:\n if isinstance(item,tuple):\n length,extra=item\n code=codelengths_codes[length]\n value,nbits=int(code,2),len(code)\n out.write_int(value,nbits,order=\"msf\")\n if length ==16:\n out.write_int(extra,2)\n elif length ==17:\n out.write_int(extra,3)\n elif length ==18:\n out.write_int(extra,7)\n else :\n code=codelengths_codes[item]\n value,nbits=int(code,2),len(code)\n out.write_int(value,nbits,order=\"msf\")\n \n \n for item in store:\n if isinstance(item,tuple):\n length,extra_length,distance,extra_distance=item\n \n code=lit_len_codes[length]\n value,nb=int(code,2),len(code)\n 
out.write_int(value,nb,order=\"msf\")\n \n value,nb=extra_length\n if nb:\n out.write_int(value,nb)\n \n code=distance_codes[distance]\n value,nb=int(code,2),len(code)\n out.write_int(value,nb,order=\"msf\")\n \n value,nb=extra_distance\n if nb:\n out.write_int(value,nb)\n else :\n literal=item\n code=lit_len_codes[item]\n value,nb=int(code,2),len(code)\n out.write_int(value,nb,order=\"msf\")\n \ndef compress_fixed(out,source,items):\n ''\n print(\"fixed\",items)\n out.write(1,0)\n \n for item in items:\n if isinstance(item,tuple):\n length,extra_length,distance,extra_distance=item\n \n code=fixed_lit_len_codes[length]\n value,nb=int(code,2),len(code)\n out.write_int(value,nb,order=\"msf\")\n \n value,nb=extra_length\n if nb:\n out.write_int(value,nb)\n \n out.write_int(distance,5,order=\"msf\")\n \n value,nb=extra_distance\n if nb:\n out.write_int(value,nb)\n else :\n literal=item\n code=fixed_lit_len_codes[item]\n value,nb=int(code,2),len(code)\n out.write_int(value,nb,order=\"msf\")\n \ndef compress(source,window_size=32 *1024):\n\n\n\n lit_len_count={}\n \n \n distance_count={}\n \n store=[]\n replaced=0\n nb_tuples=0\n \n for item in lz_generator(source,window_size):\n if isinstance(item,tuple):\n nb_tuples +=1\n length,distance=item\n replaced +=length\n \n \n length_code,*extra_length=length_to_code(length)\n \n lit_len_count[length_code]=lit_len_count.get(length_code,0)+1\n \n \n \n distance_code,*extra_dist=distance_to_code(distance)\n \n distance_count[distance_code]=\\\n distance_count.get(distance_code,0)+1\n \n \n store.append((length_code,extra_length,distance_code,\n extra_dist))\n else :\n literal=item\n lit_len_count[literal]=lit_len_count.get(literal,0)+1\n store.append(literal)\n \n store.append(256)\n \n \n \n \n score=replaced -100 -(nb_tuples *20 //8)\n \n \n out=BitIO()\n \n \n out.write_int(8,4)\n size=window_size >>8\n nb=0\n while size >1:\n size >>=1\n nb +=1\n out.write_int(nb,4)\n out.write_int(0x9c,8)\n \n out.write(1)\n \n if score <0:\n compress_fixed(out,source,store)\n else :\n compress_dynamic(out,source,store,lit_len_count,distance_count)\n \n \n while out.bitnum !=8:\n out.write(0)\n \n \n a,b=adler32(source)\n a1,a2=divmod(a,256)\n b1,b2=divmod(b,256)\n out.write_int(b1,8)\n out.write_int(b2,8)\n out.write_int(a1,8)\n out.write_int(a2,8)\n \n return bytes(out.bytestream)\n \ndef decompress(buf):\n reader=BitIO(buf)\n \n CM=reader.read(4)\n if CM !=8:\n raise Error(\"unsupported compression method: {}\".format(CM))\n \n CINFO=reader.read(4)\n \n FLG=reader.read(8)\n \n result=bytearray()\n \n while True :\n BFINAL=reader.read(1)\n \n BTYPE=reader.read(2)\n \n if BTYPE ==0b01:\n \n \n root=fixed_lit_len_tree\n \n while True :\n \n _type,value=read_literal_or_length(reader,root)\n if _type =='eob':\n break\n elif _type =='literal':\n result.append(value)\n elif _type =='length':\n length=value\n \n dist_code=reader.read(5,\"msf\")\n if dist_code <3:\n distance=dist_code+1\n else :\n nb=(dist_code //2)-1\n extra=reader.read(nb)\n half,delta=divmod(dist_code,2)\n distance=(1+(2 **half)+\n delta *(2 **(half -1))+extra)\n for _ in range(length):\n result.append(result[-distance])\n \n node=root\n else :\n node=child\n \n elif BTYPE ==0b10:\n \n \n \n lit_len_tree,distance_tree=dynamic_trees(reader)\n \n while True :\n \n _type,value=read_literal_or_length(reader,lit_len_tree)\n if _type =='eob':\n break\n elif _type =='literal':\n result.append(value)\n elif _type =='length':\n \n length=value\n distance=read_distance(reader,distance_tree)\n for _ in 
range(length):\n result.append(result[-distance])\n \n if BFINAL:\n \n b1=256 *buf[-4]+buf[-3]\n a1=256 *buf[-2]+buf[-1]\n \n a,b=adler32(result)\n \n assert a ==a1\n assert b ==b1\n \n return bytes(result)\n", ["_zlib_utils"]], "_abcoll": [".py", "\n\n\n\"\"\"Abstract Base Classes (ABCs) for collections, according to PEP 3119.\n\nDON'T USE THIS MODULE DIRECTLY! The classes here should be imported\nvia collections; they are defined here only to alleviate certain\nbootstrapping issues. Unit tests are in test_collections.\n\"\"\"\n\n\n\n\nimport sys\n\n__all__=[\"Hashable\",\"Iterable\",\"Iterator\",\n\"Sized\",\"Container\",\"Callable\",\n\"Set\",\"MutableSet\",\n\"Mapping\",\"MutableMapping\",\n\"MappingView\",\"KeysView\",\"ItemsView\",\"ValuesView\",\n\"Sequence\",\"MutableSequence\",\n\"ByteString\",\n]\n\n\"\"\"\n### collection related types which are not exposed through builtin ###\n## iterators ##\n#fixme brython\n#bytes_iterator = type(iter(b''))\nbytes_iterator = type(iter(''))\n#fixme brython\n#bytearray_iterator = type(iter(bytearray()))\n#callable_iterator = ???\ndict_keyiterator = type(iter({}.keys()))\ndict_valueiterator = type(iter({}.values()))\ndict_itemiterator = type(iter({}.items()))\nlist_iterator = type(iter([]))\nlist_reverseiterator = type(iter(reversed([])))\nrange_iterator = type(iter(range(0)))\nset_iterator = type(iter(set()))\nstr_iterator = type(iter(\"\"))\ntuple_iterator = type(iter(()))\nzip_iterator = type(iter(zip()))\n## views ##\ndict_keys = type({}.keys())\ndict_values = type({}.values())\ndict_items = type({}.items())\n## misc ##\ndict_proxy = type(type.__dict__)\n\"\"\"\n\ndef abstractmethod(self):\n return self\n \n \n \n \n \nclass Iterable:\n\n @abstractmethod\n def __iter__(self):\n while False :\n yield None\n \n @classmethod\n def __subclasshook__(cls,C):\n if cls is Iterable:\n if any(\"__iter__\"in B.__dict__ for B in C.__mro__):\n return True\n return NotImplemented\n \n \n \nclass Sized:\n\n @abstractmethod\n def __len__(self):\n return 0\n \n @classmethod\n def __subclasshook__(cls,C):\n if cls is Sized:\n if any(\"__len__\"in B.__dict__ for B in C.__mro__):\n return True\n return NotImplemented\n \n \n \nclass Container:\n\n @abstractmethod\n def __contains__(self,x):\n return False\n \n @classmethod\n def __subclasshook__(cls,C):\n if cls is Container:\n if any(\"__contains__\"in B.__dict__ for B in C.__mro__):\n return True\n return NotImplemented\n \n \n \n \nclass Mapping(Sized,Iterable,Container):\n\n @abstractmethod\n def __getitem__(self,key):\n raise KeyError\n \n def get(self,key,default=None ):\n try :\n return self[key]\n except KeyError:\n return default\n \n def __contains__(self,key):\n try :\n self[key]\n except KeyError:\n return False\n else :\n return True\n \n def keys(self):\n return KeysView(self)\n \n def items(self):\n return ItemsView(self)\n \n def values(self):\n return ValuesView(self)\n \n def __eq__(self,other):\n if not isinstance(other,Mapping):\n return NotImplemented\n return dict(self.items())==dict(other.items())\n \n def __ne__(self,other):\n return not (self ==other)\n \n \nclass MutableMapping(Mapping):\n\n @abstractmethod\n def __setitem__(self,key,value):\n raise KeyError\n \n @abstractmethod\n def __delitem__(self,key):\n raise KeyError\n \n __marker=object()\n \n def pop(self,key,default=__marker):\n try :\n value=self[key]\n except KeyError:\n if default is self.__marker:\n raise\n return default\n else :\n del self[key]\n return value\n \n def popitem(self):\n try :\n key=next(iter(self))\n 
except StopIteration:\n raise KeyError\n value=self[key]\n del self[key]\n return key,value\n \n def clear(self):\n try :\n while True :\n self.popitem()\n except KeyError:\n pass\n \n def update(*args,**kwds):\n if len(args)>2:\n raise TypeError(\"update() takes at most 2 positional \"\n \"arguments ({} given)\".format(len(args)))\n elif not args:\n raise TypeError(\"update() takes at least 1 argument (0 given)\")\n self=args[0]\n other=args[1]if len(args)>=2 else ()\n \n if isinstance(other,Mapping):\n for key in other:\n self[key]=other[key]\n elif hasattr(other,\"keys\"):\n for key in other.keys():\n self[key]=other[key]\n else :\n for key,value in other:\n self[key]=value\n for key,value in kwds.items():\n self[key]=value\n \n def setdefault(self,key,default=None ):\n try :\n return self[key]\n except KeyError:\n self[key]=default\n return default\n \n \n", ["sys"]], "_codecs": [".py", "\ndef ascii_decode(*args,**kw):\n pass\n \ndef ascii_encode(*args,**kw):\n pass\n \ndef charbuffer_encode(*args,**kw):\n pass\n \ndef charmap_build(*args,**kw):\n pass\n \ndef charmap_decode(*args,**kw):\n pass\n \ndef charmap_encode(*args,**kw):\n pass\n \ndef decode(obj,encoding=\"utf-8\",errors=\"strict\"):\n ''\n\n\n\n\n\n \n return __BRYTHON__.decode(obj,encoding,errors)\n \ndef encode(obj,encoding=\"utf-8\",errors=\"strict\"):\n ''\n\n\n\n\n\n \n return __BRYTHON__.encode(obj,encoding,errors)\n \ndef escape_decode(*args,**kw):\n pass\n \ndef escape_encode(*args,**kw):\n pass\n \ndef latin_1_decode(*args,**kw):\n pass\n \ndef latin_1_encode(*args,**kw):\n pass\n \ndef lookup(encoding):\n ''\n\n \n if encoding in ('utf-8','utf_8'):\n from browser import console\n import encodings.utf_8\n return encodings.utf_8.getregentry()\n \n LookupError(encoding)\n \ndef lookup_error(*args,**kw):\n ''\n\n \n pass\n \ndef mbcs_decode(*args,**kw):\n pass\n \ndef mbcs_encode(*args,**kw):\n pass\n \ndef raw_unicode_escape_decode(*args,**kw):\n pass\n \ndef raw_unicode_escape_encode(*args,**kw):\n pass\n \ndef readbuffer_encode(*args,**kw):\n pass\n \ndef register(*args,**kw):\n ''\n\n\n\n \n pass\n \ndef register_error(*args,**kw):\n ''\n\n\n\n\n \n pass\n \ndef unicode_escape_decode(*args,**kw):\n pass\n \ndef unicode_escape_encode(*args,**kw):\n pass\n \ndef unicode_internal_decode(*args,**kw):\n pass\n \ndef unicode_internal_encode(*args,**kw):\n pass\n \ndef utf_16_be_decode(*args,**kw):\n pass\n \ndef utf_16_be_encode(*args,**kw):\n pass\n \ndef utf_16_decode(*args,**kw):\n pass\n \ndef utf_16_encode(*args,**kw):\n pass\n \ndef utf_16_ex_decode(*args,**kw):\n pass\n \ndef utf_16_le_decode(*args,**kw):\n pass\n \ndef utf_16_le_encode(*args,**kw):\n pass\n \ndef utf_32_be_decode(*args,**kw):\n pass\n \ndef utf_32_be_encode(*args,**kw):\n pass\n \ndef utf_32_decode(*args,**kw):\n pass\n \ndef utf_32_encode(*args,**kw):\n pass\n \ndef utf_32_ex_decode(*args,**kw):\n pass\n \ndef utf_32_le_decode(*args,**kw):\n pass\n \ndef utf_32_le_encode(*args,**kw):\n pass\n \ndef utf_7_decode(*args,**kw):\n pass\n \ndef utf_7_encode(*args,**kw):\n pass\n \ndef utf_8_decode(decoder,bytes_obj,errors,*args):\n return (bytes_obj.decode(\"utf-8\"),len(bytes_obj))\n \ndef utf_8_encode(*args,**kw):\n input=args[0]\n if len(args)==2:\n errors=args[1]\n else :\n errors=kw.get('errors','strict')\n \n \n return (bytes(input,'utf-8'),len(input))\n", ["browser", "encodings.utf_8"]], "_collections": [".py", "\n\n\n\n\n\n\n\n\n\nimport operator\n\n\n\ndef _thread_ident():\n return -1\n \n 
\nn=30\nLFTLNK=n\nRGTLNK=n+1\nBLOCKSIZ=n+2\n\n\n\n\n\n\n\n\nclass deque:\n\n def __new__(cls,iterable=(),*args,**kw):\n \n \n self=object.__new__(cls,*args,**kw)\n self.clear()\n return self\n \n def __init__(self,iterable=(),maxlen=None ):\n object.__init__(self)\n self.clear()\n if maxlen is not None :\n if maxlen <0:\n raise ValueError(\"maxlen must be non-negative\")\n self._maxlen=maxlen\n add=self.append\n for elem in iterable:\n add(elem)\n \n @property\n def maxlen(self):\n return self._maxlen\n \n def clear(self):\n self.right=self.left=[None ]*BLOCKSIZ\n self.rightndx=n //2\n self.leftndx=n //2+1\n self.length=0\n self.state=0\n \n def append(self,x):\n self.state +=1\n self.rightndx +=1\n if self.rightndx ==n:\n newblock=[None ]*BLOCKSIZ\n self.right[RGTLNK]=newblock\n newblock[LFTLNK]=self.right\n self.right=newblock\n self.rightndx=0\n self.length +=1\n self.right[self.rightndx]=x\n if self.maxlen is not None and self.length >self.maxlen:\n self.popleft()\n \n def appendleft(self,x):\n self.state +=1\n self.leftndx -=1\n if self.leftndx ==-1:\n newblock=[None ]*BLOCKSIZ\n self.left[LFTLNK]=newblock\n newblock[RGTLNK]=self.left\n self.left=newblock\n self.leftndx=n -1\n self.length +=1\n self.left[self.leftndx]=x\n if self.maxlen is not None and self.length >self.maxlen:\n self.pop()\n \n def extend(self,iterable):\n if iterable is self:\n iterable=list(iterable)\n for elem in iterable:\n self.append(elem)\n \n def extendleft(self,iterable):\n if iterable is self:\n iterable=list(iterable)\n for elem in iterable:\n self.appendleft(elem)\n \n def pop(self):\n if self.left is self.right and self.leftndx >self.rightndx:\n \n raise IndexError(\"pop from an empty deque\")\n x=self.right[self.rightndx]\n self.right[self.rightndx]=None\n self.length -=1\n self.rightndx -=1\n self.state +=1\n if self.rightndx ==-1:\n prevblock=self.right[LFTLNK]\n if prevblock is None :\n \n self.rightndx=n //2\n self.leftndx=n //2+1\n else :\n prevblock[RGTLNK]=None\n self.right[LFTLNK]=None\n self.right=prevblock\n self.rightndx=n -1\n return x\n \n def popleft(self):\n if self.left is self.right and self.leftndx >self.rightndx:\n \n raise IndexError(\"pop from an empty deque\")\n x=self.left[self.leftndx]\n self.left[self.leftndx]=None\n self.length -=1\n self.leftndx +=1\n self.state +=1\n if self.leftndx ==n:\n prevblock=self.left[RGTLNK]\n if prevblock is None :\n \n self.rightndx=n //2\n self.leftndx=n //2+1\n else :\n prevblock[LFTLNK]=None\n self.left[RGTLNK]=None\n self.left=prevblock\n self.leftndx=0\n return x\n \n def count(self,value):\n c=0\n for item in self:\n if item ==value:\n c +=1\n return c\n \n def remove(self,value):\n \n for i in range(len(self)):\n if self[i]==value:\n del self[i]\n return\n raise ValueError(\"deque.remove(x): x not in deque\")\n \n def rotate(self,n=1):\n length=len(self)\n if length ==0:\n return\n halflen=(length+1)>>1\n if n >halflen or n <-halflen:\n n %=length\n if n >halflen:\n n -=length\n elif n <-halflen:\n n +=length\n while n >0:\n self.appendleft(self.pop())\n n -=1\n while n <0:\n self.append(self.popleft())\n n +=1\n \n def reverse(self):\n ''\n leftblock=self.left\n rightblock=self.right\n leftindex=self.leftndx\n rightindex=self.rightndx\n for i in range(self.length //2):\n \n assert leftblock !=rightblock or leftindex =0:\n block=self.left\n while block:\n l,r=0,n\n if block is self.left:\n l=self.leftndx\n if block is self.right:\n r=self.rightndx+1\n span=r -l\n if index =negative_span:\n return block,r+index\n index -=negative_span\n 
block=block[LFTLNK]\n raise IndexError(\"deque index out of range\")\n \n def __getitem__(self,index):\n block,index=self.__getref(index)\n return block[index]\n \n def __setitem__(self,index,value):\n block,index=self.__getref(index)\n block[index]=value\n \n def __delitem__(self,index):\n length=len(self)\n if index >=0:\n if index >=length:\n raise IndexError(\"deque index out of range\")\n self.rotate(-index)\n self.popleft()\n self.rotate(index)\n else :\n \n index=index ^(2 **31)\n if index >=length:\n raise IndexError(\"deque index out of range\")\n self.rotate(index)\n self.pop()\n self.rotate(-index)\n \n def __reduce_ex__(self,proto):\n return type(self),(list(self),self.maxlen)\n \n def __hash__(self):\n \n raise TypeError(\"deque objects are unhashable\")\n \n def __copy__(self):\n return self.__class__(self,self.maxlen)\n \n \n def __eq__(self,other):\n if isinstance(other,deque):\n return list(self)==list(other)\n else :\n return NotImplemented\n \n def __ne__(self,other):\n if isinstance(other,deque):\n return list(self)!=list(other)\n else :\n return NotImplemented\n \n def __lt__(self,other):\n if isinstance(other,deque):\n return list(self)list(other)\n else :\n return NotImplemented\n \n def __ge__(self,other):\n if isinstance(other,deque):\n return list(self)>=list(other)\n else :\n return NotImplemented\n \n def __iadd__(self,other):\n self.extend(other)\n return self\n \n \nclass deque_iterator(object):\n\n def __init__(self,deq,itergen):\n self.counter=len(deq)\n def giveup():\n self.counter=0\n \n raise RuntimeError(\"deque mutated during iteration\")\n self._gen=itergen(deq.state,giveup)\n \n def __next__(self):\n res=self._gen.__next__()\n self.counter -=1\n return res\n \n def __iter__(self):\n return self\n \nclass defaultdict(dict):\n\n def __init__(self,*args,**kwds):\n if len(args)>0:\n default_factory=args[0]\n args=args[1:]\n if not callable(default_factory)and default_factory is not None :\n raise TypeError(\"first argument must be callable\")\n else :\n default_factory=None\n dict.__init__(self,*args,**kwds)\n self.default_factory=default_factory\n self.update(*args,**kwds)\n super(defaultdict,self).__init__(*args,**kwds)\n \n def __missing__(self,key):\n \n if self.default_factory is None :\n raise KeyError(key)\n self[key]=value=self.default_factory()\n return value\n \n def __repr__(self,recurse=set()):\n if id(self)in recurse:\n return \"defaultdict(...)\"\n try :\n recurse.add(id(self))\n return \"defaultdict(%s, %s)\"%(repr(self.default_factory),super(defaultdict,self).__repr__())\n finally :\n recurse.remove(id(self))\n \n def copy(self):\n return type(self)(self.default_factory,self)\n \n def __copy__(self):\n return self.copy()\n \n def __reduce__(self):\n \n \n \n \n \n \n \n \n \n \n \n return (type(self),(self.default_factory,),None ,None ,self.items())\n \nfrom operator import itemgetter as _itemgetter\nfrom keyword import iskeyword as _iskeyword\nimport sys as _sys\n\ndef namedtuple(typename,field_names,verbose=False ,rename=False ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n \n \n if isinstance(field_names,str):\n field_names=field_names.replace(',',' ').split()\n field_names=tuple(map(str,field_names))\n if rename:\n names=list(field_names)\n seen=set()\n for i,name in enumerate(names):\n if (not min(c.isalnum()or c =='_'for c in name)or _iskeyword(name)\n or not name or name[0].isdigit()or name.startswith('_')\n or name in seen):\n names[i]='_%d'%i\n seen.add(name)\n field_names=tuple(names)\n for name in 
(typename,)+field_names:\n if not min(c.isalnum()or c =='_'for c in name):\n raise ValueError('Type names and field names can only contain alphanumeric characters and underscores: %r'%name)\n if _iskeyword(name):\n raise ValueError('Type names and field names cannot be a keyword: %r'%name)\n if name[0].isdigit():\n raise ValueError('Type names and field names cannot start with a number: %r'%name)\n seen_names=set()\n for name in field_names:\n if name.startswith('_')and not rename:\n raise ValueError('Field names cannot start with an underscore: %r'%name)\n if name in seen_names:\n raise ValueError('Encountered duplicate field name: %r'%name)\n seen_names.add(name)\n \n \n numfields=len(field_names)\n argtxt=repr(field_names).replace(\"'\",\"\")[1:-1]\n reprtxt=', '.join('%s=%%r'%name for name in field_names)\n \n template='''class %(typename)s(tuple):\n '%(typename)s(%(argtxt)s)' \\n\n __slots__ = () \\n\n _fields = %(field_names)r \\n\n def __new__(_cls, %(argtxt)s):\n return tuple.__new__(_cls, (%(argtxt)s)) \\n\n @classmethod\n def _make(cls, iterable, new=tuple.__new__, len=len):\n 'Make a new %(typename)s object from a sequence or iterable'\n result = new(cls, iterable)\n if len(result) != %(numfields)d:\n raise TypeError('Expected %(numfields)d arguments, got %%d' %% len(result))\n return result \\n\n def __repr__(self):\n return '%(typename)s(%(reprtxt)s)' %% self \\n\n def _asdict(self):\n 'Return a new dict which maps field names to their values'\n return dict(zip(self._fields, self)) \\n\n def _replace(_self, **kwds):\n 'Return a new %(typename)s object replacing specified fields with new values'\n result = _self._make(map(kwds.pop, %(field_names)r, _self))\n if kwds:\n raise ValueError('Got unexpected field names: %%r' %% kwds.keys())\n return result \\n\n def __getnewargs__(self):\n return tuple(self) \\n\\n'''%locals()\n for i,name in enumerate(field_names):\n template +=' %s = _property(_itemgetter(%d))\\n'%(name,i)\n \n if verbose:\n print(template)\n \n \n namespace=dict(_itemgetter=_itemgetter,__name__='namedtuple_%s'%typename,\n _property=property,_tuple=tuple)\n try :\n exec(template,namespace)\n except SyntaxError as e:\n raise SyntaxError(e.message+':\\n'+template)\n result=namespace[typename]\n \n \n \n \n \n try :\n result.__module__=_sys._getframe(1).f_globals.get('__name__','__main__')\n except (AttributeError,ValueError):\n pass\n \n return result\n \nif __name__ =='__main__':\n Point=namedtuple('Point',['x','y'])\n p=Point(11,y=22)\n print(p[0]+p[1])\n x,y=p\n print(x,y)\n print(p.x+p.y)\n print(p)\n", ["keyword", "operator", "sys"]], "_collections_abc": [".py", "\n\n\n\"\"\"Abstract Base Classes (ABCs) for collections, according to PEP 3119.\n\nUnit tests are in test_collections.\n\"\"\"\n\nfrom abc import ABCMeta,abstractmethod\nimport 
sys\n\n__all__=[\"Awaitable\",\"Coroutine\",\n\"AsyncIterable\",\"AsyncIterator\",\"AsyncGenerator\",\n\"Hashable\",\"Iterable\",\"Iterator\",\"Generator\",\"Reversible\",\n\"Sized\",\"Container\",\"Callable\",\"Collection\",\n\"Set\",\"MutableSet\",\n\"Mapping\",\"MutableMapping\",\n\"MappingView\",\"KeysView\",\"ItemsView\",\"ValuesView\",\n\"Sequence\",\"MutableSequence\",\n\"ByteString\",\n]\n\n\n\n\n\n__name__=\"collections.abc\"\n\n\n\n\n\n\n\n\nbytes_iterator=type(iter(b''))\nbytearray_iterator=type(iter(bytearray()))\n\ndict_keyiterator=type(iter({}.keys()))\ndict_valueiterator=type(iter({}.values()))\ndict_itemiterator=type(iter({}.items()))\nlist_iterator=type(iter([]))\nlist_reverseiterator=type(iter(reversed([])))\nrange_iterator=type(iter(range(0)))\nlongrange_iterator=type(iter(range(1 <<1000)))\nset_iterator=type(iter(set()))\nstr_iterator=type(iter(\"\"))\ntuple_iterator=type(iter(()))\nzip_iterator=type(iter(zip()))\n\ndict_keys=type({}.keys())\ndict_values=type({}.values())\ndict_items=type({}.items())\n\nmappingproxy=type(type.__dict__)\ngenerator=type((lambda :(yield ))())\n\nasync def _coro():pass\n_coro=_coro()\ncoroutine=type(_coro)\n_coro.close()\ndel _coro\n\nasync def _ag():yield\n_ag=_ag()\nasync_generator=type(_ag)\ndel _ag\n\n\n\n\ndef _check_methods(C,*methods):\n mro=C.__mro__\n for method in methods:\n for B in mro:\n if method in B.__dict__:\n if B.__dict__[method]is None :\n return NotImplemented\n break\n else :\n return NotImplemented\n return True\n \nclass Hashable(metaclass=ABCMeta):\n\n __slots__=()\n \n @abstractmethod\n def __hash__(self):\n return 0\n \n @classmethod\n def __subclasshook__(cls,C):\n if cls is Hashable:\n return _check_methods(C,\"__hash__\")\n return NotImplemented\n \n \nclass Awaitable(metaclass=ABCMeta):\n\n __slots__=()\n \n @abstractmethod\n def __await__(self):\n yield\n \n @classmethod\n def __subclasshook__(cls,C):\n if cls is Awaitable:\n return _check_methods(C,\"__await__\")\n return NotImplemented\n \n \nclass Coroutine(Awaitable):\n\n __slots__=()\n \n @abstractmethod\n def send(self,value):\n ''\n\n \n raise StopIteration\n \n @abstractmethod\n def throw(self,typ,val=None ,tb=None ):\n ''\n\n \n if val is None :\n if tb is None :\n raise typ\n val=typ()\n if tb is not None :\n val=val.with_traceback(tb)\n raise val\n \n def close(self):\n ''\n \n try :\n self.throw(GeneratorExit)\n except (GeneratorExit,StopIteration):\n pass\n else :\n raise RuntimeError(\"coroutine ignored GeneratorExit\")\n \n @classmethod\n def __subclasshook__(cls,C):\n if cls is Coroutine:\n return _check_methods(C,'__await__','send','throw','close')\n return NotImplemented\n \n \nCoroutine.register(coroutine)\n\n\nclass AsyncIterable(metaclass=ABCMeta):\n\n __slots__=()\n \n @abstractmethod\n def __aiter__(self):\n return AsyncIterator()\n \n @classmethod\n def __subclasshook__(cls,C):\n if cls is AsyncIterable:\n return _check_methods(C,\"__aiter__\")\n return NotImplemented\n \n \nclass AsyncIterator(AsyncIterable):\n\n __slots__=()\n \n @abstractmethod\n async def __anext__(self):\n ''\n raise StopAsyncIteration\n \n def __aiter__(self):\n return self\n \n @classmethod\n def __subclasshook__(cls,C):\n if cls is AsyncIterator:\n return _check_methods(C,\"__anext__\",\"__aiter__\")\n return NotImplemented\n \n \nclass AsyncGenerator(AsyncIterator):\n\n __slots__=()\n \n async def __anext__(self):\n ''\n\n \n return await self.asend(None )\n \n @abstractmethod\n async def asend(self,value):\n ''\n\n \n raise StopAsyncIteration\n \n 
@abstractmethod\n async def athrow(self,typ,val=None ,tb=None ):\n ''\n\n \n if val is None :\n if tb is None :\n raise typ\n val=typ()\n if tb is not None :\n val=val.with_traceback(tb)\n raise val\n \n async def aclose(self):\n ''\n \n try :\n await self.athrow(GeneratorExit)\n except (GeneratorExit,StopAsyncIteration):\n pass\n else :\n raise RuntimeError(\"asynchronous generator ignored GeneratorExit\")\n \n @classmethod\n def __subclasshook__(cls,C):\n if cls is AsyncGenerator:\n return _check_methods(C,'__aiter__','__anext__',\n 'asend','athrow','aclose')\n return NotImplemented\n \n \nAsyncGenerator.register(async_generator)\n\n\nclass Iterable(metaclass=ABCMeta):\n\n __slots__=()\n \n @abstractmethod\n def __iter__(self):\n while False :\n yield None\n \n @classmethod\n def __subclasshook__(cls,C):\n if cls is Iterable:\n return _check_methods(C,\"__iter__\")\n return NotImplemented\n \n \nclass Iterator(Iterable):\n\n __slots__=()\n \n @abstractmethod\n def __next__(self):\n ''\n raise StopIteration\n \n def __iter__(self):\n return self\n \n @classmethod\n def __subclasshook__(cls,C):\n if cls is Iterator:\n return _check_methods(C,'__iter__','__next__')\n return NotImplemented\n \nIterator.register(bytes_iterator)\nIterator.register(bytearray_iterator)\n\nIterator.register(dict_keyiterator)\nIterator.register(dict_valueiterator)\nIterator.register(dict_itemiterator)\nIterator.register(list_iterator)\nIterator.register(list_reverseiterator)\nIterator.register(range_iterator)\nIterator.register(longrange_iterator)\nIterator.register(set_iterator)\nIterator.register(str_iterator)\nIterator.register(tuple_iterator)\nIterator.register(zip_iterator)\n\n\nclass Reversible(Iterable):\n\n __slots__=()\n \n @abstractmethod\n def __reversed__(self):\n while False :\n yield None\n \n @classmethod\n def __subclasshook__(cls,C):\n if cls is Reversible:\n return _check_methods(C,\"__reversed__\",\"__iter__\")\n return NotImplemented\n \n \nclass Generator(Iterator):\n\n __slots__=()\n \n def __next__(self):\n ''\n\n \n return self.send(None )\n \n @abstractmethod\n def send(self,value):\n ''\n\n \n raise StopIteration\n \n @abstractmethod\n def throw(self,typ,val=None ,tb=None ):\n ''\n\n \n if val is None :\n if tb is None :\n raise typ\n val=typ()\n if tb is not None :\n val=val.with_traceback(tb)\n raise val\n \n def close(self):\n ''\n \n try :\n self.throw(GeneratorExit)\n except (GeneratorExit,StopIteration):\n pass\n else :\n raise RuntimeError(\"generator ignored GeneratorExit\")\n \n @classmethod\n def __subclasshook__(cls,C):\n if cls is Generator:\n return _check_methods(C,'__iter__','__next__',\n 'send','throw','close')\n return NotImplemented\n \nGenerator.register(generator)\n\n\nclass Sized(metaclass=ABCMeta):\n\n __slots__=()\n \n @abstractmethod\n def __len__(self):\n return 0\n \n @classmethod\n def __subclasshook__(cls,C):\n if cls is Sized:\n return _check_methods(C,\"__len__\")\n return NotImplemented\n \n \nclass Container(metaclass=ABCMeta):\n\n __slots__=()\n \n @abstractmethod\n def __contains__(self,x):\n return False\n \n @classmethod\n def __subclasshook__(cls,C):\n if cls is Container:\n return _check_methods(C,\"__contains__\")\n return NotImplemented\n \nclass Collection(Sized,Iterable,Container):\n\n __slots__=()\n \n @classmethod\n def __subclasshook__(cls,C):\n if cls is Collection:\n return _check_methods(C,\"__len__\",\"__iter__\",\"__contains__\")\n return NotImplemented\n \nclass Callable(metaclass=ABCMeta):\n\n __slots__=()\n \n @abstractmethod\n def 
__call__(self,*args,**kwds):\n return False\n \n @classmethod\n def __subclasshook__(cls,C):\n if cls is Callable:\n return _check_methods(C,\"__call__\")\n return NotImplemented\n \n \n \n \n \nclass Set(Collection):\n\n ''\n\n\n\n\n\n\n\n \n \n __slots__=()\n \n def __le__(self,other):\n if not isinstance(other,Set):\n return NotImplemented\n if len(self)>len(other):\n return False\n for elem in self:\n if elem not in other:\n return False\n return True\n \n def __lt__(self,other):\n if not isinstance(other,Set):\n return NotImplemented\n return len(self)len(other)and self.__ge__(other)\n \n def __ge__(self,other):\n if not isinstance(other,Set):\n return NotImplemented\n if len(self)MAX:\n h -=MASK+1\n if h ==-1:\n h=590923713\n return h\n \nSet.register(frozenset)\n\n\nclass MutableSet(Set):\n ''\n\n\n\n\n\n\n\n\n \n \n __slots__=()\n \n @abstractmethod\n def add(self,value):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def discard(self,value):\n ''\n raise NotImplementedError\n \n def remove(self,value):\n ''\n if value not in self:\n raise KeyError(value)\n self.discard(value)\n \n def pop(self):\n ''\n it=iter(self)\n try :\n value=next(it)\n except StopIteration:\n raise KeyError from None\n self.discard(value)\n return value\n \n def clear(self):\n ''\n try :\n while True :\n self.pop()\n except KeyError:\n pass\n \n def __ior__(self,it):\n for value in it:\n self.add(value)\n return self\n \n def __iand__(self,it):\n for value in (self -it):\n self.discard(value)\n return self\n \n def __ixor__(self,it):\n if it is self:\n self.clear()\n else :\n if not isinstance(it,Set):\n it=self._from_iterable(it)\n for value in it:\n if value in self:\n self.discard(value)\n else :\n self.add(value)\n return self\n \n def __isub__(self,it):\n if it is self:\n self.clear()\n else :\n for value in it:\n self.discard(value)\n return self\n \nMutableSet.register(set)\n\n\n\n\n\nclass Mapping(Collection):\n\n __slots__=()\n \n \"\"\"A Mapping is a generic container for associating key/value\n pairs.\n\n This class provides concrete generic implementations of all\n methods except for __getitem__, __iter__, and __len__.\n\n \"\"\"\n \n @abstractmethod\n def __getitem__(self,key):\n raise KeyError\n \n def get(self,key,default=None ):\n ''\n try :\n return self[key]\n except KeyError:\n return default\n \n def __contains__(self,key):\n try :\n self[key]\n except KeyError:\n return False\n else :\n return True\n \n def keys(self):\n ''\n return KeysView(self)\n \n def items(self):\n ''\n return ItemsView(self)\n \n def values(self):\n ''\n return ValuesView(self)\n \n def __eq__(self,other):\n if not isinstance(other,Mapping):\n return NotImplemented\n return dict(self.items())==dict(other.items())\n \n __reversed__=None\n \nMapping.register(mappingproxy)\n\n\nclass MappingView(Sized):\n\n __slots__='_mapping',\n \n def __init__(self,mapping):\n self._mapping=mapping\n \n def __len__(self):\n return len(self._mapping)\n \n def __repr__(self):\n return '{0.__class__.__name__}({0._mapping!r})'.format(self)\n \n \nclass KeysView(MappingView,Set):\n\n __slots__=()\n \n @classmethod\n def _from_iterable(self,it):\n return set(it)\n \n def __contains__(self,key):\n return key in self._mapping\n \n def __iter__(self):\n yield from self._mapping\n \nKeysView.register(dict_keys)\n\n\nclass ItemsView(MappingView,Set):\n\n __slots__=()\n \n @classmethod\n def _from_iterable(self,it):\n return set(it)\n \n def __contains__(self,item):\n key,value=item\n try :\n v=self._mapping[key]\n except 
KeyError:\n return False\n else :\n return v is value or v ==value\n \n def __iter__(self):\n for key in self._mapping:\n yield (key,self._mapping[key])\n \nItemsView.register(dict_items)\n\n\nclass ValuesView(MappingView,Collection):\n\n __slots__=()\n \n def __contains__(self,value):\n for key in self._mapping:\n v=self._mapping[key]\n if v is value or v ==value:\n return True\n return False\n \n def __iter__(self):\n for key in self._mapping:\n yield self._mapping[key]\n \nValuesView.register(dict_values)\n\n\nclass MutableMapping(Mapping):\n\n __slots__=()\n \n \"\"\"A MutableMapping is a generic container for associating\n key/value pairs.\n\n This class provides concrete generic implementations of all\n methods except for __getitem__, __setitem__, __delitem__,\n __iter__, and __len__.\n\n \"\"\"\n \n @abstractmethod\n def __setitem__(self,key,value):\n raise KeyError\n \n @abstractmethod\n def __delitem__(self,key):\n raise KeyError\n \n __marker=object()\n \n def pop(self,key,default=__marker):\n ''\n\n \n try :\n value=self[key]\n except KeyError:\n if default is self.__marker:\n raise\n return default\n else :\n del self[key]\n return value\n \n def popitem(self):\n ''\n\n \n try :\n key=next(iter(self))\n except StopIteration:\n raise KeyError from None\n value=self[key]\n del self[key]\n return key,value\n \n def clear(self):\n ''\n try :\n while True :\n self.popitem()\n except KeyError:\n pass\n \n def update(*args,**kwds):\n ''\n\n\n\n \n if not args:\n raise TypeError(\"descriptor 'update' of 'MutableMapping' object \"\n \"needs an argument\")\n self,*args=args\n if len(args)>1:\n raise TypeError('update expected at most 1 arguments, got %d'%\n len(args))\n if args:\n other=args[0]\n if isinstance(other,Mapping):\n for key in other:\n self[key]=other[key]\n elif hasattr(other,\"keys\"):\n for key in other.keys():\n self[key]=other[key]\n else :\n for key,value in other:\n self[key]=value\n for key,value in kwds.items():\n self[key]=value\n \n def setdefault(self,key,default=None ):\n ''\n try :\n return self[key]\n except KeyError:\n self[key]=default\n return default\n \nMutableMapping.register(dict)\n\n\n\n\n\nclass Sequence(Reversible,Collection):\n\n ''\n\n\n\n \n \n __slots__=()\n \n @abstractmethod\n def __getitem__(self,index):\n raise IndexError\n \n def __iter__(self):\n i=0\n try :\n while True :\n v=self[i]\n yield v\n i +=1\n except IndexError:\n return\n \n def __contains__(self,value):\n for v in self:\n if v is value or v ==value:\n return True\n return False\n \n def __reversed__(self):\n for i in reversed(range(len(self))):\n yield self[i]\n \n def index(self,value,start=0,stop=None ):\n ''\n\n\n\n\n \n if start is not None and start <0:\n start=max(len(self)+start,0)\n if stop is not None and stop <0:\n stop +=len(self)\n \n i=start\n while stop is None or i \"\n \n copy=\"\"\n \n def get(self,*args):\n pass\n \n items=\"\"\n \n keys=\"\"\n \n run=\"\"\n \n values=\"\"\n \nclass ContextVar:\n\n def __init__(self,name,**kw):\n ''\n self.name=name\n if \"default\"in kw:\n self.default=kw[\"default\"]\n \n def get(self,*args):\n if hasattr(self,\"value\"):\n return self.value\n elif len(args)==1:\n return args[0]\n elif hasattr(self,\"default\"):\n return self.default\n raise LookupError(self.name)\n \n def reset(self,token):\n if token.old_value ==Token.MISSING:\n del self.value\n else :\n self.value=token.old_value\n \n def set(self,value):\n self.value=value\n return Token(self)\n \nclass Token(object):\n\n MISSING=\"\"\n \n def __init__(self,contextvar):\n 
self.var=contextvar\n try :\n self.old_value=contextvar.get()\n except LookupError:\n self.old_value=Token.MISSING\n \ndef copy_context(*args,**kw):\n pass\n", []], "_csv": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n__version__=\"1.0\"\n\nQUOTE_MINIMAL,QUOTE_ALL,QUOTE_NONNUMERIC,QUOTE_NONE=range(4)\n_dialects={}\n_field_limit=128 *1024\n\nclass Error(Exception):\n pass\n \nclass Dialect(object):\n ''\n\n \n \n __slots__=[\"_delimiter\",\"_doublequote\",\"_escapechar\",\n \"_lineterminator\",\"_quotechar\",\"_quoting\",\n \"_skipinitialspace\",\"_strict\"]\n \n def __new__(cls,dialect,**kwargs):\n \n for name in kwargs:\n if '_'+name not in Dialect.__slots__:\n raise TypeError(\"unexpected keyword argument '%s'\"%\n (name,))\n \n if dialect is not None :\n if isinstance(dialect,str):\n dialect=get_dialect(dialect)\n \n \n if (isinstance(dialect,Dialect)\n and all(value is None for value in kwargs.values())):\n return dialect\n \n self=object.__new__(cls)\n \n \n def set_char(x):\n if x is None :\n return None\n if isinstance(x,str)and len(x)<=1:\n return x\n raise TypeError(\"%r must be a 1-character string\"%(name,))\n def set_str(x):\n if isinstance(x,str):\n return x\n raise TypeError(\"%r must be a string\"%(name,))\n def set_quoting(x):\n if x in range(4):\n return x\n raise TypeError(\"bad 'quoting' value\")\n \n attributes={\"delimiter\":(',',set_char),\n \"doublequote\":(True ,bool),\n \"escapechar\":(None ,set_char),\n \"lineterminator\":(\"\\r\\n\",set_str),\n \"quotechar\":('\"',set_char),\n \"quoting\":(QUOTE_MINIMAL,set_quoting),\n \"skipinitialspace\":(False ,bool),\n \"strict\":(False ,bool),\n }\n \n \n notset=object()\n for name in Dialect.__slots__:\n name=name[1:]\n value=notset\n if name in kwargs:\n value=kwargs[name]\n elif dialect is not None :\n value=getattr(dialect,name,notset)\n \n \n if value is notset:\n value=attributes[name][0]\n if name =='quoting'and not self.quotechar:\n value=QUOTE_NONE\n else :\n converter=attributes[name][1]\n if converter:\n value=converter(value)\n \n setattr(self,'_'+name,value)\n \n if not self.delimiter:\n raise TypeError(\"delimiter must be set\")\n \n if self.quoting !=QUOTE_NONE and not self.quotechar:\n raise TypeError(\"quotechar must be set if quoting enabled\")\n \n if not self.lineterminator:\n raise TypeError(\"lineterminator must be set\")\n \n return self\n \n delimiter=property(lambda self:self._delimiter)\n doublequote=property(lambda self:self._doublequote)\n escapechar=property(lambda self:self._escapechar)\n lineterminator=property(lambda self:self._lineterminator)\n quotechar=property(lambda self:self._quotechar)\n quoting=property(lambda self:self._quoting)\n skipinitialspace=property(lambda self:self._skipinitialspace)\n strict=property(lambda self:self._strict)\n \n \ndef _call_dialect(dialect_inst,kwargs):\n return Dialect(dialect_inst,**kwargs)\n \ndef register_dialect(name,dialect=None ,**kwargs):\n ''\n \n if not isinstance(name,str):\n raise TypeError(\"dialect name must be a string or unicode\")\n \n dialect=_call_dialect(dialect,kwargs)\n _dialects[name]=dialect\n \ndef unregister_dialect(name):\n ''\n \n try :\n del _dialects[name]\n except KeyError:\n raise Error(\"unknown dialect\")\n \ndef get_dialect(name):\n ''\n \n try :\n return _dialects[name]\n except KeyError:\n raise Error(\"unknown dialect\")\n \ndef list_dialects():\n ''\n \n return list(_dialects)\n \nclass Reader(object):\n ''\n\n\n \n \n \n 
(START_RECORD,START_FIELD,ESCAPED_CHAR,IN_FIELD,\n IN_QUOTED_FIELD,ESCAPE_IN_QUOTED_FIELD,QUOTE_IN_QUOTED_FIELD,\n EAT_CRNL)=range(8)\n \n def __init__(self,iterator,dialect=None ,**kwargs):\n self.dialect=_call_dialect(dialect,kwargs)\n \n \n \n self._delimiter=self.dialect.delimiter if self.dialect.delimiter else '\\0'\n self._quotechar=self.dialect.quotechar if self.dialect.quotechar else '\\0'\n self._escapechar=self.dialect.escapechar if self.dialect.escapechar else '\\0'\n self._doublequote=self.dialect.doublequote\n self._quoting=self.dialect.quoting\n self._skipinitialspace=self.dialect.skipinitialspace\n self._strict=self.dialect.strict\n \n self.input_iter=iter(iterator)\n self.line_num=0\n \n self._parse_reset()\n \n def _parse_reset(self):\n self.field=''\n self.fields=[]\n self.state=self.START_RECORD\n self.numeric_field=False\n \n def __iter__(self):\n return self\n \n def __next__(self):\n self._parse_reset()\n while True :\n try :\n line=next(self.input_iter)\n except StopIteration:\n \n if len(self.field)>0:\n raise Error(\"newline inside string\")\n raise\n \n self.line_num +=1\n \n if '\\0'in line:\n raise Error(\"line contains NULL byte\")\n self._parse_process_char(line)\n self._parse_eol()\n \n if self.state ==self.START_RECORD:\n break\n \n fields=self.fields\n self.fields=[]\n return fields\n \n def _parse_process_char(self,line):\n pos=0\n while pos pos:\n self._parse_add_str(line[pos:pos2])\n pos=pos2\n self._parse_save_field()\n self.state=self.EAT_CRNL\n break\n elif line[pos2]==self._escapechar[0]:\n \n if pos2 >pos:\n self._parse_add_str(line[pos:pos2])\n pos=pos2\n self.state=self.ESCAPED_CHAR\n break\n elif line[pos2]==self._delimiter[0]:\n \n if pos2 >pos:\n self._parse_add_str(line[pos:pos2])\n pos=pos2\n self._parse_save_field()\n self.state=self.START_FIELD\n break\n \n pos2 +=1\n else :\n if pos2 >pos:\n self._parse_add_str(line[pos:pos2])\n pos=pos2\n continue\n \n elif self.state ==self.START_RECORD:\n if line[pos]=='\\n'or line[pos]=='\\r':\n self.state=self.EAT_CRNL\n else :\n self.state=self.START_FIELD\n \n continue\n \n elif self.state ==self.START_FIELD:\n if line[pos]=='\\n'or line[pos]=='\\r':\n \n self._parse_save_field()\n self.state=self.EAT_CRNL\n elif (line[pos]==self._quotechar[0]\n and self._quoting !=QUOTE_NONE):\n \n self.state=self.IN_QUOTED_FIELD\n elif line[pos]==self._escapechar[0]:\n \n self.state=self.ESCAPED_CHAR\n elif self._skipinitialspace and line[pos]==' ':\n \n pass\n elif line[pos]==self._delimiter[0]:\n \n self._parse_save_field()\n else :\n \n if self._quoting ==QUOTE_NONNUMERIC:\n self.numeric_field=True\n self.state=self.IN_FIELD\n continue\n \n elif self.state ==self.ESCAPED_CHAR:\n self._parse_add_char(line[pos])\n self.state=self.IN_FIELD\n \n elif self.state ==self.IN_QUOTED_FIELD:\n if line[pos]==self._escapechar:\n \n self.state=self.ESCAPE_IN_QUOTED_FIELD\n elif (line[pos]==self._quotechar\n and self._quoting !=QUOTE_NONE):\n if self._doublequote:\n \n self.state=self.QUOTE_IN_QUOTED_FIELD\n else :\n \n self.state=self.IN_FIELD\n else :\n \n self._parse_add_char(line[pos])\n \n elif self.state ==self.ESCAPE_IN_QUOTED_FIELD:\n self._parse_add_char(line[pos])\n self.state=self.IN_QUOTED_FIELD\n \n elif self.state ==self.QUOTE_IN_QUOTED_FIELD:\n \n if (line[pos]==self._quotechar\n and self._quoting !=QUOTE_NONE):\n \n self._parse_add_char(line[pos])\n self.state=self.IN_QUOTED_FIELD\n elif line[pos]==self._delimiter[0]:\n \n self._parse_save_field()\n self.state=self.START_FIELD\n elif line[pos]=='\\r'or 
line[pos]=='\\n':\n \n self._parse_save_field()\n self.state=self.EAT_CRNL\n elif not self._strict:\n self._parse_add_char(line[pos])\n self.state=self.IN_FIELD\n else :\n raise Error(\"'%c' expected after '%c'\"%\n (self._delimiter,self._quotechar))\n \n elif self.state ==self.EAT_CRNL:\n if line[pos]=='\\r'or line[pos]=='\\n':\n pass\n else :\n raise Error(\"new-line character seen in unquoted field - \"\n \"do you need to open the file \"\n \"in universal-newline mode?\")\n \n else :\n raise RuntimeError(\"unknown state: %r\"%(self.state,))\n \n pos +=1\n \n def _parse_eol(self):\n if self.state ==self.EAT_CRNL:\n self.state=self.START_RECORD\n elif self.state ==self.START_RECORD:\n \n pass\n elif self.state ==self.IN_FIELD:\n \n \n self._parse_save_field()\n self.state=self.START_RECORD\n elif self.state ==self.START_FIELD:\n \n self._parse_save_field()\n self.state=self.START_RECORD\n elif self.state ==self.ESCAPED_CHAR:\n self._parse_add_char('\\n')\n self.state=self.IN_FIELD\n elif self.state ==self.IN_QUOTED_FIELD:\n pass\n elif self.state ==self.ESCAPE_IN_QUOTED_FIELD:\n self._parse_add_char('\\n')\n self.state=self.IN_QUOTED_FIELD\n elif self.state ==self.QUOTE_IN_QUOTED_FIELD:\n \n self._parse_save_field()\n self.state=self.START_RECORD\n else :\n raise RuntimeError(\"unknown state: %r\"%(self.state,))\n \n def _parse_save_field(self):\n field,self.field=self.field,''\n if self.numeric_field:\n self.numeric_field=False\n field=float(field)\n self.fields.append(field)\n \n def _parse_add_char(self,c):\n if len(self.field)+1 >_field_limit:\n raise Error(\"field larget than field limit (%d)\"%(_field_limit))\n self.field +=c\n \n def _parse_add_str(self,s):\n if len(self.field)+len(s)>_field_limit:\n raise Error(\"field larget than field limit (%d)\"%(_field_limit))\n self.field +=s\n \n \nclass Writer(object):\n ''\n\n\n \n \n def __init__(self,file,dialect=None ,**kwargs):\n if not (hasattr(file,'write')and callable(file.write)):\n raise TypeError(\"argument 1 must have a 'write' method\")\n self.writeline=file.write\n self.dialect=_call_dialect(dialect,kwargs)\n \n def _join_reset(self):\n self.rec=[]\n self.num_fields=0\n \n def _join_append(self,field,quoted,quote_empty):\n dialect=self.dialect\n \n if self.num_fields >0:\n self.rec.append(dialect.delimiter)\n \n if dialect.quoting ==QUOTE_NONE:\n need_escape=tuple(dialect.lineterminator)+(\n dialect.escapechar,\n dialect.delimiter,dialect.quotechar)\n \n else :\n for c in tuple(dialect.lineterminator)+(\n dialect.delimiter,dialect.escapechar):\n if c and c in field:\n quoted=True\n \n need_escape=()\n if dialect.quotechar in field:\n if dialect.doublequote:\n field=field.replace(dialect.quotechar,\n dialect.quotechar *2)\n quoted=True\n else :\n need_escape=(dialect.quotechar,)\n \n \n for c in need_escape:\n if c and c in field:\n if not dialect.escapechar:\n raise Error(\"need to escape, but no escapechar set\")\n field=field.replace(c,dialect.escapechar+c)\n \n \n if field ==''and quote_empty:\n if dialect.quoting ==QUOTE_NONE:\n raise Error(\"single empty field record must be quoted\")\n quoted=1\n \n if quoted:\n field=dialect.quotechar+field+dialect.quotechar\n \n self.rec.append(field)\n self.num_fields +=1\n \n \n \n def writerow(self,row):\n dialect=self.dialect\n try :\n rowlen=len(row)\n except TypeError:\n raise Error(\"sequence expected\")\n \n \n self._join_reset()\n \n for field in row:\n quoted=False\n if dialect.quoting ==QUOTE_NONNUMERIC:\n try :\n float(field)\n except :\n quoted=True\n \n \n elif 
dialect.quoting ==QUOTE_ALL:\n quoted=True\n \n if field is None :\n self._join_append(\"\",quoted,rowlen ==1)\n else :\n self._join_append(str(field),quoted,rowlen ==1)\n \n \n self.rec.append(dialect.lineterminator)\n \n self.writeline(''.join(self.rec))\n \n def writerows(self,rows):\n for row in rows:\n self.writerow(row)\n \ndef reader(*args,**kwargs):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n return Reader(*args,**kwargs)\n \ndef writer(*args,**kwargs):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n return Writer(*args,**kwargs)\n \n \nundefined=object()\ndef field_size_limit(limit=undefined):\n ''\n\n\n\n \n \n global _field_limit\n old_limit=_field_limit\n \n if limit is not undefined:\n if not isinstance(limit,(int,long)):\n raise TypeError(\"int expected, got %s\"%\n (limit.__class__.__name__,))\n _field_limit=limit\n \n return old_limit\n", []], "_dummy_thread": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n__all__=['error','start_new_thread','exit','get_ident','allocate_lock',\n'interrupt_main','LockType']\n\n\nTIMEOUT_MAX=2 **31\n\n\n\n\n\n\nerror=RuntimeError\n\ndef start_new_thread(function,args,kwargs={}):\n ''\n\n\n\n\n\n\n\n\n\n\n \n if type(args)!=type(tuple()):\n raise TypeError(\"2nd arg must be a tuple\")\n if type(kwargs)!=type(dict()):\n raise TypeError(\"3rd arg must be a dict\")\n global _main\n _main=False\n try :\n function(*args,**kwargs)\n except SystemExit:\n pass\n except :\n import traceback\n traceback.print_exc()\n _main=True\n global _interrupt\n if _interrupt:\n _interrupt=False\n raise KeyboardInterrupt\n \ndef exit():\n ''\n raise SystemExit\n \ndef get_ident():\n ''\n\n\n\n\n \n return 1\n \ndef allocate_lock():\n ''\n return LockType()\n \ndef stack_size(size=None ):\n ''\n if size is not None :\n raise error(\"setting thread stack size not supported\")\n return 0\n \ndef _set_sentinel():\n ''\n return LockType()\n \nclass LockType(object):\n ''\n\n\n\n\n\n\n\n \n \n def __init__(self):\n self.locked_status=False\n \n def acquire(self,waitflag=None ,timeout=-1):\n ''\n\n\n\n\n\n\n\n\n \n if waitflag is None or waitflag:\n self.locked_status=True\n return True\n else :\n if not self.locked_status:\n self.locked_status=True\n return True\n else :\n if timeout >0:\n import time\n time.sleep(timeout)\n return False\n \n __enter__=acquire\n \n def __exit__(self,typ,val,tb):\n self.release()\n \n def release(self):\n ''\n \n \n if not self.locked_status:\n raise error\n self.locked_status=False\n return True\n \n def locked(self):\n return self.locked_status\n \n def __repr__(self):\n return \"<%s %s.%s object at %s>\"%(\n \"locked\"if self.locked_status else \"unlocked\",\n self.__class__.__module__,\n self.__class__.__qualname__,\n hex(id(self))\n )\n \n \n_interrupt=False\n\n_main=True\n\ndef interrupt_main():\n ''\n \n if _main:\n raise KeyboardInterrupt\n else :\n global _interrupt\n _interrupt=True\n", ["time", "traceback"]], "_functools": [".py", "from reprlib import recursive_repr\n\nclass partial:\n ''\n\n \n \n __slots__=\"func\",\"args\",\"keywords\",\"__dict__\",\"__weakref__\"\n \n def __new__(*args,**keywords):\n if not args:\n raise TypeError(\"descriptor '__new__' of partial needs an argument\")\n if len(args)<2:\n raise TypeError(\"type 'partial' takes at least one argument\")\n cls,func,*args=args\n if not callable(func):\n raise TypeError(\"the first argument must be callable\")\n args=tuple(args)\n \n if hasattr(func,\"func\"):\n args=func.args+args\n tmpkw=func.keywords.copy()\n tmpkw.update(keywords)\n keywords=tmpkw\n del tmpkw\n func=func.func\n \n 
self=super(partial,cls).__new__(cls)\n \n self.func=func\n self.args=args\n self.keywords=keywords\n return self\n \n def __call__(*args,**keywords):\n if not args:\n raise TypeError(\"descriptor '__call__' of partial needs an argument\")\n self,*args=args\n newkeywords=self.keywords.copy()\n newkeywords.update(keywords)\n return self.func(*self.args,*args,**newkeywords)\n \n @recursive_repr()\n def __repr__(self):\n qualname=type(self).__qualname__\n args=[repr(self.func)]\n args.extend(repr(x)for x in self.args)\n args.extend(f\"{k}={v!r}\"for (k,v)in self.keywords.items())\n if type(self).__module__ ==\"functools\":\n return f\"functools.{qualname}({', '.join(args)})\"\n return f\"{qualname}({', '.join(args)})\"\n \n def __reduce__(self):\n return type(self),(self.func,),(self.func,self.args,\n self.keywords or None ,self.__dict__ or None )\n \n def __setstate__(self,state):\n if not isinstance(state,tuple):\n raise TypeError(\"argument to __setstate__ must be a tuple\")\n if len(state)!=4:\n raise TypeError(f\"expected 4 items in state, got {len(state)}\")\n func,args,kwds,namespace=state\n if (not callable(func)or not isinstance(args,tuple)or\n (kwds is not None and not isinstance(kwds,dict))or\n (namespace is not None and not isinstance(namespace,dict))):\n raise TypeError(\"invalid partial state\")\n \n args=tuple(args)\n if kwds is None :\n kwds={}\n elif type(kwds)is not dict:\n kwds=dict(kwds)\n if namespace is None :\n namespace={}\n \n self.__dict__=namespace\n self.func=func\n self.args=args\n self.keywords=kwds\n \ndef reduce(func,iterable,initializer=None ):\n args=iter(iterable)\n if initializer is not None :\n res=initializer\n else :\n res=next(args)\n while True :\n try :\n res=func(res,next(args))\n except StopIteration:\n return res\n", ["reprlib"]], "_imp": [".py", "''\nimport sys\n\ndef _fix_co_filename(*args,**kw):\n ''\n\n\n\n \n pass\n \ndef acquire_lock(*args,**kw):\n ''\n\n \n pass\n \ncheck_hash_based_pycs=\"\"\"default\"\"\"\n\ndef create_builtin(spec):\n ''\n __import__(spec.name)\n \ndef create_dynamic(*args,**kw):\n ''\n pass\n \ndef exec_builtin(*args,**kw):\n ''\n pass\n \ndef exec_dynamic(*args,**kw):\n ''\n pass\n \ndef extension_suffixes(*args,**kw):\n ''\n return []\n \ndef get_frozen_object(*args,**kw):\n ''\n pass\n \ndef init_frozen(*args,**kw):\n ''\n pass\n \ndef is_builtin(module_name):\n\n return module_name in __BRYTHON__.builtin_module_names\n \ndef is_frozen(*args,**kw):\n ''\n return False\n \ndef is_frozen_package(*args,**kw):\n ''\n pass\n \ndef lock_held(*args,**kw):\n ''\n \n return False\n \ndef release_lock(*args,**kw):\n ''\n \n pass\n \ndef source_hash(*args,**kw):\n pass\n", ["sys"]], "_io": [".py", "''\n\n\n\nimport os\nimport abc\nimport codecs\nimport errno\n\ntry :\n from _thread import allocate_lock as Lock\nexcept ImportError:\n from _dummy_thread import allocate_lock as Lock\n \nSEEK_SET=0\nSEEK_CUR=1\nSEEK_END=2\n\nvalid_seek_flags={0,1,2}\nif hasattr(os,'SEEK_HOLE'):\n valid_seek_flags.add(os.SEEK_HOLE)\n valid_seek_flags.add(os.SEEK_DATA)\n \n \nDEFAULT_BUFFER_SIZE=8 *1024\n\n\n\n\n\n\nBlockingIOError=BlockingIOError\n\n\ndef __open(file,mode=\"r\",buffering=-1,encoding=None ,errors=None ,\nnewline=None ,closefd=True ,opener=None ):\n\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if not isinstance(file,(str,bytes,int)):\n raise 
TypeError(\"invalid file: %r\"%file)\n if not isinstance(mode,str):\n raise TypeError(\"invalid mode: %r\"%mode)\n if not isinstance(buffering,int):\n raise TypeError(\"invalid buffering: %r\"%buffering)\n if encoding is not None and not isinstance(encoding,str):\n raise TypeError(\"invalid encoding: %r\"%encoding)\n if errors is not None and not isinstance(errors,str):\n raise TypeError(\"invalid errors: %r\"%errors)\n modes=set(mode)\n if modes -set(\"axrwb+tU\")or len(mode)>len(modes):\n raise ValueError(\"invalid mode: %r\"%mode)\n creating=\"x\"in modes\n reading=\"r\"in modes\n writing=\"w\"in modes\n appending=\"a\"in modes\n updating=\"+\"in modes\n text=\"t\"in modes\n binary=\"b\"in modes\n if \"U\"in modes:\n if creating or writing or appending:\n raise ValueError(\"can't use U and writing mode at once\")\n reading=True\n if text and binary:\n raise ValueError(\"can't have text and binary mode at once\")\n if creating+reading+writing+appending >1:\n raise ValueError(\"can't have read/write/append mode at once\")\n if not (creating or reading or writing or appending):\n raise ValueError(\"must have exactly one of read/write/append mode\")\n if binary and encoding is not None :\n raise ValueError(\"binary mode doesn't take an encoding argument\")\n if binary and errors is not None :\n raise ValueError(\"binary mode doesn't take an errors argument\")\n if binary and newline is not None :\n raise ValueError(\"binary mode doesn't take a newline argument\")\n raw=FileIO(file,\n (creating and \"x\"or \"\")+\n (reading and \"r\"or \"\")+\n (writing and \"w\"or \"\")+\n (appending and \"a\"or \"\")+\n (updating and \"+\"or \"\"),\n closefd,opener=opener)\n line_buffering=False\n if buffering ==1 or buffering <0 and raw.isatty():\n buffering=-1\n line_buffering=True\n if buffering <0:\n buffering=DEFAULT_BUFFER_SIZE\n try :\n bs=os.fstat(raw.fileno()).st_blksize\n except (os.error,AttributeError):\n pass\n else :\n if bs >1:\n buffering=bs\n if buffering <0:\n raise ValueError(\"invalid buffering size\")\n if buffering ==0:\n if binary:\n return raw\n raise ValueError(\"can't have unbuffered text I/O\")\n if updating:\n buffer=BufferedRandom(raw,buffering)\n elif creating or writing or appending:\n buffer=BufferedWriter(raw,buffering)\n elif reading:\n buffer=BufferedReader(raw,buffering)\n else :\n raise ValueError(\"unknown mode: %r\"%mode)\n if binary:\n return buffer\n text=TextIOWrapper(buffer,encoding,errors,newline,line_buffering)\n text.mode=mode\n return text\n \nopen=__open\n\nclass DocDescriptor:\n ''\n \n def __get__(self,obj,typ):\n return (\n \"open(file, mode='r', buffering=-1, encoding=None, \"\n \"errors=None, newline=None, closefd=True)\\n\\n\"+\n open.__doc__)\n \nclass OpenWrapper:\n ''\n\n\n\n\n\n \n __doc__=DocDescriptor()\n \n def __new__(cls,*args,**kwargs):\n return open(*args,**kwargs)\n \n \n \n \nclass UnsupportedOperation(ValueError,IOError):\n pass\n \n \nclass IOBase(metaclass=abc.ABCMeta):\n\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n \n \n def _unsupported(self,name):\n ''\n raise UnsupportedOperation(\"%s.%s() not supported\"%\n (self.__class__.__name__,name))\n \n \n \n def seek(self,pos,whence=0):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n self._unsupported(\"seek\")\n \n def tell(self):\n ''\n return self.seek(0,1)\n \n def truncate(self,pos=None ):\n ''\n\n\n\n \n self._unsupported(\"truncate\")\n \n \n \n def flush(self):\n ''\n\n\n \n self._checkClosed()\n \n \n __closed=False\n \n def close(self):\n ''\n\n\n \n if not self.__closed:\n 
try :\n self.flush()\n finally :\n self.__closed=True\n \n def __del__(self):\n ''\n \n \n \n \n \n try :\n self.close()\n except :\n pass\n \n \n \n def seekable(self):\n ''\n\n\n\n \n return False\n \n def _checkSeekable(self,msg=None ):\n ''\n \n if not self.seekable():\n raise UnsupportedOperation(\"File or stream is not seekable.\"\n if msg is None else msg)\n \n def readable(self):\n ''\n\n\n \n return False\n \n def _checkReadable(self,msg=None ):\n ''\n \n if not self.readable():\n raise UnsupportedOperation(\"File or stream is not readable.\"\n if msg is None else msg)\n \n def writable(self):\n ''\n\n\n \n return False\n \n def _checkWritable(self,msg=None ):\n ''\n \n if not self.writable():\n raise UnsupportedOperation(\"File or stream is not writable.\"\n if msg is None else msg)\n \n @property\n def closed(self):\n ''\n\n\n \n return self.__closed\n \n def _checkClosed(self,msg=None ):\n ''\n \n if self.closed:\n raise ValueError(\"I/O operation on closed file.\"\n if msg is None else msg)\n \n \n \n def __enter__(self):\n ''\n self._checkClosed()\n return self\n \n def __exit__(self,*args):\n ''\n self.close()\n \n \n \n \n \n def fileno(self):\n ''\n\n\n \n self._unsupported(\"fileno\")\n \n def isatty(self):\n ''\n\n\n \n self._checkClosed()\n return False\n \n \n \n def readline(self,limit=-1):\n ''\n\n\n\n\n\n\n\n \n \n if hasattr(self,\"peek\"):\n def nreadahead():\n readahead=self.peek(1)\n if not readahead:\n return 1\n n=(readahead.find(b\"\\n\")+1)or len(readahead)\n if limit >=0:\n n=min(n,limit)\n return n\n else :\n def nreadahead():\n return 1\n if limit is None :\n limit=-1\n elif not isinstance(limit,int):\n raise TypeError(\"limit must be an integer\")\n res=bytearray()\n while limit <0 or len(res)=hint:\n break\n return lines\n \n def writelines(self,lines):\n self._checkClosed()\n for line in lines:\n self.write(line)\n \n \n \n \n \nclass RawIOBase(IOBase):\n\n ''\n \n \n \n \n \n \n \n \n \n \n \n def read(self,n=-1):\n ''\n\n\n\n \n if n is None :\n n=-1\n if n <0:\n return self.readall()\n b=bytearray(n.__index__())\n n=self.readinto(b)\n if n is None :\n return None\n del b[n:]\n return bytes(b)\n \n def readall(self):\n ''\n res=bytearray()\n while True :\n data=self.read(DEFAULT_BUFFER_SIZE)\n if not data:\n break\n res +=data\n if res:\n return bytes(res)\n else :\n \n return data\n \n def readinto(self,b):\n ''\n\n\n\n \n self._unsupported(\"readinto\")\n \n def write(self,b):\n ''\n\n\n \n self._unsupported(\"write\")\n \n \n \n \n \n \n \nclass BufferedIOBase(IOBase):\n\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def read(self,n=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n self._unsupported(\"read\")\n \n def read1(self,n=None ):\n ''\n\n \n self._unsupported(\"read1\")\n \n def readinto(self,b):\n ''\n\n\n\n\n\n\n\n\n \n \n data=self.read(len(b))\n n=len(data)\n try :\n b[:n]=data\n except TypeError as err:\n import array\n if not isinstance(b,array.array):\n raise err\n b[:n]=array.array('b',data)\n return n\n \n def write(self,b):\n ''\n\n\n\n\n\n\n \n self._unsupported(\"write\")\n \n def detach(self):\n ''\n\n\n\n\n \n self._unsupported(\"detach\")\n \n \n \n \n \nclass _BufferedIOMixin(BufferedIOBase):\n\n ''\n\n\n\n\n \n \n def __init__(self,raw):\n self._raw=raw\n \n \n \n def seek(self,pos,whence=0):\n new_position=self.raw.seek(pos,whence)\n if new_position <0:\n raise IOError(\"seek() returned an invalid position\")\n return new_position\n \n def tell(self):\n pos=self.raw.tell()\n if pos <0:\n raise IOError(\"tell() returned an invalid 
position\")\n return pos\n \n def truncate(self,pos=None ):\n \n \n \n self.flush()\n \n if pos is None :\n pos=self.tell()\n \n \n return self.raw.truncate(pos)\n \n \n \n def flush(self):\n if self.closed:\n raise ValueError(\"flush of closed file\")\n self.raw.flush()\n \n def close(self):\n if self.raw is not None and not self.closed:\n try :\n \n self.flush()\n finally :\n self.raw.close()\n \n def detach(self):\n if self.raw is None :\n raise ValueError(\"raw stream already detached\")\n self.flush()\n raw=self._raw\n self._raw=None\n return raw\n \n \n \n def seekable(self):\n return self.raw.seekable()\n \n def readable(self):\n return self.raw.readable()\n \n def writable(self):\n return self.raw.writable()\n \n @property\n def raw(self):\n return self._raw\n \n @property\n def closed(self):\n return self.raw.closed\n \n @property\n def name(self):\n return self.raw.name\n \n @property\n def mode(self):\n return self.raw.mode\n \n def __getstate__(self):\n raise TypeError(\"can not serialize a '{0}' object\"\n .format(self.__class__.__name__))\n \n def __repr__(self):\n clsname=self.__class__.__name__\n try :\n name=self.name\n except AttributeError:\n return \"<_io.{0}>\".format(clsname)\n else :\n return \"<_io.{0} name={1!r}>\".format(clsname,name)\n \n \n \n def fileno(self):\n return self.raw.fileno()\n \n def isatty(self):\n return self.raw.isatty()\n \n \nclass BytesIO(BufferedIOBase):\n\n ''\n \n def __init__(self,initial_bytes=None ):\n buf=bytearray()\n if initial_bytes is not None :\n buf +=initial_bytes\n self._buffer=buf\n self._pos=0\n \n def __getstate__(self):\n if self.closed:\n raise ValueError(\"__getstate__ on closed file\")\n return self.__dict__.copy()\n \n def getvalue(self):\n ''\n \n if self.closed:\n raise ValueError(\"getvalue on closed file\")\n return bytes(self._buffer)\n \n def getbuffer(self):\n ''\n \n return memoryview(self._buffer)\n \n def read(self,n=None ):\n if self.closed:\n raise ValueError(\"read from closed file\")\n if n is None :\n n=-1\n if n <0:\n n=len(self._buffer)\n if len(self._buffer)<=self._pos:\n return b\"\"\n newpos=min(len(self._buffer),self._pos+n)\n b=self._buffer[self._pos:newpos]\n self._pos=newpos\n return bytes(b)\n \n def read1(self,n):\n ''\n \n return self.read(n)\n \n def write(self,b):\n if self.closed:\n raise ValueError(\"write to closed file\")\n if isinstance(b,str):\n raise TypeError(\"can't write str to binary stream\")\n n=len(b)\n if n ==0:\n return 0\n pos=self._pos\n if pos >len(self._buffer):\n \n \n padding=b'\\x00'*(pos -len(self._buffer))\n self._buffer +=padding\n self._buffer[pos:pos+n]=b\n self._pos +=n\n return n\n \n def seek(self,pos,whence=0):\n if self.closed:\n raise ValueError(\"seek on closed file\")\n try :\n pos.__index__\n except AttributeError as err:\n raise TypeError(\"an integer is required\")from err\n if whence ==0:\n if pos <0:\n raise ValueError(\"negative seek position %r\"%(pos,))\n self._pos=pos\n elif whence ==1:\n self._pos=max(0,self._pos+pos)\n elif whence ==2:\n self._pos=max(0,len(self._buffer)+pos)\n else :\n raise ValueError(\"unsupported whence value\")\n return self._pos\n \n def tell(self):\n if self.closed:\n raise ValueError(\"tell on closed file\")\n return self._pos\n \n def truncate(self,pos=None ):\n if self.closed:\n raise ValueError(\"truncate on closed file\")\n if pos is None :\n pos=self._pos\n else :\n try :\n pos.__index__\n except AttributeError as err:\n raise TypeError(\"an integer is required\")from err\n if pos <0:\n raise ValueError(\"negative 
truncate position %r\"%(pos,))\n del self._buffer[pos:]\n return pos\n \n def readable(self):\n if self.closed:\n raise ValueError(\"I/O operation on closed file.\")\n return True\n \n def writable(self):\n if self.closed:\n raise ValueError(\"I/O operation on closed file.\")\n return True\n \n def seekable(self):\n if self.closed:\n raise ValueError(\"I/O operation on closed file.\")\n return True\n \n \nclass BufferedReader(_BufferedIOMixin):\n\n ''\n\n\n\n\n\n\n \n \n def __init__(self,raw,buffer_size=DEFAULT_BUFFER_SIZE):\n ''\n \n if not raw.readable():\n raise IOError('\"raw\" argument must be readable.')\n \n _BufferedIOMixin.__init__(self,raw)\n if buffer_size <=0:\n raise ValueError(\"invalid buffer size\")\n self.buffer_size=buffer_size\n self._reset_read_buf()\n self._read_lock=Lock()\n \n def _reset_read_buf(self):\n self._read_buf=b\"\"\n self._read_pos=0\n \n def flush(self):\n pass\n \n def read(self,n=None ):\n ''\n\n\n\n\n\n \n if n is not None and n <-1:\n raise ValueError(\"invalid number of bytes to read\")\n with self._read_lock:\n return self._read_unlocked(n)\n \n def _read_unlocked(self,n=None ):\n nodata_val=b\"\"\n empty_values=(b\"\",None )\n buf=self._read_buf\n pos=self._read_pos\n \n \n if n is None or n ==-1:\n self._reset_read_buf()\n if hasattr(self.raw,'readall'):\n chunk=self.raw.readall()\n if chunk is None :\n return buf[pos:]or None\n else :\n return buf[pos:]+chunk\n chunks=[buf[pos:]]\n current_size=0\n while True :\n \n try :\n chunk=self.raw.read()\n except InterruptedError:\n continue\n if chunk in empty_values:\n nodata_val=chunk\n break\n current_size +=len(chunk)\n chunks.append(chunk)\n return b\"\".join(chunks)or nodata_val\n \n \n avail=len(buf)-pos\n if n <=avail:\n \n self._read_pos +=n\n return buf[pos:pos+n]\n \n \n chunks=[buf[pos:]]\n wanted=max(self.buffer_size,n)\n while avail self.buffer_size:\n \n \n self._flush_unlocked()\n before=len(self._write_buf)\n self._write_buf.extend(b)\n written=len(self._write_buf)-before\n if len(self._write_buf)>self.buffer_size:\n try :\n self._flush_unlocked()\n except BlockingIOError as e:\n if len(self._write_buf)>self.buffer_size:\n \n \n overage=len(self._write_buf)-self.buffer_size\n written -=overage\n self._write_buf=self._write_buf[:self.buffer_size]\n raise BlockingIOError(e.errno,e.strerror,written)\n return written\n \n def truncate(self,pos=None ):\n with self._write_lock:\n self._flush_unlocked()\n if pos is None :\n pos=self.raw.tell()\n return self.raw.truncate(pos)\n \n def flush(self):\n with self._write_lock:\n self._flush_unlocked()\n \n def _flush_unlocked(self):\n if self.closed:\n raise ValueError(\"flush of closed file\")\n while self._write_buf:\n try :\n n=self.raw.write(self._write_buf)\n except InterruptedError:\n continue\n except BlockingIOError:\n raise RuntimeError(\"self.raw should implement RawIOBase: it \"\n \"should not raise BlockingIOError\")\n if n is None :\n raise BlockingIOError(\n errno.EAGAIN,\n \"write could not complete without blocking\",0)\n if n >len(self._write_buf)or n <0:\n raise IOError(\"write() returned incorrect number of bytes\")\n del self._write_buf[:n]\n \n def tell(self):\n return _BufferedIOMixin.tell(self)+len(self._write_buf)\n \n def seek(self,pos,whence=0):\n if whence not in valid_seek_flags:\n raise ValueError(\"invalid whence value\")\n with self._write_lock:\n self._flush_unlocked()\n return _BufferedIOMixin.seek(self,pos,whence)\n \n \nclass BufferedRWPair(BufferedIOBase):\n\n ''\n\n\n\n\n\n\n\n\n \n \n \n \n \n def 
__init__(self,reader,writer,buffer_size=DEFAULT_BUFFER_SIZE):\n ''\n\n\n \n if not reader.readable():\n raise IOError('\"reader\" argument must be readable.')\n \n if not writer.writable():\n raise IOError('\"writer\" argument must be writable.')\n \n self.reader=BufferedReader(reader,buffer_size)\n self.writer=BufferedWriter(writer,buffer_size)\n \n def read(self,n=None ):\n if n is None :\n n=-1\n return self.reader.read(n)\n \n def readinto(self,b):\n return self.reader.readinto(b)\n \n def write(self,b):\n return self.writer.write(b)\n \n def peek(self,n=0):\n return self.reader.peek(n)\n \n def read1(self,n):\n return self.reader.read1(n)\n \n def readable(self):\n return self.reader.readable()\n \n def writable(self):\n return self.writer.writable()\n \n def flush(self):\n return self.writer.flush()\n \n def close(self):\n self.writer.close()\n self.reader.close()\n \n def isatty(self):\n return self.reader.isatty()or self.writer.isatty()\n \n @property\n def closed(self):\n return self.writer.closed\n \n \nclass BufferedRandom(BufferedWriter,BufferedReader):\n\n ''\n\n\n\n\n \n \n def __init__(self,raw,buffer_size=DEFAULT_BUFFER_SIZE):\n raw._checkSeekable()\n BufferedReader.__init__(self,raw,buffer_size)\n BufferedWriter.__init__(self,raw,buffer_size)\n \n def seek(self,pos,whence=0):\n if whence not in valid_seek_flags:\n raise ValueError(\"invalid whence value\")\n self.flush()\n if self._read_buf:\n \n with self._read_lock:\n self.raw.seek(self._read_pos -len(self._read_buf),1)\n \n \n pos=self.raw.seek(pos,whence)\n with self._read_lock:\n self._reset_read_buf()\n if pos <0:\n raise IOError(\"seek() returned invalid position\")\n return pos\n \n def tell(self):\n if self._write_buf:\n return BufferedWriter.tell(self)\n else :\n return BufferedReader.tell(self)\n \n def truncate(self,pos=None ):\n if pos is None :\n pos=self.tell()\n \n return BufferedWriter.truncate(self,pos)\n \n def read(self,n=None ):\n if n is None :\n n=-1\n self.flush()\n return BufferedReader.read(self,n)\n \n def readinto(self,b):\n self.flush()\n return BufferedReader.readinto(self,b)\n \n def peek(self,n=0):\n self.flush()\n return BufferedReader.peek(self,n)\n \n def read1(self,n):\n self.flush()\n return BufferedReader.read1(self,n)\n \n def write(self,b):\n if self._read_buf:\n \n with self._read_lock:\n self.raw.seek(self._read_pos -len(self._read_buf),1)\n self._reset_read_buf()\n return BufferedWriter.write(self,b)\n \n \nclass TextIOBase(IOBase):\n\n ''\n\n\n\n\n \n \n def read(self,n=-1):\n ''\n\n\n\n\n\n \n self._unsupported(\"read\")\n \n def write(self,s):\n ''\n self._unsupported(\"write\")\n \n def truncate(self,pos=None ):\n ''\n self._unsupported(\"truncate\")\n \n def readline(self):\n ''\n\n\n \n self._unsupported(\"readline\")\n \n def detach(self):\n ''\n\n\n\n\n \n self._unsupported(\"detach\")\n \n @property\n def encoding(self):\n ''\n return None\n \n @property\n def newlines(self):\n ''\n\n\n\n\n \n return None\n \n @property\n def errors(self):\n ''\n\n \n return None\n \n \n \n \n \nclass IncrementalNewlineDecoder(codecs.IncrementalDecoder):\n ''\n\n\n\n\n \n def __init__(self,decoder,translate,errors='strict'):\n codecs.IncrementalDecoder.__init__(self,errors=errors)\n self.translate=translate\n self.decoder=decoder\n self.seennl=0\n self.pendingcr=False\n \n def decode(self,input,final=False ):\n \n if self.decoder is None :\n output=input\n else :\n output=self.decoder.decode(input,final=final)\n if self.pendingcr and (output or final):\n output=\"\\r\"+output\n 
self.pendingcr=False\n \n \n \n if output.endswith(\"\\r\")and not final:\n output=output[:-1]\n self.pendingcr=True\n \n \n crlf=output.count('\\r\\n')\n cr=output.count('\\r')-crlf\n lf=output.count('\\n')-crlf\n self.seennl |=(lf and self._LF)|(cr and self._CR)\\\n |(crlf and self._CRLF)\n \n if self.translate:\n if crlf:\n output=output.replace(\"\\r\\n\",\"\\n\")\n if cr:\n output=output.replace(\"\\r\",\"\\n\")\n \n return output\n \n def getstate(self):\n if self.decoder is None :\n buf=b\"\"\n flag=0\n else :\n buf,flag=self.decoder.getstate()\n flag <<=1\n if self.pendingcr:\n flag |=1\n return buf,flag\n \n def setstate(self,state):\n buf,flag=state\n self.pendingcr=bool(flag&1)\n if self.decoder is not None :\n self.decoder.setstate((buf,flag >>1))\n \n def reset(self):\n self.seennl=0\n self.pendingcr=False\n if self.decoder is not None :\n self.decoder.reset()\n \n _LF=1\n _CR=2\n _CRLF=4\n \n @property\n def newlines(self):\n return (None ,\n \"\\n\",\n \"\\r\",\n (\"\\r\",\"\\n\"),\n \"\\r\\n\",\n (\"\\n\",\"\\r\\n\"),\n (\"\\r\",\"\\r\\n\"),\n (\"\\r\",\"\\n\",\"\\r\\n\")\n )[self.seennl]\n \n \nclass TextIOWrapper(TextIOBase):\n\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n _CHUNK_SIZE=2048\n \n \n \n \n def __init__(self,buffer,encoding=None ,errors=None ,newline=None ,\n line_buffering=False ,write_through=False ):\n if newline is not None and not isinstance(newline,str):\n raise TypeError(\"illegal newline type: %r\"%(type(newline),))\n if newline not in (None ,\"\",\"\\n\",\"\\r\",\"\\r\\n\"):\n raise ValueError(\"illegal newline value: %r\"%(newline,))\n if encoding is None :\n try :\n encoding=os.device_encoding(buffer.fileno())\n except (AttributeError,UnsupportedOperation):\n pass\n if encoding is None :\n try :\n import locale\n except ImportError:\n \n encoding=\"ascii\"\n else :\n encoding=locale.getpreferredencoding(False )\n \n if not isinstance(encoding,str):\n raise ValueError(\"invalid encoding: %r\"%encoding)\n \n if errors is None :\n errors=\"strict\"\n else :\n if not isinstance(errors,str):\n raise ValueError(\"invalid errors: %r\"%errors)\n \n self._buffer=buffer\n self._line_buffering=line_buffering\n self._encoding=encoding\n self._errors=errors\n self._readuniversal=not newline\n self._readtranslate=newline is None\n self._readnl=newline\n self._writetranslate=newline !=''\n self._writenl=newline or os.linesep\n self._encoder=None\n self._decoder=None\n self._decoded_chars=''\n self._decoded_chars_used=0\n self._snapshot=None\n self._seekable=self._telling=self.buffer.seekable()\n self._has_read1=hasattr(self.buffer,'read1')\n self._b2cratio=0.0\n \n if self._seekable and self.writable():\n position=self.buffer.tell()\n if position !=0:\n try :\n self._get_encoder().setstate(0)\n except LookupError:\n \n pass\n \n \n \n \n \n \n \n \n \n \n def __repr__(self):\n result=\"<_io.TextIOWrapper\"\n try :\n name=self.name\n except AttributeError:\n pass\n else :\n result +=\" name={0!r}\".format(name)\n try :\n mode=self.mode\n except AttributeError:\n pass\n else :\n result +=\" mode={0!r}\".format(mode)\n return result+\" encoding={0!r}>\".format(self.encoding)\n \n @property\n def encoding(self):\n return self._encoding\n \n @property\n def errors(self):\n return self._errors\n \n @property\n def line_buffering(self):\n return self._line_buffering\n \n @property\n def buffer(self):\n return self._buffer\n \n def seekable(self):\n if self.closed:\n raise ValueError(\"I/O operation on closed file.\")\n return self._seekable\n \n def 
readable(self):\n return self.buffer.readable()\n \n def writable(self):\n return self.buffer.writable()\n \n def flush(self):\n self.buffer.flush()\n self._telling=self._seekable\n \n def close(self):\n if self.buffer is not None and not self.closed:\n try :\n self.flush()\n finally :\n self.buffer.close()\n \n @property\n def closed(self):\n return self.buffer.closed\n \n @property\n def name(self):\n return self.buffer.name\n \n def fileno(self):\n return self.buffer.fileno()\n \n def isatty(self):\n return self.buffer.isatty()\n \n def write(self,s):\n ''\n if self.closed:\n raise ValueError(\"write to closed file\")\n if not isinstance(s,str):\n raise TypeError(\"can't write %s to text stream\"%\n s.__class__.__name__)\n length=len(s)\n haslf=(self._writetranslate or self._line_buffering)and \"\\n\"in s\n if haslf and self._writetranslate and self._writenl !=\"\\n\":\n s=s.replace(\"\\n\",self._writenl)\n encoder=self._encoder or self._get_encoder()\n \n b=encoder.encode(s)\n self.buffer.write(b)\n if self._line_buffering and (haslf or \"\\r\"in s):\n self.flush()\n self._snapshot=None\n if self._decoder:\n self._decoder.reset()\n return length\n \n def _get_encoder(self):\n make_encoder=codecs.getincrementalencoder(self._encoding)\n self._encoder=make_encoder(self._errors)\n return self._encoder\n \n def _get_decoder(self):\n make_decoder=codecs.getincrementaldecoder(self._encoding)\n decoder=make_decoder(self._errors)\n if self._readuniversal:\n decoder=IncrementalNewlineDecoder(decoder,self._readtranslate)\n self._decoder=decoder\n return decoder\n \n \n \n \n def _set_decoded_chars(self,chars):\n ''\n self._decoded_chars=chars\n self._decoded_chars_used=0\n \n def _get_decoded_chars(self,n=None ):\n ''\n offset=self._decoded_chars_used\n if n is None :\n chars=self._decoded_chars[offset:]\n else :\n chars=self._decoded_chars[offset:offset+n]\n self._decoded_chars_used +=len(chars)\n return chars\n \n def _rewind_decoded_chars(self,n):\n ''\n if self._decoded_chars_used 0:\n decoder.setstate((b'',dec_flags))\n \n n=len(decoder.decode(next_input[:skip_bytes]))\n if n <=chars_to_skip:\n b,d=decoder.getstate()\n if not b:\n \n dec_flags=d\n chars_to_skip -=n\n break\n \n skip_bytes -=len(b)\n skip_back=1\n else :\n \n skip_bytes -=skip_back\n skip_back=skip_back *2\n else :\n skip_bytes=0\n decoder.setstate((b'',dec_flags))\n \n \n start_pos=position+skip_bytes\n start_flags=dec_flags\n if chars_to_skip ==0:\n \n return self._pack_cookie(start_pos,start_flags)\n \n \n \n \n \n bytes_fed=0\n need_eof=0\n \n chars_decoded=0\n for i in range(skip_bytes,len(next_input)):\n bytes_fed +=1\n chars_decoded +=len(decoder.decode(next_input[i:i+1]))\n dec_buffer,dec_flags=decoder.getstate()\n if not dec_buffer and chars_decoded <=chars_to_skip:\n \n start_pos +=bytes_fed\n chars_to_skip -=chars_decoded\n start_flags,bytes_fed,chars_decoded=dec_flags,0,0\n if chars_decoded >=chars_to_skip:\n break\n else :\n \n chars_decoded +=len(decoder.decode(b'',final=True ))\n need_eof=1\n if chars_decoded =0:\n endpos=pos+1\n break\n else :\n start=len(line)\n \n elif self._readuniversal:\n \n \n \n \n nlpos=line.find(\"\\n\",start)\n crpos=line.find(\"\\r\",start)\n if crpos ==-1:\n if nlpos ==-1:\n \n start=len(line)\n else :\n \n endpos=nlpos+1\n break\n elif nlpos ==-1:\n \n endpos=crpos+1\n break\n elif nlpos =0:\n endpos=pos+len(self._readnl)\n break\n \n if limit >=0 and len(line)>=limit:\n endpos=limit\n break\n \n \n while self._read_chunk():\n if self._decoded_chars:\n break\n if 
self._decoded_chars:\n line +=self._get_decoded_chars()\n else :\n \n self._set_decoded_chars('')\n self._snapshot=None\n return line\n \n if limit >=0 and endpos >limit:\n endpos=limit\n \n \n self._rewind_decoded_chars(len(line)-endpos)\n return line[:endpos]\n \n @property\n def newlines(self):\n return self._decoder.newlines if self._decoder else None\n \n \nclass StringIO(TextIOWrapper):\n ''\n\n\n\n \n \n def __init__(self,initial_value=\"\",newline=\"\\n\"):\n super(StringIO,self).__init__(BytesIO(),\n encoding=\"utf-8\",\n errors=\"strict\",\n newline=newline)\n \n \n if newline is None :\n self._writetranslate=False\n if initial_value is not None :\n if not isinstance(initial_value,str):\n raise TypeError(\"initial_value must be str or None, not {0}\"\n .format(type(initial_value).__name__))\n initial_value=str(initial_value)\n self.write(initial_value)\n self.seek(0)\n \n def getvalue(self):\n self.flush()\n return self.buffer.getvalue().decode(self._encoding,self._errors)\n \n def __repr__(self):\n \n \n return object.__repr__(self)\n \n @property\n def errors(self):\n return None\n \n @property\n def encoding(self):\n return None\n \n def detach(self):\n \n self._unsupported(\"detach\")\n \nclass FileIO(RawIOBase):\n pass\n \n_IOBase=IOBase\n_RawIOBase=RawIOBase\n_BufferedIOBase=BufferedIOBase\n_TextIOBase=TextIOBase\n", ["_dummy_thread", "_thread", "abc", "array", "codecs", "errno", "locale", "os"]], "_markupbase": [".py", "''\n\n\n\n\n\n\nimport re\n\n_declname_match=re.compile(r'[a-zA-Z][-_.a-zA-Z0-9]*\\s*').match\n_declstringlit_match=re.compile(r'(\\'[^\\']*\\'|\"[^\"]*\")\\s*').match\n_commentclose=re.compile(r'--\\s*>')\n_markedsectionclose=re.compile(r']\\s*]\\s*>')\n\n\n\n\n_msmarkedsectionclose=re.compile(r']\\s*>')\n\ndel re\n\n\nclass ParserBase:\n ''\n \n \n def __init__(self):\n if self.__class__ is ParserBase:\n raise RuntimeError(\n \"_markupbase.ParserBase must be subclassed\")\n \n def error(self,message):\n raise NotImplementedError(\n \"subclasses of ParserBase must override error()\")\n \n def reset(self):\n self.lineno=1\n self.offset=0\n \n def getpos(self):\n ''\n return self.lineno,self.offset\n \n \n \n \n \n def updatepos(self,i,j):\n if i >=j:\n return j\n rawdata=self.rawdata\n nlines=rawdata.count(\"\\n\",i,j)\n if nlines:\n self.lineno=self.lineno+nlines\n pos=rawdata.rindex(\"\\n\",i,j)\n self.offset=j -(pos+1)\n else :\n self.offset=self.offset+j -i\n return j\n \n _decl_otherchars=''\n \n \n def parse_declaration(self,i):\n \n \n \n \n \n \n \n \n \n \n rawdata=self.rawdata\n j=i+2\n assert rawdata[i:j]==\"\":\n \n return j+1\n if rawdata[j:j+1]in (\"-\",\"\"):\n \n \n return -1\n \n n=len(rawdata)\n if rawdata[j:j+2]=='--':\n \n return self.parse_comment(i)\n elif rawdata[j]=='[':\n \n \n \n \n return self.parse_marked_section(i)\n else :\n decltype,j=self._scan_name(j,i)\n if j <0:\n return j\n if decltype ==\"doctype\":\n self._decl_otherchars=''\n while j \":\n \n data=rawdata[i+2:j]\n if decltype ==\"doctype\":\n self.handle_decl(data)\n else :\n \n \n \n \n self.unknown_decl(data)\n return j+1\n if c in \"\\\"'\":\n m=_declstringlit_match(rawdata,j)\n if not m:\n return -1\n j=m.end()\n elif c in \"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ\":\n name,j=self._scan_name(j,i)\n elif c in self._decl_otherchars:\n j=j+1\n elif c ==\"[\":\n \n if decltype ==\"doctype\":\n j=self._parse_doctype_subset(j+1,i)\n elif decltype in {\"attlist\",\"linktype\",\"link\",\"element\"}:\n \n \n \n \n self.error(\"unsupported '[' char in %s 
declaration\"%decltype)\n else :\n self.error(\"unexpected '[' char in declaration\")\n else :\n self.error(\n \"unexpected %r char in declaration\"%rawdata[j])\n if j <0:\n return j\n return -1\n \n \n \n def parse_marked_section(self,i,report=1):\n rawdata=self.rawdata\n assert rawdata[i:i+3]=='n:\n \n return -1\n if rawdata[j:j+4]==\"\n \n \"\"\"%(self.OutputString(attrs).replace('\"',r'\\\"'))\n \n def OutputString(self,attrs=None ):\n \n \n result=[]\n append=result.append\n \n \n append(\"%s=%s\"%(self.key,self.coded_value))\n \n \n if attrs is None :\n attrs=self._reserved\n items=sorted(self.items())\n for key,value in items:\n if value ==\"\":\n continue\n if key not in attrs:\n continue\n if key ==\"expires\"and isinstance(value,int):\n append(\"%s=%s\"%(self._reserved[key],_getdate(value)))\n elif key ==\"max-age\"and isinstance(value,int):\n append(\"%s=%d\"%(self._reserved[key],value))\n elif key ==\"secure\":\n append(str(self._reserved[key]))\n elif key ==\"httponly\":\n append(str(self._reserved[key]))\n else :\n append(\"%s=%s\"%(self._reserved[key],value))\n \n \n return _semispacejoin(result)\n \n \n \n \n \n \n \n \n \n \n \n_LegalCharsPatt=r\"[\\w\\d!#%&'~_`><@,:/\\$\\*\\+\\-\\.\\^\\|\\)\\(\\?\\}\\{\\=]\"\n_CookiePattern=re.compile(r\"\"\"\n (?x) # This is a verbose pattern\n (?P # Start of group 'key'\n \"\"\"+_LegalCharsPatt+r\"\"\"+? # Any word of at least one letter\n ) # End of group 'key'\n ( # Optional group: there may not be a value.\n \\s*=\\s* # Equal Sign\n (?P # Start of group 'val'\n \"(?:[^\\\\\"]|\\\\.)*\" # Any doublequoted string\n | # or\n \\w{3},\\s[\\w\\d\\s-]{9,11}\\s[\\d:]{8}\\sGMT # Special case for \"expires\" attr\n | # or\n \"\"\"+_LegalCharsPatt+r\"\"\"* # Any word or empty string\n ) # End of group 'val'\n )? 
# End of optional value group\n \\s* # Any number of spaces.\n (\\s+|;|$) # Ending either at space, semicolon, or EOS.\n \"\"\",re.ASCII)\n\n\n\n\n\nclass BaseCookie(dict):\n ''\n \n def value_decode(self,val):\n ''\n\n\n\n\n \n return val,val\n \n def value_encode(self,val):\n ''\n\n\n\n \n strval=str(val)\n return strval,strval\n \n def __init__(self,input=None ):\n if input:\n self.load(input)\n \n def __set(self,key,real_value,coded_value):\n ''\n M=self.get(key,Morsel())\n M.set(key,real_value,coded_value)\n dict.__setitem__(self,key,M)\n \n def __setitem__(self,key,value):\n ''\n rval,cval=self.value_encode(value)\n self.__set(key,rval,cval)\n \n def output(self,attrs=None ,header=\"Set-Cookie:\",sep=\"\\015\\012\"):\n ''\n result=[]\n items=sorted(self.items())\n for key,value in items:\n result.append(value.output(attrs,header))\n return sep.join(result)\n \n __str__=output\n \n def __repr__(self):\n l=[]\n items=sorted(self.items())\n for key,value in items:\n l.append('%s=%s'%(key,repr(value.value)))\n return '<%s: %s>'%(self.__class__.__name__,_spacejoin(l))\n \n def js_output(self,attrs=None ):\n ''\n result=[]\n items=sorted(self.items())\n for key,value in items:\n result.append(value.js_output(attrs))\n return _nulljoin(result)\n \n def load(self,rawdata):\n ''\n\n\n\n \n if isinstance(rawdata,str):\n self.__parse_string(rawdata)\n else :\n \n for key,value in rawdata.items():\n self[key]=value\n return\n \n def __parse_string(self,str,patt=_CookiePattern):\n i=0\n n=len(str)\n M=None\n \n while 0 <=i ModuleType:\n ''\n\n\n\n \n if hasattr(package,'__spec__'):\n if package.__spec__.submodule_search_locations is None :\n raise TypeError('{!r} is not a package'.format(\n package.__spec__.name))\n else :\n return package\n else :\n module=import_module(package)\n if module.__spec__.submodule_search_locations is None :\n raise TypeError('{!r} is not a package'.format(package))\n else :\n return module\n \n \ndef _normalize_path(path)->str:\n ''\n\n\n \n parent,file_name=os.path.split(path)\n if parent:\n raise ValueError('{!r} must be only a file name'.format(path))\n else :\n return file_name\n \n \ndef _get_resource_reader(\npackage:ModuleType)->Optional[resources_abc.ResourceReader]:\n\n\n\n\n\n spec=package.__spec__\n if hasattr(spec.loader,'get_resource_reader'):\n return cast(resources_abc.ResourceReader,\n spec.loader.get_resource_reader(spec.name))\n return None\n \n \ndef _check_location(package):\n if package.__spec__.origin is None or not package.__spec__.has_location:\n raise FileNotFoundError(f'Package has no location {package!r}')\n \n \ndef open_binary(package:Package,resource:Resource)->BinaryIO:\n ''\n resource=_normalize_path(resource)\n package=_get_package(package)\n reader=_get_resource_reader(package)\n if reader is not None :\n return reader.open_resource(resource)\n _check_location(package)\n absolute_package_path=os.path.abspath(package.__spec__.origin)\n package_path=os.path.dirname(absolute_package_path)\n full_path=os.path.join(package_path,resource)\n try :\n return open(full_path,mode='rb')\n except OSError:\n \n \n \n loader=cast(ResourceLoader,package.__spec__.loader)\n data=None\n if hasattr(package.__spec__.loader,'get_data'):\n with suppress(OSError):\n data=loader.get_data(full_path)\n if data is None :\n package_name=package.__spec__.name\n message='{!r} resource not found in {!r}'.format(\n resource,package_name)\n raise FileNotFoundError(message)\n else :\n return BytesIO(data)\n \n \ndef 
open_text(package:Package,\nresource:Resource,\nencoding:str='utf-8',\nerrors:str='strict')->TextIO:\n ''\n resource=_normalize_path(resource)\n package=_get_package(package)\n reader=_get_resource_reader(package)\n if reader is not None :\n return TextIOWrapper(reader.open_resource(resource),encoding,errors)\n _check_location(package)\n absolute_package_path=os.path.abspath(package.__spec__.origin)\n package_path=os.path.dirname(absolute_package_path)\n full_path=os.path.join(package_path,resource)\n try :\n return open(full_path,mode='r',encoding=encoding,errors=errors)\n except OSError:\n \n \n \n loader=cast(ResourceLoader,package.__spec__.loader)\n data=None\n if hasattr(package.__spec__.loader,'get_data'):\n with suppress(OSError):\n data=loader.get_data(full_path)\n if data is None :\n package_name=package.__spec__.name\n message='{!r} resource not found in {!r}'.format(\n resource,package_name)\n raise FileNotFoundError(message)\n else :\n return TextIOWrapper(BytesIO(data),encoding,errors)\n \n \ndef read_binary(package:Package,resource:Resource)->bytes:\n ''\n resource=_normalize_path(resource)\n package=_get_package(package)\n with open_binary(package,resource)as fp:\n return fp.read()\n \n \ndef read_text(package:Package,\nresource:Resource,\nencoding:str='utf-8',\nerrors:str='strict')->str:\n ''\n\n\n\n \n resource=_normalize_path(resource)\n package=_get_package(package)\n with open_text(package,resource,encoding,errors)as fp:\n return fp.read()\n \n \n@contextmanager\ndef path(package:Package,resource:Resource)->Iterator[Path]:\n ''\n\n\n\n\n\n\n \n resource=_normalize_path(resource)\n package=_get_package(package)\n reader=_get_resource_reader(package)\n if reader is not None :\n try :\n yield Path(reader.resource_path(resource))\n return\n except FileNotFoundError:\n pass\n else :\n _check_location(package)\n \n \n package_directory=Path(package.__spec__.origin).parent\n file_path=package_directory /resource\n if file_path.exists():\n yield file_path\n else :\n with open_binary(package,resource)as fp:\n data=fp.read()\n \n \n \n fd,raw_path=tempfile.mkstemp()\n try :\n os.write(fd,data)\n os.close(fd)\n yield Path(raw_path)\n finally :\n try :\n os.remove(raw_path)\n except FileNotFoundError:\n pass\n \n \ndef is_resource(package:Package,name:str)->bool:\n ''\n\n\n \n package=_get_package(package)\n _normalize_path(name)\n reader=_get_resource_reader(package)\n if reader is not None :\n return reader.is_resource(name)\n try :\n package_contents=set(contents(package))\n except (NotADirectoryError,FileNotFoundError):\n return False\n if name not in package_contents:\n return False\n \n \n \n path=Path(package.__spec__.origin).parent /name\n return path.is_file()\n \n \ndef contents(package:Package)->Iterable[str]:\n ''\n\n\n\n\n \n package=_get_package(package)\n reader=_get_resource_reader(package)\n if reader is not None :\n return reader.contents()\n \n \n \n elif package.__spec__.origin is None or not package.__spec__.has_location:\n return ()\n else :\n package_directory=Path(package.__spec__.origin).parent\n return os.listdir(package_directory)\n \n \n \n \n \n \n \n \nclass _ZipImportResourceReader(resources_abc.ResourceReader):\n ''\n\n\n\n \n \n def __init__(self,zipimporter,fullname):\n self.zipimporter=zipimporter\n self.fullname=fullname\n \n def open_resource(self,resource):\n fullname_as_path=self.fullname.replace('.','/')\n path=f'{fullname_as_path}/{resource}'\n try :\n return BytesIO(self.zipimporter.get_data(path))\n except OSError:\n raise 
FileNotFoundError(path)\n \n def resource_path(self,resource):\n \n \n \n raise FileNotFoundError\n \n def is_resource(self,name):\n \n \n fullname_as_path=self.fullname.replace('.','/')\n path=f'{fullname_as_path}/{name}'\n try :\n self.zipimporter.get_data(path)\n except OSError:\n return False\n return True\n \n def contents(self):\n \n \n \n \n \n \n \n fullname_path=Path(self.zipimporter.get_filename(self.fullname))\n relative_path=fullname_path.relative_to(self.zipimporter.archive)\n \n \n assert relative_path.name =='__init__.py'\n package_path=relative_path.parent\n subdirs_seen=set()\n for filename in self.zipimporter._files:\n try :\n relative=Path(filename).relative_to(package_path)\n except ValueError:\n continue\n \n \n \n \n parent_name=relative.parent.name\n if len(parent_name)==0:\n yield relative.name\n elif parent_name not in subdirs_seen:\n subdirs_seen.add(parent_name)\n yield parent_name\n \n \n \ndef _zipimport_get_resource_reader(zipimporter,fullname):\n try :\n if not zipimporter.is_package(fullname):\n return None\n except ZipImportError:\n return None\n return _ZipImportResourceReader(zipimporter,fullname)\n", ["contextlib", "importlib", "importlib.abc", "io", "os", "pathlib", "tempfile", "types", "typing", "typing.io", "zipimport"]], "importlib.util": [".py", "''\nfrom . import abc\nfrom ._bootstrap import module_from_spec\nfrom ._bootstrap import _resolve_name\nfrom ._bootstrap import spec_from_loader\nfrom ._bootstrap import _find_spec\nfrom ._bootstrap_external import MAGIC_NUMBER\nfrom ._bootstrap_external import _RAW_MAGIC_NUMBER\nfrom ._bootstrap_external import cache_from_source\nfrom ._bootstrap_external import decode_source\nfrom ._bootstrap_external import source_from_cache\nfrom ._bootstrap_external import spec_from_file_location\n\nfrom contextlib import contextmanager\nimport _imp\nimport functools\nimport sys\nimport types\nimport warnings\n\n\ndef source_hash(source_bytes):\n ''\n return _imp.source_hash(_RAW_MAGIC_NUMBER,source_bytes)\n \n \ndef resolve_name(name,package):\n ''\n if not name.startswith('.'):\n return name\n elif not package:\n raise ValueError(f'no package specified for {repr(name)} '\n '(required for relative module names)')\n level=0\n for character in name:\n if character !='.':\n break\n level +=1\n return _resolve_name(name[level:],package,level)\n \n \ndef _find_spec_from_path(name,path=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n if name not in sys.modules:\n return _find_spec(name,path)\n else :\n module=sys.modules[name]\n if module is None :\n return None\n try :\n spec=module.__spec__\n except AttributeError:\n raise ValueError('{}.__spec__ is not set'.format(name))from None\n else :\n if spec is None :\n raise ValueError('{}.__spec__ is None'.format(name))\n return spec\n \n \ndef find_spec(name,package=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n fullname=resolve_name(name,package)if name.startswith('.')else name\n if fullname not in sys.modules:\n parent_name=fullname.rpartition('.')[0]\n if parent_name:\n parent=__import__(parent_name,fromlist=['__path__'])\n try :\n parent_path=parent.__path__\n except AttributeError as e:\n raise ModuleNotFoundError(\n f\"__path__ attribute not found on {parent_name!r} \"\n f\"while trying to find {fullname!r}\",name=fullname)from e\n else :\n parent_path=None\n return _find_spec(fullname,parent_path)\n else :\n module=sys.modules[fullname]\n if module is None :\n return None\n try :\n spec=module.__spec__\n except AttributeError:\n raise ValueError('{}.__spec__ is not 
set'.format(name))from None\n else :\n if spec is None :\n raise ValueError('{}.__spec__ is None'.format(name))\n return spec\n \n \n@contextmanager\ndef _module_to_load(name):\n is_reload=name in sys.modules\n \n module=sys.modules.get(name)\n if not is_reload:\n \n \n \n module=type(sys)(name)\n \n \n module.__initializing__=True\n sys.modules[name]=module\n try :\n yield module\n except Exception:\n if not is_reload:\n try :\n del sys.modules[name]\n except KeyError:\n pass\n finally :\n module.__initializing__=False\n \n \ndef set_package(fxn):\n ''\n\n\n\n \n @functools.wraps(fxn)\n def set_package_wrapper(*args,**kwargs):\n warnings.warn('The import system now takes care of this automatically.',\n DeprecationWarning,stacklevel=2)\n module=fxn(*args,**kwargs)\n if getattr(module,'__package__',None )is None :\n module.__package__=module.__name__\n if not hasattr(module,'__path__'):\n module.__package__=module.__package__.rpartition('.')[0]\n return module\n return set_package_wrapper\n \n \ndef set_loader(fxn):\n ''\n\n\n\n \n @functools.wraps(fxn)\n def set_loader_wrapper(self,*args,**kwargs):\n warnings.warn('The import system now takes care of this automatically.',\n DeprecationWarning,stacklevel=2)\n module=fxn(self,*args,**kwargs)\n if getattr(module,'__loader__',None )is None :\n module.__loader__=self\n return module\n return set_loader_wrapper\n \n \ndef module_for_loader(fxn):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n warnings.warn('The import system now takes care of this automatically.',\n DeprecationWarning,stacklevel=2)\n @functools.wraps(fxn)\n def module_for_loader_wrapper(self,fullname,*args,**kwargs):\n with _module_to_load(fullname)as module:\n module.__loader__=self\n try :\n is_package=self.is_package(fullname)\n except (ImportError,AttributeError):\n pass\n else :\n if is_package:\n module.__package__=fullname\n else :\n module.__package__=fullname.rpartition('.')[0]\n \n return fxn(self,module,*args,**kwargs)\n \n return module_for_loader_wrapper\n \n \nclass _LazyModule(types.ModuleType):\n\n ''\n \n def __getattribute__(self,attr):\n ''\n \n \n \n self.__class__=types.ModuleType\n \n \n original_name=self.__spec__.name\n \n \n attrs_then=self.__spec__.loader_state['__dict__']\n original_type=self.__spec__.loader_state['__class__']\n attrs_now=self.__dict__\n attrs_updated={}\n for key,value in attrs_now.items():\n \n \n if key not in attrs_then:\n attrs_updated[key]=value\n elif id(attrs_now[key])!=id(attrs_then[key]):\n attrs_updated[key]=value\n self.__spec__.loader.exec_module(self)\n \n \n if original_name in sys.modules:\n if id(self)!=id(sys.modules[original_name]):\n raise ValueError(f\"module object for {original_name!r} \"\n \"substituted in sys.modules during a lazy \"\n \"load\")\n \n \n self.__dict__.update(attrs_updated)\n return getattr(self,attr)\n \n def __delattr__(self,attr):\n ''\n \n \n self.__getattribute__(attr)\n delattr(self,attr)\n \n \nclass LazyLoader(abc.Loader):\n\n ''\n \n @staticmethod\n def __check_eager_loader(loader):\n if not hasattr(loader,'exec_module'):\n raise TypeError('loader must define exec_module()')\n \n @classmethod\n def factory(cls,loader):\n ''\n cls.__check_eager_loader(loader)\n return lambda *args,**kwargs:cls(loader(*args,**kwargs))\n \n def __init__(self,loader):\n self.__check_eager_loader(loader)\n self.loader=loader\n \n def create_module(self,spec):\n return self.loader.create_module(spec)\n \n def exec_module(self,module):\n ''\n module.__spec__.loader=self.loader\n module.__loader__=self.loader\n \n \n \n 
\n loader_state={}\n loader_state['__dict__']=module.__dict__.copy()\n loader_state['__class__']=module.__class__\n module.__spec__.loader_state=loader_state\n module.__class__=_LazyModule\n", ["_imp", "contextlib", "functools", "importlib", "importlib._bootstrap", "importlib._bootstrap_external", "importlib.abc", "sys", "types", "warnings"]], "importlib._bootstrap": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n_bootstrap_external=None\n\ndef _wrap(new,old):\n ''\n for replace in ['__module__','__name__','__qualname__','__doc__']:\n if hasattr(old,replace):\n setattr(new,replace,getattr(old,replace))\n new.__dict__.update(old.__dict__)\n \n \ndef _new_module(name):\n return type(sys)(name)\n \n \n \n \n \n \n_module_locks={}\n\n_blocking_on={}\n\n\nclass _DeadlockError(RuntimeError):\n pass\n \n \nclass _ModuleLock:\n ''\n\n\n \n \n def __init__(self,name):\n self.lock=_thread.allocate_lock()\n self.wakeup=_thread.allocate_lock()\n self.name=name\n self.owner=None\n self.count=0\n self.waiters=0\n \n def has_deadlock(self):\n \n me=_thread.get_ident()\n tid=self.owner\n while True :\n lock=_blocking_on.get(tid)\n if lock is None :\n return False\n tid=lock.owner\n if tid ==me:\n return True\n \n def acquire(self):\n ''\n\n\n\n \n tid=_thread.get_ident()\n _blocking_on[tid]=self\n try :\n while True :\n with self.lock:\n if self.count ==0 or self.owner ==tid:\n self.owner=tid\n self.count +=1\n return True\n if self.has_deadlock():\n raise _DeadlockError('deadlock detected by %r'%self)\n if self.wakeup.acquire(False ):\n self.waiters +=1\n \n self.wakeup.acquire()\n self.wakeup.release()\n finally :\n del _blocking_on[tid]\n \n def release(self):\n tid=_thread.get_ident()\n with self.lock:\n if self.owner !=tid:\n raise RuntimeError('cannot release un-acquired lock')\n assert self.count >0\n self.count -=1\n if self.count ==0:\n self.owner=None\n if self.waiters:\n self.waiters -=1\n self.wakeup.release()\n \n def __repr__(self):\n return '_ModuleLock({!r}) at {}'.format(self.name,id(self))\n \n \nclass _DummyModuleLock:\n ''\n \n \n def __init__(self,name):\n self.name=name\n self.count=0\n \n def acquire(self):\n self.count +=1\n return True\n \n def release(self):\n if self.count ==0:\n raise RuntimeError('cannot release un-acquired lock')\n self.count -=1\n \n def __repr__(self):\n return '_DummyModuleLock({!r}) at {}'.format(self.name,id(self))\n \n \nclass _ModuleLockManager:\n\n def __init__(self,name):\n self._name=name\n self._lock=None\n \n def __enter__(self):\n self._lock=_get_module_lock(self._name)\n self._lock.acquire()\n \n def __exit__(self,*args,**kwargs):\n self._lock.release()\n \n \n \n \ndef _get_module_lock(name):\n ''\n\n\n \n \n _imp.acquire_lock()\n try :\n try :\n lock=_module_locks[name]()\n except KeyError:\n lock=None\n \n if lock is None :\n if _thread is None :\n lock=_DummyModuleLock(name)\n else :\n lock=_ModuleLock(name)\n \n def cb(ref,name=name):\n _imp.acquire_lock()\n try :\n \n \n \n if _module_locks.get(name)is ref:\n del _module_locks[name]\n finally :\n _imp.release_lock()\n \n _module_locks[name]=_weakref.ref(lock,cb)\n finally :\n _imp.release_lock()\n \n return lock\n \n \ndef _lock_unlock_module(name):\n ''\n\n\n\n \n lock=_get_module_lock(name)\n try :\n lock.acquire()\n except _DeadlockError:\n \n \n pass\n else :\n lock.release()\n \n \ndef _call_with_frames_removed(f,*args,**kwds):\n ''\n\n\n\n\n\n \n return f(*args,**kwds)\n \n \ndef _verbose_message(message,*args,verbosity=1):\n ''\n if sys.flags.verbose >=verbosity:\n if 
not message.startswith(('#','import ')):\n message='# '+message\n print(message.format(*args),file=sys.stderr)\n \n \ndef _requires_builtin(fxn):\n ''\n def _requires_builtin_wrapper(self,fullname):\n if fullname not in sys.builtin_module_names:\n raise ImportError('{!r} is not a built-in module'.format(fullname),\n name=fullname)\n return fxn(self,fullname)\n _wrap(_requires_builtin_wrapper,fxn)\n return _requires_builtin_wrapper\n \n \ndef _requires_frozen(fxn):\n ''\n def _requires_frozen_wrapper(self,fullname):\n if not _imp.is_frozen(fullname):\n raise ImportError('{!r} is not a frozen module'.format(fullname),\n name=fullname)\n return fxn(self,fullname)\n _wrap(_requires_frozen_wrapper,fxn)\n return _requires_frozen_wrapper\n \n \n \ndef _load_module_shim(self,fullname):\n ''\n\n\n\n \n spec=spec_from_loader(fullname,self)\n if fullname in sys.modules:\n module=sys.modules[fullname]\n _exec(spec,module)\n return sys.modules[fullname]\n else :\n return _load(spec)\n \n \n \ndef _module_repr(module):\n\n loader=getattr(module,'__loader__',None )\n if hasattr(loader,'module_repr'):\n \n \n \n try :\n return loader.module_repr(module)\n except Exception:\n pass\n try :\n spec=module.__spec__\n except AttributeError:\n pass\n else :\n if spec is not None :\n return _module_repr_from_spec(spec)\n \n \n \n try :\n name=module.__name__\n except AttributeError:\n name='?'\n try :\n filename=module.__file__\n except AttributeError:\n if loader is None :\n return ''.format(name)\n else :\n return ''.format(name,loader)\n else :\n return ''.format(name,filename)\n \n \nclass _installed_safely:\n\n def __init__(self,module):\n self._module=module\n self._spec=module.__spec__\n \n def __enter__(self):\n \n \n \n self._spec._initializing=True\n sys.modules[self._spec.name]=self._module\n \n def __exit__(self,*args):\n try :\n spec=self._spec\n if any(arg is not None for arg in args):\n try :\n del sys.modules[spec.name]\n except KeyError:\n pass\n else :\n _verbose_message('import {!r} # {!r}',spec.name,spec.loader)\n finally :\n self._spec._initializing=False\n \n \nclass ModuleSpec:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def __init__(self,name,loader,*,origin=None ,loader_state=None ,\n is_package=None ):\n self.name=name\n self.loader=loader\n self.origin=origin\n self.loader_state=loader_state\n self.submodule_search_locations=[]if is_package else None\n \n \n self._set_fileattr=False\n self._cached=None\n \n def __repr__(self):\n args=['name={!r}'.format(self.name),\n 'loader={!r}'.format(self.loader)]\n if self.origin is not None :\n args.append('origin={!r}'.format(self.origin))\n if self.submodule_search_locations is not None :\n args.append('submodule_search_locations={}'\n .format(self.submodule_search_locations))\n return '{}({})'.format(self.__class__.__name__,', '.join(args))\n \n def __eq__(self,other):\n smsl=self.submodule_search_locations\n try :\n return (self.name ==other.name and\n self.loader ==other.loader and\n self.origin ==other.origin and\n smsl ==other.submodule_search_locations and\n self.cached ==other.cached and\n self.has_location ==other.has_location)\n except AttributeError:\n return False\n \n @property\n def cached(self):\n if self._cached is None :\n if self.origin is not None and self._set_fileattr:\n if _bootstrap_external is None :\n raise NotImplementedError\n self._cached=_bootstrap_external._get_cached(self.origin)\n return self._cached\n \n @cached.setter\n def cached(self,cached):\n self._cached=cached\n \n 
@property\n def parent(self):\n ''\n if self.submodule_search_locations is None :\n return self.name.rpartition('.')[0]\n else :\n return self.name\n \n @property\n def has_location(self):\n return self._set_fileattr\n \n @has_location.setter\n def has_location(self,value):\n self._set_fileattr=bool(value)\n \n \ndef spec_from_loader(name,loader,*,origin=None ,is_package=None ):\n ''\n if hasattr(loader,'get_filename'):\n if _bootstrap_external is None :\n raise NotImplementedError\n spec_from_file_location=_bootstrap_external.spec_from_file_location\n \n if is_package is None :\n return spec_from_file_location(name,loader=loader)\n search=[]if is_package else None\n return spec_from_file_location(name,loader=loader,\n submodule_search_locations=search)\n \n if is_package is None :\n if hasattr(loader,'is_package'):\n try :\n is_package=loader.is_package(name)\n except ImportError:\n is_package=None\n else :\n \n is_package=False\n \n return ModuleSpec(name,loader,origin=origin,is_package=is_package)\n \n \ndef _spec_from_module(module,loader=None ,origin=None ):\n\n try :\n spec=module.__spec__\n except AttributeError:\n pass\n else :\n if spec is not None :\n return spec\n \n name=module.__name__\n if loader is None :\n try :\n loader=module.__loader__\n except AttributeError:\n \n pass\n try :\n location=module.__file__\n except AttributeError:\n location=None\n if origin is None :\n if location is None :\n try :\n origin=loader._ORIGIN\n except AttributeError:\n origin=None\n else :\n origin=location\n try :\n cached=module.__cached__\n except AttributeError:\n cached=None\n try :\n submodule_search_locations=list(module.__path__)\n except AttributeError:\n submodule_search_locations=None\n \n spec=ModuleSpec(name,loader,origin=origin)\n spec._set_fileattr=False if location is None else True\n spec.cached=cached\n spec.submodule_search_locations=submodule_search_locations\n return spec\n \n \ndef _init_module_attrs(spec,module,*,override=False ):\n\n\n\n if (override or getattr(module,'__name__',None )is None ):\n try :\n module.__name__=spec.name\n except AttributeError:\n pass\n \n if override or getattr(module,'__loader__',None )is None :\n loader=spec.loader\n if loader is None :\n \n if spec.submodule_search_locations is not None :\n if _bootstrap_external is None :\n raise NotImplementedError\n _NamespaceLoader=_bootstrap_external._NamespaceLoader\n \n loader=_NamespaceLoader.__new__(_NamespaceLoader)\n loader._path=spec.submodule_search_locations\n spec.loader=loader\n \n \n \n \n \n \n \n \n \n \n module.__file__=None\n try :\n module.__loader__=loader\n except AttributeError:\n pass\n \n if override or getattr(module,'__package__',None )is None :\n try :\n module.__package__=spec.parent\n except AttributeError:\n pass\n \n try :\n module.__spec__=spec\n except AttributeError:\n pass\n \n if override or getattr(module,'__path__',None )is None :\n if spec.submodule_search_locations is not None :\n try :\n module.__path__=spec.submodule_search_locations\n except AttributeError:\n pass\n \n if spec.has_location:\n if override or getattr(module,'__file__',None )is None :\n try :\n module.__file__=spec.origin\n except AttributeError:\n pass\n \n if override or getattr(module,'__cached__',None )is None :\n if spec.cached is not None :\n try :\n module.__cached__=spec.cached\n except AttributeError:\n pass\n return module\n \n \ndef module_from_spec(spec):\n ''\n \n module=None\n if hasattr(spec.loader,'create_module'):\n \n \n module=spec.loader.create_module(spec)\n elif 
hasattr(spec.loader,'exec_module'):\n raise ImportError('loaders that define exec_module() '\n 'must also define create_module()')\n if module is None :\n module=_new_module(spec.name)\n _init_module_attrs(spec,module)\n return module\n \n \ndef _module_repr_from_spec(spec):\n ''\n \n name='?'if spec.name is None else spec.name\n if spec.origin is None :\n if spec.loader is None :\n return ''.format(name)\n else :\n return ''.format(name,spec.loader)\n else :\n if spec.has_location:\n return ''.format(name,spec.origin)\n else :\n return ''.format(spec.name,spec.origin)\n \n \n \ndef _exec(spec,module):\n ''\n name=spec.name\n with _ModuleLockManager(name):\n if sys.modules.get(name)is not module:\n msg='module {!r} not in sys.modules'.format(name)\n raise ImportError(msg,name=name)\n if spec.loader is None :\n if spec.submodule_search_locations is None :\n raise ImportError('missing loader',name=spec.name)\n \n _init_module_attrs(spec,module,override=True )\n return module\n _init_module_attrs(spec,module,override=True )\n if not hasattr(spec.loader,'exec_module'):\n \n \n \n spec.loader.load_module(name)\n else :\n spec.loader.exec_module(module)\n return sys.modules[name]\n \n \ndef _load_backward_compatible(spec):\n\n\n\n spec.loader.load_module(spec.name)\n \n module=sys.modules[spec.name]\n if getattr(module,'__loader__',None )is None :\n try :\n module.__loader__=spec.loader\n except AttributeError:\n pass\n if getattr(module,'__package__',None )is None :\n try :\n \n \n \n module.__package__=module.__name__\n if not hasattr(module,'__path__'):\n module.__package__=spec.name.rpartition('.')[0]\n except AttributeError:\n pass\n if getattr(module,'__spec__',None )is None :\n try :\n module.__spec__=spec\n except AttributeError:\n pass\n return module\n \ndef _load_unlocked(spec):\n\n if spec.loader is not None :\n \n if not hasattr(spec.loader,'exec_module'):\n return _load_backward_compatible(spec)\n \n module=module_from_spec(spec)\n with _installed_safely(module):\n if spec.loader is None :\n if spec.submodule_search_locations is None :\n raise ImportError('missing loader',name=spec.name)\n \n else :\n spec.loader.exec_module(module)\n \n \n \n \n return sys.modules[spec.name]\n \n \n \ndef _load(spec):\n ''\n\n\n\n\n\n\n \n with _ModuleLockManager(spec.name):\n return _load_unlocked(spec)\n \n \n \n \nclass BuiltinImporter:\n\n ''\n\n\n\n\n \n \n @staticmethod\n def module_repr(module):\n ''\n\n\n\n \n return ''.format(module.__name__)\n \n @classmethod\n def find_spec(cls,fullname,path=None ,target=None ):\n if path is not None :\n return None\n if _imp.is_builtin(fullname):\n return spec_from_loader(fullname,cls,origin='built-in')\n else :\n return None\n \n @classmethod\n def find_module(cls,fullname,path=None ):\n ''\n\n\n\n\n\n \n spec=cls.find_spec(fullname,path)\n return spec.loader if spec is not None else None\n \n @classmethod\n def create_module(self,spec):\n ''\n if spec.name not in sys.builtin_module_names:\n raise ImportError('{!r} is not a built-in module'.format(spec.name),\n name=spec.name)\n return _call_with_frames_removed(_imp.create_builtin,spec)\n \n @classmethod\n def exec_module(self,module):\n ''\n _call_with_frames_removed(_imp.exec_builtin,module)\n \n @classmethod\n @_requires_builtin\n def get_code(cls,fullname):\n ''\n return None\n \n @classmethod\n @_requires_builtin\n def get_source(cls,fullname):\n ''\n return None\n \n @classmethod\n @_requires_builtin\n def is_package(cls,fullname):\n ''\n return False\n \n 
load_module=classmethod(_load_module_shim)\n \n \nclass FrozenImporter:\n\n ''\n\n\n\n\n \n \n @staticmethod\n def module_repr(m):\n ''\n\n\n\n \n return ''.format(m.__name__)\n \n @classmethod\n def find_spec(cls,fullname,path=None ,target=None ):\n if _imp.is_frozen(fullname):\n return spec_from_loader(fullname,cls,origin='frozen')\n else :\n return None\n \n @classmethod\n def find_module(cls,fullname,path=None ):\n ''\n\n\n\n \n return cls if _imp.is_frozen(fullname)else None\n \n @classmethod\n def create_module(cls,spec):\n ''\n \n @staticmethod\n def exec_module(module):\n name=module.__spec__.name\n if not _imp.is_frozen(name):\n raise ImportError('{!r} is not a frozen module'.format(name),\n name=name)\n code=_call_with_frames_removed(_imp.get_frozen_object,name)\n exec(code,module.__dict__)\n \n @classmethod\n def load_module(cls,fullname):\n ''\n\n\n\n \n return _load_module_shim(cls,fullname)\n \n @classmethod\n @_requires_frozen\n def get_code(cls,fullname):\n ''\n return _imp.get_frozen_object(fullname)\n \n @classmethod\n @_requires_frozen\n def get_source(cls,fullname):\n ''\n return None\n \n @classmethod\n @_requires_frozen\n def is_package(cls,fullname):\n ''\n return _imp.is_frozen_package(fullname)\n \n \n \n \nclass _ImportLockContext:\n\n ''\n \n def __enter__(self):\n ''\n _imp.acquire_lock()\n \n def __exit__(self,exc_type,exc_value,exc_traceback):\n ''\n _imp.release_lock()\n \n \ndef _resolve_name(name,package,level):\n ''\n bits=package.rsplit('.',level -1)\n if len(bits)= 0')\n if level >0:\n if not isinstance(package,str):\n raise TypeError('__package__ not set to a string')\n elif not package:\n raise ImportError('attempted relative import with no known parent '\n 'package')\n if not name and level ==0:\n raise ValueError('Empty module name')\n \n \n_ERR_MSG_PREFIX='No module named '\n_ERR_MSG=_ERR_MSG_PREFIX+'{!r}'\n\ndef _find_and_load_unlocked(name,import_):\n path=None\n parent=name.rpartition('.')[0]\n if parent:\n if parent not in sys.modules:\n _call_with_frames_removed(import_,parent)\n \n if name in sys.modules:\n return sys.modules[name]\n parent_module=sys.modules[parent]\n try :\n path=parent_module.__path__\n except AttributeError:\n msg=(_ERR_MSG+'; {!r} is not a package').format(name,parent)\n raise ModuleNotFoundError(msg,name=name)from None\n spec=_find_spec(name,path)\n if spec is None :\n raise ModuleNotFoundError(_ERR_MSG.format(name),name=name)\n else :\n module=_load_unlocked(spec)\n if parent:\n \n parent_module=sys.modules[parent]\n setattr(parent_module,name.rpartition('.')[2],module)\n return module\n \n \n_NEEDS_LOADING=object()\n\n\ndef _find_and_load(name,import_):\n ''\n with _ModuleLockManager(name):\n module=sys.modules.get(name,_NEEDS_LOADING)\n if module is _NEEDS_LOADING:\n return _find_and_load_unlocked(name,import_)\n \n if module is None :\n message=('import of {} halted; '\n 'None in sys.modules'.format(name))\n raise ModuleNotFoundError(message,name=name)\n \n _lock_unlock_module(name)\n return module\n \n \ndef _gcd_import(name,package=None ,level=0):\n ''\n\n\n\n\n\n\n \n _sanity_check(name,package,level)\n if level >0:\n name=_resolve_name(name,package,level)\n return _find_and_load(name,_gcd_import)\n \n \ndef _handle_fromlist(module,fromlist,import_,*,recursive=False ):\n ''\n\n\n\n\n\n \n \n \n if hasattr(module,'__path__'):\n for x in fromlist:\n if not isinstance(x,str):\n if recursive:\n where=module.__name__+'.__all__'\n else :\n where=\"``from list''\"\n raise TypeError(f\"Item in {where} must be str, \"\n 
f\"not {type(x).__name__}\")\n elif x =='*':\n if not recursive and hasattr(module,'__all__'):\n _handle_fromlist(module,module.__all__,import_,\n recursive=True )\n elif not hasattr(module,x):\n from_name='{}.{}'.format(module.__name__,x)\n try :\n _call_with_frames_removed(import_,from_name)\n except ModuleNotFoundError as exc:\n \n \n \n if (exc.name ==from_name and\n sys.modules.get(from_name,_NEEDS_LOADING)is not None ):\n continue\n raise\n return module\n \n \ndef _calc___package__(globals):\n ''\n\n\n\n\n \n package=globals.get('__package__')\n spec=globals.get('__spec__')\n if package is not None :\n if spec is not None and package !=spec.parent:\n _warnings.warn(\"__package__ != __spec__.parent \"\n f\"({package!r} != {spec.parent!r})\",\n ImportWarning,stacklevel=3)\n return package\n elif spec is not None :\n return spec.parent\n else :\n _warnings.warn(\"can't resolve package from __spec__ or __package__, \"\n \"falling back on __name__ and __path__\",\n ImportWarning,stacklevel=3)\n package=globals['__name__']\n if '__path__'not in globals:\n package=package.rpartition('.')[0]\n return package\n \n \ndef __import__(name,globals=None ,locals=None ,fromlist=(),level=0):\n ''\n\n\n\n\n\n\n\n\n \n if level ==0:\n module=_gcd_import(name)\n else :\n globals_=globals if globals is not None else {}\n package=_calc___package__(globals_)\n module=_gcd_import(name,package,level)\n if not fromlist:\n \n \n if level ==0:\n return _gcd_import(name.partition('.')[0])\n elif not name:\n return module\n else :\n \n \n cut_off=len(name)-len(name.partition('.')[0])\n \n \n return sys.modules[module.__name__[:len(module.__name__)-cut_off]]\n else :\n return _handle_fromlist(module,fromlist,_gcd_import)\n \n \ndef _builtin_from_name(name):\n spec=BuiltinImporter.find_spec(name)\n if spec is None :\n raise ImportError('no built-in module named '+name)\n return _load_unlocked(spec)\n \n \ndef _setup(sys_module,_imp_module):\n ''\n\n\n\n\n\n \n global _imp,sys\n _imp=_imp_module\n sys=sys_module\n \n \n module_type=type(sys)\n for name,module in sys.modules.items():\n if isinstance(module,module_type):\n if name in sys.builtin_module_names:\n loader=BuiltinImporter\n elif _imp.is_frozen(name):\n loader=FrozenImporter\n else :\n continue\n spec=_spec_from_module(module,loader)\n _init_module_attrs(spec,module)\n \n \n self_module=sys.modules[__name__]\n \n \n for builtin_name in ('_warnings',):\n if builtin_name not in sys.modules:\n builtin_module=_builtin_from_name(builtin_name)\n else :\n builtin_module=sys.modules[builtin_name]\n setattr(self_module,builtin_name,builtin_module)\n \n \ndef _install(sys_module,_imp_module):\n ''\n _setup(sys_module,_imp_module)\n \n sys.meta_path.append(BuiltinImporter)\n sys.meta_path.append(FrozenImporter)\n \n \ndef _install_external_importers():\n ''\n global _bootstrap_external\n import _frozen_importlib_external\n _bootstrap_external=_frozen_importlib_external\n _frozen_importlib_external._install(sys.modules[__name__])\n", ["_frozen_importlib_external"]], "importlib._bootstrap_external": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n_CASE_INSENSITIVE_PLATFORMS_STR_KEY='win',\n_CASE_INSENSITIVE_PLATFORMS_BYTES_KEY='cygwin','darwin'\n_CASE_INSENSITIVE_PLATFORMS=(_CASE_INSENSITIVE_PLATFORMS_BYTES_KEY\n+_CASE_INSENSITIVE_PLATFORMS_STR_KEY)\n\n\ndef _make_relax_case():\n if sys.platform.startswith(_CASE_INSENSITIVE_PLATFORMS):\n if sys.platform.startswith(_CASE_INSENSITIVE_PLATFORMS_STR_KEY):\n key='PYTHONCASEOK'\n else :\n key=b'PYTHONCASEOK'\n \n def 
_relax_case():\n ''\n return key in _os.environ\n else :\n def _relax_case():\n ''\n return False\n return _relax_case\n \n \ndef _w_long(x):\n ''\n return (int(x)&0xFFFFFFFF).to_bytes(4,'little')\n \n \ndef _r_long(int_bytes):\n ''\n return int.from_bytes(int_bytes,'little')\n \n \ndef _path_join(*path_parts):\n ''\n return path_sep.join([part.rstrip(path_separators)\n for part in path_parts if part])\n \n \ndef _path_split(path):\n ''\n if len(path_separators)==1:\n front,_,tail=path.rpartition(path_sep)\n return front,tail\n for x in reversed(path):\n if x in path_separators:\n front,tail=path.rsplit(x,maxsplit=1)\n return front,tail\n return '',path\n \n \ndef _path_stat(path):\n ''\n\n\n\n\n \n return _os.stat(path)\n \n \ndef _path_is_mode_type(path,mode):\n ''\n try :\n stat_info=_path_stat(path)\n except OSError:\n return False\n return (stat_info.st_mode&0o170000)==mode\n \n \ndef _path_isfile(path):\n ''\n return _path_is_mode_type(path,0o100000)\n \n \ndef _path_isdir(path):\n ''\n if not path:\n path=_os.getcwd()\n return _path_is_mode_type(path,0o040000)\n \n \ndef _write_atomic(path,data,mode=0o666):\n ''\n\n \n \n path_tmp='{}.{}'.format(path,id(path))\n fd=_os.open(path_tmp,\n _os.O_EXCL |_os.O_CREAT |_os.O_WRONLY,mode&0o666)\n try :\n \n \n with _io.FileIO(fd,'wb')as file:\n file.write(data)\n _os.replace(path_tmp,path)\n except OSError:\n try :\n _os.unlink(path_tmp)\n except OSError:\n pass\n raise\n \n \n_code_type=type(_write_atomic.__code__)\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nMAGIC_NUMBER=(3394).to_bytes(2,'little')+b'\\r\\n'\n_RAW_MAGIC_NUMBER=int.from_bytes(MAGIC_NUMBER,'little')\n\n_PYCACHE='__pycache__'\n_OPT='opt-'\n\nSOURCE_SUFFIXES=['.py']\n\nBYTECODE_SUFFIXES=['.pyc']\n\nDEBUG_BYTECODE_SUFFIXES=OPTIMIZED_BYTECODE_SUFFIXES=BYTECODE_SUFFIXES\n\ndef cache_from_source(path,debug_override=None ,*,optimization=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if debug_override is not None :\n _warnings.warn('the debug_override parameter is deprecated; use '\n \"'optimization' instead\",DeprecationWarning)\n if optimization is not None :\n message='debug_override or optimization must be set to None'\n raise TypeError(message)\n optimization=''if debug_override else 1\n path=_os.fspath(path)\n head,tail=_path_split(path)\n base,sep,rest=tail.rpartition('.')\n tag=sys.implementation.cache_tag\n if tag is None :\n raise NotImplementedError('sys.implementation.cache_tag is None')\n almost_filename=''.join([(base if base else rest),sep,tag])\n if optimization is None :\n if sys.flags.optimize ==0:\n optimization=''\n else :\n optimization=sys.flags.optimize\n optimization=str(optimization)\n if optimization !='':\n if not optimization.isalnum():\n raise ValueError('{!r} is not alphanumeric'.format(optimization))\n almost_filename='{}.{}{}'.format(almost_filename,_OPT,optimization)\n return _path_join(head,_PYCACHE,almost_filename+BYTECODE_SUFFIXES[0])\n \n \ndef source_from_cache(path):\n ''\n\n\n\n\n\n\n \n if sys.implementation.cache_tag is None :\n raise NotImplementedError('sys.implementation.cache_tag is None')\n path=_os.fspath(path)\n head,pycache_filename=_path_split(path)\n head,pycache=_path_split(head)\n if pycache !=_PYCACHE:\n raise ValueError('{} not bottom-level directory in '\n '{!r}'.format(_PYCACHE,path))\n 
dot_count=pycache_filename.count('.')\n if dot_count not in {2,3}:\n raise ValueError('expected only 2 or 3 dots in '\n '{!r}'.format(pycache_filename))\n elif dot_count ==3:\n optimization=pycache_filename.rsplit('.',2)[-2]\n if not optimization.startswith(_OPT):\n raise ValueError(\"optimization portion of filename does not start \"\n \"with {!r}\".format(_OPT))\n opt_level=optimization[len(_OPT):]\n if not opt_level.isalnum():\n raise ValueError(\"optimization level {!r} is not an alphanumeric \"\n \"value\".format(optimization))\n base_filename=pycache_filename.partition('.')[0]\n return _path_join(head,base_filename+SOURCE_SUFFIXES[0])\n \n \ndef _get_sourcefile(bytecode_path):\n ''\n\n\n\n\n \n if len(bytecode_path)==0:\n return None\n rest,_,extension=bytecode_path.rpartition('.')\n if not rest or extension.lower()[-3:-1]!='py':\n return bytecode_path\n try :\n source_path=source_from_cache(bytecode_path)\n except (NotImplementedError,ValueError):\n source_path=bytecode_path[:-1]\n return source_path if _path_isfile(source_path)else bytecode_path\n \n \ndef _get_cached(filename):\n if filename.endswith(tuple(SOURCE_SUFFIXES)):\n try :\n return cache_from_source(filename)\n except NotImplementedError:\n pass\n elif filename.endswith(tuple(BYTECODE_SUFFIXES)):\n return filename\n else :\n return None\n \n \ndef _calc_mode(path):\n ''\n try :\n mode=_path_stat(path).st_mode\n except OSError:\n mode=0o666\n \n \n mode |=0o200\n return mode\n \n \ndef _check_name(method):\n ''\n\n\n\n\n\n \n def _check_name_wrapper(self,name=None ,*args,**kwargs):\n if name is None :\n name=self.name\n elif self.name !=name:\n raise ImportError('loader for %s cannot handle %s'%\n (self.name,name),name=name)\n return method(self,name,*args,**kwargs)\n try :\n _wrap=_bootstrap._wrap\n except NameError:\n \n def _wrap(new,old):\n for replace in ['__module__','__name__','__qualname__','__doc__']:\n if hasattr(old,replace):\n setattr(new,replace,getattr(old,replace))\n new.__dict__.update(old.__dict__)\n _wrap(_check_name_wrapper,method)\n return _check_name_wrapper\n \n \ndef _find_module_shim(self,fullname):\n ''\n\n\n\n\n \n \n \n \n loader,portions=self.find_loader(fullname)\n if loader is None and len(portions):\n msg='Not importing directory {}: missing __init__'\n _warnings.warn(msg.format(portions[0]),ImportWarning)\n return loader\n \n \ndef _classify_pyc(data,name,exc_details):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n magic=data[:4]\n if magic !=MAGIC_NUMBER:\n message=f'bad magic number in {name!r}: {magic!r}'\n _bootstrap._verbose_message('{}',message)\n raise ImportError(message,**exc_details)\n if len(data)<16:\n message=f'reached EOF while reading pyc header of {name!r}'\n _bootstrap._verbose_message('{}',message)\n raise EOFError(message)\n flags=_r_long(data[4:8])\n \n if flags&~0b11:\n message=f'invalid flags {flags!r} in {name!r}'\n raise ImportError(message,**exc_details)\n return flags\n \n \ndef _validate_timestamp_pyc(data,source_mtime,source_size,name,\nexc_details):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if _r_long(data[8:12])!=(source_mtime&0xFFFFFFFF):\n message=f'bytecode is stale for {name!r}'\n _bootstrap._verbose_message('{}',message)\n raise ImportError(message,**exc_details)\n if (source_size is not None and\n _r_long(data[12:16])!=(source_size&0xFFFFFFFF)):\n raise ImportError(f'bytecode is stale for {name!r}',**exc_details)\n \n \ndef _validate_hash_pyc(data,source_hash,name,exc_details):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if data[8:16]!=source_hash:\n raise ImportError(\n 
f'hash in bytecode doesn\\'t match hash of source {name!r}',\n **exc_details,\n )\n \n \ndef _compile_bytecode(data,name=None ,bytecode_path=None ,source_path=None ):\n ''\n code=marshal.loads(data)\n if isinstance(code,_code_type):\n _bootstrap._verbose_message('code object from {!r}',bytecode_path)\n if source_path is not None :\n _imp._fix_co_filename(code,source_path)\n return code\n else :\n raise ImportError('Non-code object in {!r}'.format(bytecode_path),\n name=name,path=bytecode_path)\n \n \ndef _code_to_timestamp_pyc(code,mtime=0,source_size=0):\n ''\n data=bytearray(MAGIC_NUMBER)\n data.extend(_w_long(0))\n data.extend(_w_long(mtime))\n data.extend(_w_long(source_size))\n data.extend(marshal.dumps(code))\n return data\n \n \ndef _code_to_hash_pyc(code,source_hash,checked=True ):\n ''\n data=bytearray(MAGIC_NUMBER)\n flags=0b1 |checked <<1\n data.extend(_w_long(flags))\n assert len(source_hash)==8\n data.extend(source_hash)\n data.extend(marshal.dumps(code))\n return data\n \n \ndef decode_source(source_bytes):\n ''\n\n\n \n import tokenize\n source_bytes_readline=_io.BytesIO(source_bytes).readline\n encoding=tokenize.detect_encoding(source_bytes_readline)\n newline_decoder=_io.IncrementalNewlineDecoder(None ,True )\n return newline_decoder.decode(source_bytes.decode(encoding[0]))\n \n \n \n \n_POPULATE=object()\n\n\ndef spec_from_file_location(name,location=None ,*,loader=None ,\nsubmodule_search_locations=_POPULATE):\n ''\n\n\n\n\n\n\n\n\n \n if location is None :\n \n \n \n location=''\n if hasattr(loader,'get_filename'):\n \n try :\n location=loader.get_filename(name)\n except ImportError:\n pass\n else :\n location=_os.fspath(location)\n \n \n \n \n \n \n \n spec=_bootstrap.ModuleSpec(name,loader,origin=location)\n spec._set_fileattr=True\n \n \n if loader is None :\n for loader_class,suffixes in _get_supported_file_loaders():\n if location.endswith(tuple(suffixes)):\n loader=loader_class(name,location)\n spec.loader=loader\n break\n else :\n return None\n \n \n if submodule_search_locations is _POPULATE:\n \n if hasattr(loader,'is_package'):\n try :\n is_package=loader.is_package(name)\n except ImportError:\n pass\n else :\n if is_package:\n spec.submodule_search_locations=[]\n else :\n spec.submodule_search_locations=submodule_search_locations\n if spec.submodule_search_locations ==[]:\n if location:\n dirname=_path_split(location)[0]\n spec.submodule_search_locations.append(dirname)\n \n return spec\n \n \n \n \nclass WindowsRegistryFinder:\n\n ''\n \n REGISTRY_KEY=(\n 'Software\\\\Python\\\\PythonCore\\\\{sys_version}'\n '\\\\Modules\\\\{fullname}')\n REGISTRY_KEY_DEBUG=(\n 'Software\\\\Python\\\\PythonCore\\\\{sys_version}'\n '\\\\Modules\\\\{fullname}\\\\Debug')\n DEBUG_BUILD=False\n \n @classmethod\n def _open_registry(cls,key):\n try :\n return _winreg.OpenKey(_winreg.HKEY_CURRENT_USER,key)\n except OSError:\n return _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE,key)\n \n @classmethod\n def _search_registry(cls,fullname):\n if cls.DEBUG_BUILD:\n registry_key=cls.REGISTRY_KEY_DEBUG\n else :\n registry_key=cls.REGISTRY_KEY\n key=registry_key.format(fullname=fullname,\n sys_version='%d.%d'%sys.version_info[:2])\n try :\n with cls._open_registry(key)as hkey:\n filepath=_winreg.QueryValue(hkey,'')\n except OSError:\n return None\n return filepath\n \n @classmethod\n def find_spec(cls,fullname,path=None ,target=None ):\n filepath=cls._search_registry(fullname)\n if filepath is None :\n return None\n try :\n _path_stat(filepath)\n except OSError:\n return None\n for 
loader,suffixes in _get_supported_file_loaders():\n if filepath.endswith(tuple(suffixes)):\n spec=_bootstrap.spec_from_loader(fullname,\n loader(fullname,filepath),\n origin=filepath)\n return spec\n \n @classmethod\n def find_module(cls,fullname,path=None ):\n ''\n\n\n\n \n spec=cls.find_spec(fullname,path)\n if spec is not None :\n return spec.loader\n else :\n return None\n \n \nclass _LoaderBasics:\n\n ''\n \n \n def is_package(self,fullname):\n ''\n \n filename=_path_split(self.get_filename(fullname))[1]\n filename_base=filename.rsplit('.',1)[0]\n tail_name=fullname.rpartition('.')[2]\n return filename_base =='__init__'and tail_name !='__init__'\n \n def create_module(self,spec):\n ''\n \n def exec_module(self,module):\n ''\n code=self.get_code(module.__name__)\n if code is None :\n raise ImportError('cannot load module {!r} when get_code() '\n 'returns None'.format(module.__name__))\n _bootstrap._call_with_frames_removed(exec,code,module.__dict__)\n \n def load_module(self,fullname):\n ''\n return _bootstrap._load_module_shim(self,fullname)\n \n \nclass SourceLoader(_LoaderBasics):\n\n def path_mtime(self,path):\n ''\n\n\n\n \n raise OSError\n \n def path_stats(self,path):\n ''\n\n\n\n\n\n\n\n\n \n return {'mtime':self.path_mtime(path)}\n \n def _cache_bytecode(self,source_path,cache_path,data):\n ''\n\n\n\n\n \n \n return self.set_data(cache_path,data)\n \n def set_data(self,path,data):\n ''\n\n\n \n \n \n def get_source(self,fullname):\n ''\n path=self.get_filename(fullname)\n try :\n source_bytes=self.get_data(path)\n except OSError as exc:\n raise ImportError('source not available through get_data()',\n name=fullname)from exc\n return decode_source(source_bytes)\n \n def source_to_code(self,data,path,*,_optimize=-1):\n ''\n\n\n \n return _bootstrap._call_with_frames_removed(compile,data,path,'exec',\n dont_inherit=True ,optimize=_optimize)\n \n def get_code(self,fullname):\n ''\n\n\n\n\n \n source_path=self.get_filename(fullname)\n source_mtime=None\n source_bytes=None\n source_hash=None\n hash_based=False\n check_source=True\n try :\n bytecode_path=cache_from_source(source_path)\n except NotImplementedError:\n bytecode_path=None\n else :\n try :\n st=self.path_stats(source_path)\n except OSError:\n pass\n else :\n source_mtime=int(st['mtime'])\n try :\n data=self.get_data(bytecode_path)\n except OSError:\n pass\n else :\n exc_details={\n 'name':fullname,\n 'path':bytecode_path,\n }\n try :\n flags=_classify_pyc(data,fullname,exc_details)\n bytes_data=memoryview(data)[16:]\n hash_based=flags&0b1 !=0\n if hash_based:\n check_source=flags&0b10 !=0\n if (_imp.check_hash_based_pycs !='never'and\n (check_source or\n _imp.check_hash_based_pycs =='always')):\n source_bytes=self.get_data(source_path)\n source_hash=_imp.source_hash(\n _RAW_MAGIC_NUMBER,\n source_bytes,\n )\n _validate_hash_pyc(data,source_hash,fullname,\n exc_details)\n else :\n _validate_timestamp_pyc(\n data,\n source_mtime,\n st['size'],\n fullname,\n exc_details,\n )\n except (ImportError,EOFError):\n pass\n else :\n _bootstrap._verbose_message('{} matches {}',bytecode_path,\n source_path)\n return _compile_bytecode(bytes_data,name=fullname,\n bytecode_path=bytecode_path,\n source_path=source_path)\n if source_bytes is None :\n source_bytes=self.get_data(source_path)\n code_object=self.source_to_code(source_bytes,source_path)\n _bootstrap._verbose_message('code object from {}',source_path)\n if (not sys.dont_write_bytecode and bytecode_path is not None and\n source_mtime is not None ):\n if hash_based:\n if 
source_hash is None :\n source_hash=_imp.source_hash(source_bytes)\n data=_code_to_hash_pyc(code_object,source_hash,check_source)\n else :\n data=_code_to_timestamp_pyc(code_object,source_mtime,\n len(source_bytes))\n try :\n self._cache_bytecode(source_path,bytecode_path,data)\n _bootstrap._verbose_message('wrote {!r}',bytecode_path)\n except NotImplementedError:\n pass\n return code_object\n \n \nclass FileLoader:\n\n ''\n \n \n def __init__(self,fullname,path):\n ''\n \n self.name=fullname\n self.path=path\n \n def __eq__(self,other):\n return (self.__class__ ==other.__class__ and\n self.__dict__ ==other.__dict__)\n \n def __hash__(self):\n return hash(self.name)^hash(self.path)\n \n @_check_name\n def load_module(self,fullname):\n ''\n\n\n\n \n \n \n \n return super(FileLoader,self).load_module(fullname)\n \n @_check_name\n def get_filename(self,fullname):\n ''\n return self.path\n \n def get_data(self,path):\n ''\n with _io.FileIO(path,'r')as file:\n return file.read()\n \n \n \n @_check_name\n def get_resource_reader(self,module):\n if self.is_package(module):\n return self\n return None\n \n def open_resource(self,resource):\n path=_path_join(_path_split(self.path)[0],resource)\n return _io.FileIO(path,'r')\n \n def resource_path(self,resource):\n if not self.is_resource(resource):\n raise FileNotFoundError\n path=_path_join(_path_split(self.path)[0],resource)\n return path\n \n def is_resource(self,name):\n if path_sep in name:\n return False\n path=_path_join(_path_split(self.path)[0],name)\n return _path_isfile(path)\n \n def contents(self):\n return iter(_os.listdir(_path_split(self.path)[0]))\n \n \nclass SourceFileLoader(FileLoader,SourceLoader):\n\n ''\n \n def path_stats(self,path):\n ''\n st=_path_stat(path)\n return {'mtime':st.st_mtime,'size':st.st_size}\n \n def _cache_bytecode(self,source_path,bytecode_path,data):\n \n mode=_calc_mode(source_path)\n return self.set_data(bytecode_path,data,_mode=mode)\n \n def set_data(self,path,data,*,_mode=0o666):\n ''\n parent,filename=_path_split(path)\n path_parts=[]\n \n while parent and not _path_isdir(parent):\n parent,part=_path_split(parent)\n path_parts.append(part)\n \n for part in reversed(path_parts):\n parent=_path_join(parent,part)\n try :\n _os.mkdir(parent)\n except FileExistsError:\n \n continue\n except OSError as exc:\n \n \n _bootstrap._verbose_message('could not create {!r}: {!r}',\n parent,exc)\n return\n try :\n _write_atomic(path,data,_mode)\n _bootstrap._verbose_message('created {!r}',path)\n except OSError as exc:\n \n _bootstrap._verbose_message('could not create {!r}: {!r}',path,\n exc)\n \n \nclass SourcelessFileLoader(FileLoader,_LoaderBasics):\n\n ''\n \n def get_code(self,fullname):\n path=self.get_filename(fullname)\n data=self.get_data(path)\n \n \n exc_details={\n 'name':fullname,\n 'path':path,\n }\n _classify_pyc(data,fullname,exc_details)\n return _compile_bytecode(\n memoryview(data)[16:],\n name=fullname,\n bytecode_path=path,\n )\n \n def get_source(self,fullname):\n ''\n return None\n \n \n \nEXTENSION_SUFFIXES=[]\n\n\nclass ExtensionFileLoader(FileLoader,_LoaderBasics):\n\n ''\n\n\n\n \n \n def __init__(self,name,path):\n self.name=name\n self.path=path\n \n def __eq__(self,other):\n return (self.__class__ ==other.__class__ and\n self.__dict__ ==other.__dict__)\n \n def __hash__(self):\n return hash(self.name)^hash(self.path)\n \n def create_module(self,spec):\n ''\n module=_bootstrap._call_with_frames_removed(\n _imp.create_dynamic,spec)\n _bootstrap._verbose_message('extension module {!r} 
loaded from {!r}',\n spec.name,self.path)\n return module\n \n def exec_module(self,module):\n ''\n _bootstrap._call_with_frames_removed(_imp.exec_dynamic,module)\n _bootstrap._verbose_message('extension module {!r} executed from {!r}',\n self.name,self.path)\n \n def is_package(self,fullname):\n ''\n file_name=_path_split(self.path)[1]\n return any(file_name =='__init__'+suffix\n for suffix in EXTENSION_SUFFIXES)\n \n def get_code(self,fullname):\n ''\n return None\n \n def get_source(self,fullname):\n ''\n return None\n \n @_check_name\n def get_filename(self,fullname):\n ''\n return self.path\n \n \nclass _NamespacePath:\n ''\n\n\n\n \n \n def __init__(self,name,path,path_finder):\n self._name=name\n self._path=path\n self._last_parent_path=tuple(self._get_parent_path())\n self._path_finder=path_finder\n \n def _find_parent_path_names(self):\n ''\n parent,dot,me=self._name.rpartition('.')\n if dot =='':\n \n return 'sys','path'\n \n \n return parent,'__path__'\n \n def _get_parent_path(self):\n parent_module_name,path_attr_name=self._find_parent_path_names()\n return getattr(sys.modules[parent_module_name],path_attr_name)\n \n def _recalculate(self):\n \n parent_path=tuple(self._get_parent_path())\n if parent_path !=self._last_parent_path:\n spec=self._path_finder(self._name,parent_path)\n \n \n if spec is not None and spec.loader is None :\n if spec.submodule_search_locations:\n self._path=spec.submodule_search_locations\n self._last_parent_path=parent_path\n return self._path\n \n def __iter__(self):\n return iter(self._recalculate())\n \n def __setitem__(self,index,path):\n self._path[index]=path\n \n def __len__(self):\n return len(self._recalculate())\n \n def __repr__(self):\n return '_NamespacePath({!r})'.format(self._path)\n \n def __contains__(self,item):\n return item in self._recalculate()\n \n def append(self,item):\n self._path.append(item)\n \n \n \nclass _NamespaceLoader:\n def __init__(self,name,path,path_finder):\n self._path=_NamespacePath(name,path,path_finder)\n \n @classmethod\n def module_repr(cls,module):\n ''\n\n\n\n \n return ''.format(module.__name__)\n \n def is_package(self,fullname):\n return True\n \n def get_source(self,fullname):\n return ''\n \n def get_code(self,fullname):\n return compile('','','exec',dont_inherit=True )\n \n def create_module(self,spec):\n ''\n \n def exec_module(self,module):\n pass\n \n def load_module(self,fullname):\n ''\n\n\n\n \n \n _bootstrap._verbose_message('namespace module loaded with path {!r}',\n self._path)\n return _bootstrap._load_module_shim(self,fullname)\n \n \n \n \nclass PathFinder:\n\n ''\n \n @classmethod\n def invalidate_caches(cls):\n ''\n \n for name,finder in list(sys.path_importer_cache.items()):\n if finder is None :\n del sys.path_importer_cache[name]\n elif hasattr(finder,'invalidate_caches'):\n finder.invalidate_caches()\n \n @classmethod\n def _path_hooks(cls,path):\n ''\n if sys.path_hooks is not None and not sys.path_hooks:\n _warnings.warn('sys.path_hooks is empty',ImportWarning)\n for hook in sys.path_hooks:\n try :\n return hook(path)\n except ImportError:\n continue\n else :\n return None\n \n @classmethod\n def _path_importer_cache(cls,path):\n ''\n\n\n\n\n \n if path =='':\n try :\n path=_os.getcwd()\n except FileNotFoundError:\n \n \n return None\n try :\n finder=sys.path_importer_cache[path]\n except KeyError:\n finder=cls._path_hooks(path)\n sys.path_importer_cache[path]=finder\n return finder\n \n @classmethod\n def _legacy_get_spec(cls,fullname,finder):\n \n \n if 
hasattr(finder,'find_loader'):\n loader,portions=finder.find_loader(fullname)\n else :\n loader=finder.find_module(fullname)\n portions=[]\n if loader is not None :\n return _bootstrap.spec_from_loader(fullname,loader)\n spec=_bootstrap.ModuleSpec(fullname,None )\n spec.submodule_search_locations=portions\n return spec\n \n @classmethod\n def _get_spec(cls,fullname,path,target=None ):\n ''\n \n \n namespace_path=[]\n for entry in path:\n if not isinstance(entry,(str,bytes)):\n continue\n finder=cls._path_importer_cache(entry)\n if finder is not None :\n if hasattr(finder,'find_spec'):\n spec=finder.find_spec(fullname,target)\n else :\n spec=cls._legacy_get_spec(fullname,finder)\n if spec is None :\n continue\n if spec.loader is not None :\n return spec\n portions=spec.submodule_search_locations\n if portions is None :\n raise ImportError('spec missing loader')\n \n \n \n \n namespace_path.extend(portions)\n else :\n spec=_bootstrap.ModuleSpec(fullname,None )\n spec.submodule_search_locations=namespace_path\n return spec\n \n @classmethod\n def find_spec(cls,fullname,path=None ,target=None ):\n ''\n\n\n \n if path is None :\n path=sys.path\n spec=cls._get_spec(fullname,path,target)\n if spec is None :\n return None\n elif spec.loader is None :\n namespace_path=spec.submodule_search_locations\n if namespace_path:\n \n \n spec.origin=None\n spec.submodule_search_locations=_NamespacePath(fullname,namespace_path,cls._get_spec)\n return spec\n else :\n return None\n else :\n return spec\n \n @classmethod\n def find_module(cls,fullname,path=None ):\n ''\n\n\n\n\n \n spec=cls.find_spec(fullname,path)\n if spec is None :\n return None\n return spec.loader\n \n \nclass FileFinder:\n\n ''\n\n\n\n\n \n \n def __init__(self,path,*loader_details):\n ''\n\n \n loaders=[]\n for loader,suffixes in loader_details:\n loaders.extend((suffix,loader)for suffix in suffixes)\n self._loaders=loaders\n \n self.path=path or '.'\n self._path_mtime=-1\n self._path_cache=set()\n self._relaxed_path_cache=set()\n \n def invalidate_caches(self):\n ''\n self._path_mtime=-1\n \n find_module=_find_module_shim\n \n def find_loader(self,fullname):\n ''\n\n\n\n\n \n spec=self.find_spec(fullname)\n if spec is None :\n return None ,[]\n return spec.loader,spec.submodule_search_locations or []\n \n def _get_spec(self,loader_class,fullname,path,smsl,target):\n loader=loader_class(fullname,path)\n return spec_from_file_location(fullname,path,loader=loader,\n submodule_search_locations=smsl)\n \n def find_spec(self,fullname,target=None ):\n ''\n\n\n \n is_namespace=False\n tail_module=fullname.rpartition('.')[2]\n try :\n mtime=_path_stat(self.path or _os.getcwd()).st_mtime\n except OSError:\n mtime=-1\n if mtime !=self._path_mtime:\n self._fill_cache()\n self._path_mtime=mtime\n \n if _relax_case():\n cache=self._relaxed_path_cache\n cache_module=tail_module.lower()\n else :\n cache=self._path_cache\n cache_module=tail_module\n \n if cache_module in cache:\n base_path=_path_join(self.path,tail_module)\n for suffix,loader_class in self._loaders:\n init_filename='__init__'+suffix\n full_path=_path_join(base_path,init_filename)\n if _path_isfile(full_path):\n return self._get_spec(loader_class,fullname,full_path,[base_path],target)\n else :\n \n \n is_namespace=_path_isdir(base_path)\n \n for suffix,loader_class in self._loaders:\n full_path=_path_join(self.path,tail_module+suffix)\n _bootstrap._verbose_message('trying {}',full_path,verbosity=2)\n if cache_module+suffix in cache:\n if _path_isfile(full_path):\n return 
self._get_spec(loader_class,fullname,full_path,\n None ,target)\n if is_namespace:\n _bootstrap._verbose_message('possible namespace for {}',base_path)\n spec=_bootstrap.ModuleSpec(fullname,None )\n spec.submodule_search_locations=[base_path]\n return spec\n return None\n \n def _fill_cache(self):\n ''\n path=self.path\n try :\n contents=_os.listdir(path or _os.getcwd())\n except (FileNotFoundError,PermissionError,NotADirectoryError):\n \n \n contents=[]\n \n \n if not sys.platform.startswith('win'):\n self._path_cache=set(contents)\n else :\n \n \n \n \n \n lower_suffix_contents=set()\n for item in contents:\n name,dot,suffix=item.partition('.')\n if dot:\n new_name='{}.{}'.format(name,suffix.lower())\n else :\n new_name=name\n lower_suffix_contents.add(new_name)\n self._path_cache=lower_suffix_contents\n if sys.platform.startswith(_CASE_INSENSITIVE_PLATFORMS):\n self._relaxed_path_cache={fn.lower()for fn in contents}\n \n @classmethod\n def path_hook(cls,*loader_details):\n ''\n\n\n\n\n\n\n \n def path_hook_for_FileFinder(path):\n ''\n if not _path_isdir(path):\n raise ImportError('only directories are supported',path=path)\n return cls(path,*loader_details)\n \n return path_hook_for_FileFinder\n \n def __repr__(self):\n return 'FileFinder({!r})'.format(self.path)\n \n \n \n \ndef _fix_up_module(ns,name,pathname,cpathname=None ):\n\n loader=ns.get('__loader__')\n spec=ns.get('__spec__')\n if not loader:\n if spec:\n loader=spec.loader\n elif pathname ==cpathname:\n loader=SourcelessFileLoader(name,pathname)\n else :\n loader=SourceFileLoader(name,pathname)\n if not spec:\n spec=spec_from_file_location(name,pathname,loader=loader)\n try :\n ns['__spec__']=spec\n ns['__loader__']=loader\n ns['__file__']=pathname\n ns['__cached__']=cpathname\n except Exception:\n \n pass\n \n \ndef _get_supported_file_loaders():\n ''\n\n\n \n extensions=ExtensionFileLoader,_imp.extension_suffixes()\n source=SourceFileLoader,SOURCE_SUFFIXES\n bytecode=SourcelessFileLoader,BYTECODE_SUFFIXES\n return [extensions,source,bytecode]\n \n \ndef _setup(_bootstrap_module):\n ''\n\n\n\n\n \n global sys,_imp,_bootstrap\n _bootstrap=_bootstrap_module\n sys=_bootstrap.sys\n _imp=_bootstrap._imp\n \n \n self_module=sys.modules[__name__]\n \n \n for builtin_name in ('_warnings','builtins','marshal'):\n if builtin_name not in sys.modules:\n builtin_module=_bootstrap._builtin_from_name(builtin_name)\n else :\n builtin_module=sys.modules[builtin_name]\n setattr(self_module,builtin_name,builtin_module)\n \n \n os_details=('posix',['/']),('nt',['\\\\','/'])\n for builtin_os,path_separators in os_details:\n \n assert all(len(sep)==1 for sep in path_separators)\n path_sep=path_separators[0]\n if builtin_os in sys.modules:\n os_module=sys.modules[builtin_os]\n break\n else :\n try :\n os_module=_bootstrap._builtin_from_name(builtin_os)\n break\n except ImportError:\n continue\n else :\n raise ImportError('importlib requires posix or nt')\n setattr(self_module,'_os',os_module)\n setattr(self_module,'path_sep',path_sep)\n setattr(self_module,'path_separators',''.join(path_separators))\n \n \n \n \n import _thread\n setattr(self_module,'_thread',_thread)\n \n \n \n \n import _weakref\n setattr(self_module,'_weakref',_weakref)\n \n \n if builtin_os =='nt':\n winreg_module=_bootstrap._builtin_from_name('winreg')\n setattr(self_module,'_winreg',winreg_module)\n \n \n setattr(self_module,'_relax_case',_make_relax_case())\n EXTENSION_SUFFIXES.extend(_imp.extension_suffixes())\n if builtin_os =='nt':\n SOURCE_SUFFIXES.append('.pyw')\n if 
'_d.pyd'in EXTENSION_SUFFIXES:\n WindowsRegistryFinder.DEBUG_BUILD=True\n \n \ndef _install(_bootstrap_module):\n ''\n _setup(_bootstrap_module)\n supported_loaders=_get_supported_file_loaders()\n sys.path_hooks.extend([FileFinder.path_hook(*supported_loaders)])\n sys.meta_path.append(PathFinder)\n", ["_thread", "_weakref", "tokenize"]], "importlib": [".py", "''\n__all__=['__import__','import_module','invalidate_caches','reload']\n\n\n\n\n\n\n\n\n\nimport _imp\nimport sys\n\ntry :\n import _frozen_importlib as _bootstrap\nexcept ImportError:\n from . import _bootstrap\n _bootstrap._setup(sys,_imp)\nelse :\n\n\n _bootstrap.__name__='importlib._bootstrap'\n _bootstrap.__package__='importlib'\n try :\n _bootstrap.__file__=__file__.replace('__init__.py','_bootstrap.py')\n except NameError:\n \n \n pass\n sys.modules['importlib._bootstrap']=_bootstrap\n \ntry :\n import _frozen_importlib_external as _bootstrap_external\nexcept ImportError:\n from . import _bootstrap_external\n _bootstrap_external._setup(_bootstrap)\n _bootstrap._bootstrap_external=_bootstrap_external\nelse :\n _bootstrap_external.__name__='importlib._bootstrap_external'\n _bootstrap_external.__package__='importlib'\n try :\n _bootstrap_external.__file__=__file__.replace('__init__.py','_bootstrap_external.py')\n except NameError:\n \n \n pass\n sys.modules['importlib._bootstrap_external']=_bootstrap_external\n \n \n_w_long=_bootstrap_external._w_long\n_r_long=_bootstrap_external._r_long\n\n\n\n\nimport types\nimport warnings\n\n\n\n\nfrom ._bootstrap import __import__\n\n\ndef invalidate_caches():\n ''\n \n for finder in sys.meta_path:\n if hasattr(finder,'invalidate_caches'):\n finder.invalidate_caches()\n \n \ndef find_loader(name,path=None ):\n ''\n\n\n\n\n\n \n warnings.warn('Deprecated since Python 3.4. 
'\n 'Use importlib.util.find_spec() instead.',\n DeprecationWarning,stacklevel=2)\n try :\n loader=sys.modules[name].__loader__\n if loader is None :\n raise ValueError('{}.__loader__ is None'.format(name))\n else :\n return loader\n except KeyError:\n pass\n except AttributeError:\n raise ValueError('{}.__loader__ is not set'.format(name))from None\n \n spec=_bootstrap._find_spec(name,path)\n \n if spec is None :\n return None\n if spec.loader is None :\n if spec.submodule_search_locations is None :\n raise ImportError('spec for {} missing loader'.format(name),\n name=name)\n raise ImportError('namespace packages do not have loaders',\n name=name)\n return spec.loader\n \n \ndef import_module(name,package=None ):\n ''\n\n\n\n\n\n \n level=0\n if name.startswith('.'):\n if not package:\n msg=(\"the 'package' argument is required to perform a relative \"\n \"import for {!r}\")\n raise TypeError(msg.format(name))\n for character in name:\n if character !='.':\n break\n level +=1\n return _bootstrap._gcd_import(name[level:],package,level)\n \n \n_RELOADING={}\n\n\ndef reload(module):\n ''\n\n\n\n \n if not module or not isinstance(module,types.ModuleType):\n raise TypeError(\"reload() argument must be a module\")\n try :\n name=module.__spec__.name\n except AttributeError:\n name=module.__name__\n \n if sys.modules.get(name)is not module:\n msg=\"module {} not in sys.modules\"\n raise ImportError(msg.format(name),name=name)\n if name in _RELOADING:\n return _RELOADING[name]\n _RELOADING[name]=module\n try :\n parent_name=name.rpartition('.')[0]\n if parent_name:\n try :\n parent=sys.modules[parent_name]\n except KeyError:\n msg=\"parent {!r} not in sys.modules\"\n raise ImportError(msg.format(parent_name),\n name=parent_name)from None\n else :\n pkgpath=parent.__path__\n else :\n pkgpath=None\n target=module\n spec=module.__spec__=_bootstrap._find_spec(name,pkgpath,target)\n if spec is None :\n raise ModuleNotFoundError(f\"spec not found for the module {name!r}\",name=name)\n _bootstrap._exec(spec,module)\n \n return sys.modules[name]\n finally :\n try :\n del _RELOADING[name]\n except KeyError:\n pass\n", ["_frozen_importlib", "_frozen_importlib_external", "_imp", "importlib", "importlib._bootstrap", "importlib._bootstrap_external", "sys", "types", "warnings"], 1], "json.decoder": [".py", "''\n\nimport re\n\nfrom json import scanner\ntry :\n from _json import scanstring as c_scanstring\nexcept ImportError:\n c_scanstring=None\n \n__all__=['JSONDecoder','JSONDecodeError']\n\nFLAGS=re.VERBOSE |re.MULTILINE |re.DOTALL\n\nNaN=float('nan')\nPosInf=float('inf')\nNegInf=float('-inf')\n\n\nclass JSONDecodeError(ValueError):\n ''\n\n\n\n\n\n\n\n \n \n def __init__(self,msg,doc,pos):\n lineno=doc.count('\\n',0,pos)+1\n colno=pos -doc.rfind('\\n',0,pos)\n errmsg='%s: line %d column %d (char %d)'%(msg,lineno,colno,pos)\n ValueError.__init__(self,errmsg)\n self.msg=msg\n self.doc=doc\n self.pos=pos\n self.lineno=lineno\n self.colno=colno\n \n def __reduce__(self):\n return self.__class__,(self.msg,self.doc,self.pos)\n \n \n_CONSTANTS={\n'-Infinity':NegInf,\n'Infinity':PosInf,\n'NaN':NaN,\n}\n\n\nSTRINGCHUNK=re.compile(r'(.*?)([\"\\\\\\x00-\\x1f])',FLAGS)\nBACKSLASH={\n'\"':'\"','\\\\':'\\\\','/':'/',\n'b':'\\b','f':'\\f','n':'\\n','r':'\\r','t':'\\t',\n}\n\ndef _decode_uXXXX(s,pos):\n esc=s[pos+1:pos+5]\n if len(esc)==4 and esc[1]not in 'xX':\n try :\n return int(esc,16)\n except ValueError:\n pass\n msg=\"Invalid \\\\uXXXX escape\"\n raise JSONDecodeError(msg,s,pos)\n \ndef 
py_scanstring(s,end,strict=True ,\n_b=BACKSLASH,_m=STRINGCHUNK.match):\n ''\n\n\n\n\n\n\n \n chunks=[]\n _append=chunks.append\n begin=end -1\n while 1:\n chunk=_m(s,end)\n if chunk is None :\n raise JSONDecodeError(\"Unterminated string starting at\",s,begin)\n end=chunk.end()\n content,terminator=chunk.groups()\n \n if content:\n _append(content)\n \n \n if terminator =='\"':\n break\n elif terminator !='\\\\':\n if strict:\n \n msg=\"Invalid control character {0!r} at\".format(terminator)\n raise JSONDecodeError(msg,s,end)\n else :\n _append(terminator)\n continue\n try :\n esc=s[end]\n except IndexError:\n raise JSONDecodeError(\"Unterminated string starting at\",\n s,begin)from None\n \n if esc !='u':\n try :\n char=_b[esc]\n except KeyError:\n msg=\"Invalid \\\\escape: {0!r}\".format(esc)\n raise JSONDecodeError(msg,s,end)\n end +=1\n else :\n uni=_decode_uXXXX(s,end)\n end +=5\n if 0xd800 <=uni <=0xdbff and s[end:end+2]=='\\\\u':\n uni2=_decode_uXXXX(s,end+1)\n if 0xdc00 <=uni2 <=0xdfff:\n uni=0x10000+(((uni -0xd800)<<10)|(uni2 -0xdc00))\n end +=6\n char=chr(uni)\n _append(char)\n return ''.join(chunks),end\n \n \n \nscanstring=c_scanstring or py_scanstring\n\nWHITESPACE=re.compile(r'[ \\t\\n\\r]*',FLAGS)\nWHITESPACE_STR=' \\t\\n\\r'\n\n\ndef JSONObject(s_and_end,strict,scan_once,object_hook,object_pairs_hook,\nmemo=None ,_w=WHITESPACE.match,_ws=WHITESPACE_STR):\n s,end=s_and_end\n pairs=[]\n pairs_append=pairs.append\n \n if memo is None :\n memo={}\n memo_get=memo.setdefault\n \n \n nextchar=s[end:end+1]\n \n if nextchar !='\"':\n if nextchar in _ws:\n end=_w(s,end).end()\n nextchar=s[end:end+1]\n \n if nextchar =='}':\n if object_pairs_hook is not None :\n result=object_pairs_hook(pairs)\n return result,end+1\n pairs={}\n if object_hook is not None :\n pairs=object_hook(pairs)\n return pairs,end+1\n elif nextchar !='\"':\n raise JSONDecodeError(\n \"Expecting property name enclosed in double quotes\",s,end)\n end +=1\n while True :\n key,end=scanstring(s,end,strict)\n key=memo_get(key,key)\n \n \n if s[end:end+1]!=':':\n end=_w(s,end).end()\n if s[end:end+1]!=':':\n raise JSONDecodeError(\"Expecting ':' delimiter\",s,end)\n end +=1\n \n try :\n if s[end]in _ws:\n end +=1\n if s[end]in _ws:\n end=_w(s,end+1).end()\n except IndexError:\n pass\n \n try :\n value,end=scan_once(s,end)\n except StopIteration as err:\n raise JSONDecodeError(\"Expecting value\",s,err.value)from None\n pairs_append((key,value))\n try :\n nextchar=s[end]\n if nextchar in _ws:\n end=_w(s,end+1).end()\n nextchar=s[end]\n except IndexError:\n nextchar=''\n end +=1\n \n if nextchar =='}':\n break\n elif nextchar !=',':\n raise JSONDecodeError(\"Expecting ',' delimiter\",s,end -1)\n end=_w(s,end).end()\n nextchar=s[end:end+1]\n end +=1\n if nextchar !='\"':\n raise JSONDecodeError(\n \"Expecting property name enclosed in double quotes\",s,end -1)\n if object_pairs_hook is not None :\n result=object_pairs_hook(pairs)\n return result,end\n pairs=dict(pairs)\n if object_hook is not None :\n pairs=object_hook(pairs)\n return pairs,end\n \ndef JSONArray(s_and_end,scan_once,_w=WHITESPACE.match,_ws=WHITESPACE_STR):\n s,end=s_and_end\n values=[]\n nextchar=s[end:end+1]\n if nextchar in _ws:\n end=_w(s,end+1).end()\n nextchar=s[end:end+1]\n \n if nextchar ==']':\n return values,end+1\n _append=values.append\n while True :\n try :\n value,end=scan_once(s,end)\n except StopIteration as err:\n raise JSONDecodeError(\"Expecting value\",s,err.value)from None\n _append(value)\n nextchar=s[end:end+1]\n if nextchar in _ws:\n 
end=_w(s,end+1).end()\n nextchar=s[end:end+1]\n end +=1\n if nextchar ==']':\n break\n elif nextchar !=',':\n raise JSONDecodeError(\"Expecting ',' delimiter\",s,end -1)\n try :\n if s[end]in _ws:\n end +=1\n if s[end]in _ws:\n end=_w(s,end+1).end()\n except IndexError:\n pass\n \n return values,end\n \n \nclass JSONDecoder(object):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def __init__(self,*,object_hook=None ,parse_float=None ,\n parse_int=None ,parse_constant=None ,strict=True ,\n object_pairs_hook=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n self.object_hook=object_hook\n self.parse_float=parse_float or float\n self.parse_int=parse_int or int\n self.parse_constant=parse_constant or _CONSTANTS.__getitem__\n self.strict=strict\n self.object_pairs_hook=object_pairs_hook\n self.parse_object=JSONObject\n self.parse_array=JSONArray\n self.parse_string=scanstring\n self.memo={}\n self.scan_once=scanner.make_scanner(self)\n \n \n def decode(self,s,_w=WHITESPACE.match):\n ''\n\n\n \n obj,end=self.raw_decode(s,idx=_w(s,0).end())\n end=_w(s,end).end()\n if end !=len(s):\n raise JSONDecodeError(\"Extra data\",s,end)\n return obj\n \n def raw_decode(self,s,idx=0):\n ''\n\n\n\n\n\n\n \n try :\n obj,end=self.scan_once(s,idx)\n except StopIteration as err:\n raise JSONDecodeError(\"Expecting value\",s,err.value)from None\n return obj,end\n", ["_json", "json", "json.scanner", "re"]], "json.encoder": [".py", "''\n\nimport re\n\ntry :\n from _json import encode_basestring_ascii as c_encode_basestring_ascii\nexcept ImportError:\n c_encode_basestring_ascii=None\ntry :\n from _json import encode_basestring as c_encode_basestring\nexcept ImportError:\n c_encode_basestring=None\ntry :\n from _json import make_encoder as c_make_encoder\nexcept ImportError:\n c_make_encoder=None\n \nESCAPE=re.compile(r'[\\x00-\\x1f\\\\\"\\b\\f\\n\\r\\t]')\nESCAPE_ASCII=re.compile(r'([\\\\\"]|[^\\ -~])')\nHAS_UTF8=re.compile(b'[\\x80-\\xff]')\nESCAPE_DCT={\n'\\\\':'\\\\\\\\',\n'\"':'\\\\\"',\n'\\b':'\\\\b',\n'\\f':'\\\\f',\n'\\n':'\\\\n',\n'\\r':'\\\\r',\n'\\t':'\\\\t',\n}\nfor i in range(0x20):\n ESCAPE_DCT.setdefault(chr(i),'\\\\u{0:04x}'.format(i))\n \n \nINFINITY=float('inf')\n\ndef py_encode_basestring(s):\n ''\n\n \n def replace(match):\n return ESCAPE_DCT[match.group(0)]\n return '\"'+ESCAPE.sub(replace,s)+'\"'\n \n \nencode_basestring=(c_encode_basestring or py_encode_basestring)\n\n\ndef py_encode_basestring_ascii(s):\n ''\n\n \n def replace(match):\n s=match.group(0)\n try :\n return ESCAPE_DCT[s]\n except KeyError:\n n=ord(s)\n if n <0x10000:\n return '\\\\u{0:04x}'.format(n)\n \n else :\n \n n -=0x10000\n s1=0xd800 |((n >>10)&0x3ff)\n s2=0xdc00 |(n&0x3ff)\n return '\\\\u{0:04x}\\\\u{1:04x}'.format(s1,s2)\n return '\"'+ESCAPE_ASCII.sub(replace,s)+'\"'\n \n \nencode_basestring_ascii=(\nc_encode_basestring_ascii or py_encode_basestring_ascii)\n\nclass JSONEncoder(object):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n item_separator=', '\n key_separator=': '\n def __init__(self,*,skipkeys=False ,ensure_ascii=True ,\n check_circular=True ,allow_nan=True ,sort_keys=False ,\n indent=None ,separators=None ,default=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n self.skipkeys=skipkeys\n self.ensure_ascii=ensure_ascii\n self.check_circular=check_circular\n self.allow_nan=allow_nan\n self.sort_keys=sort_keys\n self.indent=indent\n if separators is not None :\n self.item_separator,self.key_separator=separators\n 
elif indent is not None :\n self.item_separator=','\n if default is not None :\n self.default=default\n \n def default(self,o):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n raise TypeError(f'Object of type {o.__class__.__name__} '\n f'is not JSON serializable')\n \n def encode(self,o):\n ''\n\n\n\n\n\n \n \n if isinstance(o,str):\n if self.ensure_ascii:\n return encode_basestring_ascii(o)\n else :\n return encode_basestring(o)\n \n \n \n chunks=self.iterencode(o,_one_shot=True )\n if not isinstance(chunks,(list,tuple)):\n chunks=list(chunks)\n return ''.join(chunks)\n \n def iterencode(self,o,_one_shot=False ):\n ''\n\n\n\n\n\n\n\n \n if self.check_circular:\n markers={}\n else :\n markers=None\n if self.ensure_ascii:\n _encoder=encode_basestring_ascii\n else :\n _encoder=encode_basestring\n \n def floatstr(o,allow_nan=self.allow_nan,\n _repr=float.__repr__,_inf=INFINITY,_neginf=-INFINITY):\n \n \n \n \n if o !=o:\n text='NaN'\n elif o ==_inf:\n text='Infinity'\n elif o ==_neginf:\n text='-Infinity'\n else :\n return _repr(o)\n \n if not allow_nan:\n raise ValueError(\n \"Out of range float values are not JSON compliant: \"+\n repr(o))\n \n return text\n \n \n if (_one_shot and c_make_encoder is not None\n and self.indent is None ):\n _iterencode=c_make_encoder(\n markers,self.default,_encoder,self.indent,\n self.key_separator,self.item_separator,self.sort_keys,\n self.skipkeys,self.allow_nan)\n else :\n _iterencode=_make_iterencode(\n markers,self.default,_encoder,self.indent,floatstr,\n self.key_separator,self.item_separator,self.sort_keys,\n self.skipkeys,_one_shot)\n return _iterencode(o,0)\n \ndef _make_iterencode(markers,_default,_encoder,_indent,_floatstr,\n_key_separator,_item_separator,_sort_keys,_skipkeys,_one_shot,\n\nValueError=ValueError,\ndict=dict,\nfloat=float,\nid=id,\nint=int,\nisinstance=isinstance,\nlist=list,\nstr=str,\ntuple=tuple,\n_intstr=int.__str__,\n):\n\n if _indent is not None and not isinstance(_indent,str):\n _indent=' '*_indent\n \n def _iterencode_list(lst,_current_indent_level):\n if not lst:\n yield '[]'\n return\n if markers is not None :\n markerid=id(lst)\n if markerid in markers:\n raise ValueError(\"Circular reference detected\")\n markers[markerid]=lst\n buf='['\n if _indent is not None :\n _current_indent_level +=1\n newline_indent='\\n'+_indent *_current_indent_level\n separator=_item_separator+newline_indent\n buf +=newline_indent\n else :\n newline_indent=None\n separator=_item_separator\n first=True\n for value in lst:\n if first:\n first=False\n else :\n buf=separator\n if isinstance(value,str):\n yield buf+_encoder(value)\n elif value is None :\n yield buf+'null'\n elif value is True :\n yield buf+'true'\n elif value is False :\n yield buf+'false'\n elif isinstance(value,int):\n \n \n \n yield buf+_intstr(value)\n elif isinstance(value,float):\n \n yield buf+_floatstr(value)\n else :\n yield buf\n if isinstance(value,(list,tuple)):\n chunks=_iterencode_list(value,_current_indent_level)\n elif isinstance(value,dict):\n chunks=_iterencode_dict(value,_current_indent_level)\n else :\n chunks=_iterencode(value,_current_indent_level)\n yield from chunks\n if newline_indent is not None :\n _current_indent_level -=1\n yield '\\n'+_indent *_current_indent_level\n yield ']'\n if markers is not None :\n del markers[markerid]\n \n def _iterencode_dict(dct,_current_indent_level):\n if not dct:\n yield '{}'\n return\n if markers is not None :\n markerid=id(dct)\n if markerid in markers:\n raise ValueError(\"Circular reference detected\")\n 
markers[markerid]=dct\n yield '{'\n if _indent is not None :\n _current_indent_level +=1\n newline_indent='\\n'+_indent *_current_indent_level\n item_separator=_item_separator+newline_indent\n yield newline_indent\n else :\n newline_indent=None\n item_separator=_item_separator\n first=True\n if _sort_keys:\n items=sorted(dct.items(),key=lambda kv:kv[0])\n else :\n items=dct.items()\n for key,value in items:\n if isinstance(key,str):\n pass\n \n \n elif isinstance(key,float):\n \n key=_floatstr(key)\n elif key is True :\n key='true'\n elif key is False :\n key='false'\n elif key is None :\n key='null'\n elif isinstance(key,int):\n \n key=_intstr(key)\n elif _skipkeys:\n continue\n else :\n raise TypeError(f'keys must be str, int, float, bool or None, '\n f'not {key.__class__.__name__}')\n if first:\n first=False\n else :\n yield item_separator\n yield _encoder(key)\n yield _key_separator\n if isinstance(value,str):\n yield _encoder(value)\n elif value is None :\n yield 'null'\n elif value is True :\n yield 'true'\n elif value is False :\n yield 'false'\n elif isinstance(value,int):\n \n yield _intstr(value)\n elif isinstance(value,float):\n \n yield _floatstr(value)\n else :\n if isinstance(value,(list,tuple)):\n chunks=_iterencode_list(value,_current_indent_level)\n elif isinstance(value,dict):\n chunks=_iterencode_dict(value,_current_indent_level)\n else :\n chunks=_iterencode(value,_current_indent_level)\n yield from chunks\n if newline_indent is not None :\n _current_indent_level -=1\n yield '\\n'+_indent *_current_indent_level\n yield '}'\n if markers is not None :\n del markers[markerid]\n \n def _iterencode(o,_current_indent_level):\n if isinstance(o,str):\n yield _encoder(o)\n elif o is None :\n yield 'null'\n elif o is True :\n yield 'true'\n elif o is False :\n yield 'false'\n elif isinstance(o,int):\n \n yield _intstr(o)\n elif isinstance(o,float):\n \n yield _floatstr(o)\n elif isinstance(o,(list,tuple)):\n yield from _iterencode_list(o,_current_indent_level)\n elif isinstance(o,dict):\n yield from _iterencode_dict(o,_current_indent_level)\n else :\n if markers is not None :\n markerid=id(o)\n if markerid in markers:\n raise ValueError(\"Circular reference detected\")\n markers[markerid]=o\n o=_default(o)\n yield from _iterencode(o,_current_indent_level)\n if markers is not None :\n del markers[markerid]\n return _iterencode\n", ["_json", "re"]], "json.scanner": [".py", "''\n\nimport re\ntry :\n from _json import make_scanner as c_make_scanner\nexcept ImportError:\n c_make_scanner=None\n \n__all__=['make_scanner']\n\nNUMBER_RE=re.compile(\nr'(-?(?:0|[1-9]\\d*))(\\.\\d+)?([eE][-+]?\\d+)?',\n(re.VERBOSE |re.MULTILINE |re.DOTALL))\n\ndef py_make_scanner(context):\n parse_object=context.parse_object\n parse_array=context.parse_array\n parse_string=context.parse_string\n match_number=NUMBER_RE.match\n strict=context.strict\n parse_float=context.parse_float\n parse_int=context.parse_int\n parse_constant=context.parse_constant\n object_hook=context.object_hook\n object_pairs_hook=context.object_pairs_hook\n memo=context.memo\n \n def _scan_once(string,idx):\n try :\n nextchar=string[idx]\n except IndexError:\n raise StopIteration(idx)from None\n \n if nextchar =='\"':\n return parse_string(string,idx+1,strict)\n elif nextchar =='{':\n return parse_object((string,idx+1),strict,\n _scan_once,object_hook,object_pairs_hook,memo)\n elif nextchar =='[':\n return parse_array((string,idx+1),_scan_once)\n elif nextchar =='n'and string[idx:idx+4]=='null':\n return None ,idx+4\n elif nextchar 
=='t'and string[idx:idx+4]=='true':\n return True ,idx+4\n elif nextchar =='f'and string[idx:idx+5]=='false':\n return False ,idx+5\n \n m=match_number(string,idx)\n if m is not None :\n integer,frac,exp=m.groups()\n if frac or exp:\n res=parse_float(integer+(frac or '')+(exp or ''))\n else :\n res=parse_int(integer)\n return res,m.end()\n elif nextchar =='N'and string[idx:idx+3]=='NaN':\n return parse_constant('NaN'),idx+3\n elif nextchar =='I'and string[idx:idx+8]=='Infinity':\n return parse_constant('Infinity'),idx+8\n elif nextchar =='-'and string[idx:idx+9]=='-Infinity':\n return parse_constant('-Infinity'),idx+9\n else :\n raise StopIteration(idx)\n \n def scan_once(string,idx):\n try :\n return _scan_once(string,idx)\n finally :\n memo.clear()\n \n return scan_once\n \nmake_scanner=c_make_scanner or py_make_scanner\n", ["_json", "re"]], "json.tool": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\nimport argparse\nimport json\nimport sys\n\n\ndef main():\n prog='python -m json.tool'\n description=('A simple command line interface for json module '\n 'to validate and pretty-print JSON objects.')\n parser=argparse.ArgumentParser(prog=prog,description=description)\n parser.add_argument('infile',nargs='?',type=argparse.FileType(),\n help='a JSON file to be validated or pretty-printed')\n parser.add_argument('outfile',nargs='?',type=argparse.FileType('w'),\n help='write the output of infile to outfile')\n parser.add_argument('--sort-keys',action='store_true',default=False ,\n help='sort the output of dictionaries alphabetically by key')\n options=parser.parse_args()\n \n infile=options.infile or sys.stdin\n outfile=options.outfile or sys.stdout\n sort_keys=options.sort_keys\n with infile:\n try :\n obj=json.load(infile)\n except ValueError as e:\n raise SystemExit(e)\n with outfile:\n json.dump(obj,outfile,sort_keys=sort_keys,indent=4)\n outfile.write('\\n')\n \n \nif __name__ =='__main__':\n main()\n", ["argparse", "json", "sys"]], "json": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n__version__='2.0.9'\n__all__=[\n'dump','dumps','load','loads',\n'JSONDecoder','JSONDecodeError','JSONEncoder',\n]\n\n__author__='Bob Ippolito '\n\nfrom .decoder import JSONDecoder,JSONDecodeError\nfrom .encoder import JSONEncoder\nimport codecs\n\n_default_encoder=JSONEncoder(\nskipkeys=False ,\nensure_ascii=True ,\ncheck_circular=True ,\nallow_nan=True ,\nindent=None ,\nseparators=None ,\ndefault=None ,\n)\n\ndef dump(obj,fp,*,skipkeys=False ,ensure_ascii=True ,check_circular=True ,\nallow_nan=True ,cls=None ,indent=None ,separators=None ,\ndefault=None ,sort_keys=False ,**kw):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n if (not skipkeys and ensure_ascii and\n check_circular and allow_nan and\n cls is None and indent is None and separators is None and\n default is None and not sort_keys and not kw):\n iterable=_default_encoder.iterencode(obj)\n else :\n if cls is None :\n cls=JSONEncoder\n iterable=cls(skipkeys=skipkeys,ensure_ascii=ensure_ascii,\n check_circular=check_circular,allow_nan=allow_nan,indent=indent,\n separators=separators,\n default=default,sort_keys=sort_keys,**kw).iterencode(obj)\n \n \n for chunk in iterable:\n fp.write(chunk)\n \n \ndef dumps(obj,*,skipkeys=False ,ensure_ascii=True ,check_circular=True ,\nallow_nan=True ,cls=None ,indent=None ,separators=None ,\ndefault=None ,sort_keys=False 
,**kw):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n if (not skipkeys and ensure_ascii and\n check_circular and allow_nan and\n cls is None and indent is None and separators is None and\n default is None and not sort_keys and not kw):\n return _default_encoder.encode(obj)\n if cls is None :\n cls=JSONEncoder\n return cls(\n skipkeys=skipkeys,ensure_ascii=ensure_ascii,\n check_circular=check_circular,allow_nan=allow_nan,indent=indent,\n separators=separators,default=default,sort_keys=sort_keys,\n **kw).encode(obj)\n \n \n_default_decoder=JSONDecoder(object_hook=None ,object_pairs_hook=None )\n\n\ndef detect_encoding(b):\n bstartswith=b.startswith\n if bstartswith((codecs.BOM_UTF32_BE,codecs.BOM_UTF32_LE)):\n return 'utf-32'\n if bstartswith((codecs.BOM_UTF16_BE,codecs.BOM_UTF16_LE)):\n return 'utf-16'\n if bstartswith(codecs.BOM_UTF8):\n return 'utf-8-sig'\n \n if len(b)>=4:\n if not b[0]:\n \n \n return 'utf-16-be'if b[1]else 'utf-32-be'\n if not b[1]:\n \n \n \n return 'utf-16-le'if b[2]or b[3]else 'utf-32-le'\n elif len(b)==2:\n if not b[0]:\n \n return 'utf-16-be'\n if not b[1]:\n \n return 'utf-16-le'\n \n return 'utf-8'\n \n \ndef load(fp,*,cls=None ,object_hook=None ,parse_float=None ,\nparse_int=None ,parse_constant=None ,object_pairs_hook=None ,**kw):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n return loads(fp.read(),\n cls=cls,object_hook=object_hook,\n parse_float=parse_float,parse_int=parse_int,\n parse_constant=parse_constant,object_pairs_hook=object_pairs_hook,**kw)\n \n \ndef loads(s,*,encoding=None ,cls=None ,object_hook=None ,parse_float=None ,\nparse_int=None ,parse_constant=None ,object_pairs_hook=None ,**kw):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if isinstance(s,str):\n if s.startswith('\\ufeff'):\n raise JSONDecodeError(\"Unexpected UTF-8 BOM (decode using utf-8-sig)\",\n s,0)\n else :\n if not isinstance(s,(bytes,bytearray)):\n raise TypeError(f'the JSON object must be str, bytes or bytearray, '\n f'not {s.__class__.__name__}')\n s=s.decode(detect_encoding(s),'surrogatepass')\n \n if (cls is None and object_hook is None and\n parse_int is None and parse_float is None and\n parse_constant is None and object_pairs_hook is None and not kw):\n return _default_decoder.decode(s)\n if cls is None :\n cls=JSONDecoder\n if object_hook is not None :\n kw['object_hook']=object_hook\n if object_pairs_hook is not None :\n kw['object_pairs_hook']=object_pairs_hook\n if parse_float is not None :\n kw['parse_float']=parse_float\n if parse_int is not None :\n kw['parse_int']=parse_int\n if parse_constant is not None :\n kw['parse_constant']=parse_constant\n return cls(**kw).decode(s)\n", ["codecs", "json.decoder", "json.encoder"], 1], "logging.brython_handlers": [".py", "import logging\n\nfrom browser.ajax import ajax\n\n\nclass XMLHTTPHandler(logging.Handler):\n ''\n\n\n \n def __init__(self,url,method=\"GET\"):\n ''\n\n\n \n logging.Handler.__init__(self)\n method=method.upper()\n if method not in [\"GET\",\"POST\"]:\n raise ValueError(\"method must be GET or POST\")\n self.url=url\n self.method=method\n \n def mapLogRecord(self,record):\n ''\n\n\n\n \n return record.__dict__\n \n def emit(self,record):\n ''\n\n\n\n \n try :\n req=ajax.open(self.method,self.url,sync=False )\n req.send(self.mapLogRecord(record))\n except :\n self.handleError(record)\n", ["browser.ajax", "logging"]], "logging.config": [".py", "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\"\"\"\nConfiguration functions for the logging package for Python. 
The core package\nis based on PEP 282 and comments thereto in comp.lang.python, and influenced\nby Apache's log4j system.\n\nCopyright (C) 2001-2013 Vinay Sajip. All Rights Reserved.\n\nTo use, simply 'import logging' and log away!\n\"\"\"\n\nimport sys,logging,logging.handlers,socket,struct,traceback,re\nimport io\n\ntry :\n import _thread as thread\n import threading\nexcept ImportError:\n thread=None\n \nfrom socketserver import ThreadingTCPServer,StreamRequestHandler\n\n\nDEFAULT_LOGGING_CONFIG_PORT=9030\n\nif sys.platform ==\"win32\":\n RESET_ERROR=10054\nelse :\n RESET_ERROR=104\n \n \n \n \n \n \n_listener=None\n\ndef fileConfig(fname,defaults=None ,disable_existing_loggers=True ):\n ''\n\n\n\n\n\n\n \n import configparser\n \n cp=configparser.ConfigParser(defaults)\n if hasattr(fname,'readline'):\n cp.read_file(fname)\n else :\n cp.read(fname)\n \n formatters=_create_formatters(cp)\n \n \n logging._acquireLock()\n try :\n logging._handlers.clear()\n del logging._handlerList[:]\n \n handlers=_install_handlers(cp,formatters)\n _install_loggers(cp,handlers,disable_existing_loggers)\n finally :\n logging._releaseLock()\n \n \ndef _resolve(name):\n ''\n name=name.split('.')\n used=name.pop(0)\n found=__import__(used)\n for n in name:\n used=used+'.'+n\n try :\n found=getattr(found,n)\n except AttributeError:\n __import__(used)\n found=getattr(found,n)\n return found\n \ndef _strip_spaces(alist):\n return map(lambda x:x.strip(),alist)\n \ndef _create_formatters(cp):\n ''\n flist=cp[\"formatters\"][\"keys\"]\n if not len(flist):\n return {}\n flist=flist.split(\",\")\n flist=_strip_spaces(flist)\n formatters={}\n for form in flist:\n sectname=\"formatter_%s\"%form\n fs=cp.get(sectname,\"format\",raw=True ,fallback=None )\n dfs=cp.get(sectname,\"datefmt\",raw=True ,fallback=None )\n c=logging.Formatter\n class_name=cp[sectname].get(\"class\")\n if class_name:\n c=_resolve(class_name)\n f=c(fs,dfs)\n formatters[form]=f\n return formatters\n \n \ndef _install_handlers(cp,formatters):\n ''\n hlist=cp[\"handlers\"][\"keys\"]\n if not len(hlist):\n return {}\n hlist=hlist.split(\",\")\n hlist=_strip_spaces(hlist)\n handlers={}\n fixups=[]\n for hand in hlist:\n section=cp[\"handler_%s\"%hand]\n klass=section[\"class\"]\n fmt=section.get(\"formatter\",\"\")\n try :\n klass=eval(klass,vars(logging))\n except (AttributeError,NameError):\n klass=_resolve(klass)\n args=section[\"args\"]\n args=eval(args,vars(logging))\n h=klass(*args)\n if \"level\"in section:\n level=section[\"level\"]\n h.setLevel(logging._levelNames[level])\n if len(fmt):\n h.setFormatter(formatters[fmt])\n if issubclass(klass,logging.handlers.MemoryHandler):\n target=section.get(\"target\",\"\")\n if len(target):\n fixups.append((h,target))\n handlers[hand]=h\n \n for h,t in fixups:\n h.setTarget(handlers[t])\n return handlers\n \ndef _handle_existing_loggers(existing,child_loggers,disable_existing):\n ''\n\n\n\n\n\n\n\n\n \n root=logging.root\n for log in existing:\n logger=root.manager.loggerDict[log]\n if log in child_loggers:\n logger.level=logging.NOTSET\n logger.handlers=[]\n logger.propagate=True\n else :\n logger.disabled=disable_existing\n \ndef _install_loggers(cp,handlers,disable_existing):\n ''\n \n \n llist=cp[\"loggers\"][\"keys\"]\n llist=llist.split(\",\")\n llist=list(map(lambda x:x.strip(),llist))\n llist.remove(\"root\")\n section=cp[\"logger_root\"]\n root=logging.root\n log=root\n if \"level\"in section:\n level=section[\"level\"]\n log.setLevel(logging._levelNames[level])\n for h in root.handlers[:]:\n 
root.removeHandler(h)\n hlist=section[\"handlers\"]\n if len(hlist):\n hlist=hlist.split(\",\")\n hlist=_strip_spaces(hlist)\n for hand in hlist:\n log.addHandler(handlers[hand])\n \n \n \n \n \n \n \n \n \n \n existing=list(root.manager.loggerDict.keys())\n \n \n \n \n existing.sort()\n \n \n child_loggers=[]\n \n for log in llist:\n section=cp[\"logger_%s\"%log]\n qn=section[\"qualname\"]\n propagate=section.getint(\"propagate\",fallback=1)\n logger=logging.getLogger(qn)\n if qn in existing:\n i=existing.index(qn)+1\n prefixed=qn+\".\"\n pflen=len(prefixed)\n num_existing=len(existing)\n while i [a-z]+)://(?P.*)$')\n \n WORD_PATTERN=re.compile(r'^\\s*(\\w+)\\s*')\n DOT_PATTERN=re.compile(r'^\\.\\s*(\\w+)\\s*')\n INDEX_PATTERN=re.compile(r'^\\[\\s*(\\w+)\\s*\\]\\s*')\n DIGIT_PATTERN=re.compile(r'^\\d+$')\n \n value_converters={\n 'ext':'ext_convert',\n 'cfg':'cfg_convert',\n }\n \n \n importer=staticmethod(__import__)\n \n def __init__(self,config):\n self.config=ConvertingDict(config)\n self.config.configurator=self\n \n def resolve(self,s):\n ''\n\n\n \n name=s.split('.')\n used=name.pop(0)\n try :\n found=self.importer(used)\n for frag in name:\n used +='.'+frag\n try :\n found=getattr(found,frag)\n except AttributeError:\n self.importer(used)\n found=getattr(found,frag)\n return found\n except ImportError:\n e,tb=sys.exc_info()[1:]\n v=ValueError('Cannot resolve %r: %s'%(s,e))\n v.__cause__,v.__traceback__=e,tb\n raise v\n \n def ext_convert(self,value):\n ''\n return self.resolve(value)\n \n def cfg_convert(self,value):\n ''\n rest=value\n m=self.WORD_PATTERN.match(rest)\n if m is None :\n raise ValueError(\"Unable to convert %r\"%value)\n else :\n rest=rest[m.end():]\n d=self.config[m.groups()[0]]\n \n while rest:\n m=self.DOT_PATTERN.match(rest)\n if m:\n d=d[m.groups()[0]]\n else :\n m=self.INDEX_PATTERN.match(rest)\n if m:\n idx=m.groups()[0]\n if not self.DIGIT_PATTERN.match(idx):\n d=d[idx]\n else :\n try :\n n=int(idx)\n d=d[n]\n except TypeError:\n d=d[idx]\n if m:\n rest=rest[m.end():]\n else :\n raise ValueError('Unable to convert '\n '%r at %r'%(value,rest))\n \n return d\n \n def convert(self,value):\n ''\n\n\n\n \n if not isinstance(value,ConvertingDict)and isinstance(value,dict):\n value=ConvertingDict(value)\n value.configurator=self\n elif not isinstance(value,ConvertingList)and isinstance(value,list):\n value=ConvertingList(value)\n value.configurator=self\n elif not isinstance(value,ConvertingTuple)and\\\n isinstance(value,tuple):\n value=ConvertingTuple(value)\n value.configurator=self\n elif isinstance(value,str):\n m=self.CONVERT_PATTERN.match(value)\n if m:\n d=m.groupdict()\n prefix=d['prefix']\n converter=self.value_converters.get(prefix,None )\n if converter:\n suffix=d['suffix']\n converter=getattr(self,converter)\n value=converter(suffix)\n return value\n \n def configure_custom(self,config):\n ''\n c=config.pop('()')\n if not callable(c):\n c=self.resolve(c)\n props=config.pop('.',None )\n \n kwargs=dict([(k,config[k])for k in config if valid_ident(k)])\n result=c(**kwargs)\n if props:\n for name,value in props.items():\n setattr(result,name,value)\n return result\n \n def as_tuple(self,value):\n ''\n if isinstance(value,list):\n value=tuple(value)\n return value\n \nclass DictConfigurator(BaseConfigurator):\n ''\n\n\n \n \n def configure(self):\n ''\n \n config=self.config\n if 'version'not in config:\n raise ValueError(\"dictionary doesn't specify a version\")\n if config['version']!=1:\n raise ValueError(\"Unsupported version: %s\"%config['version'])\n 
incremental=config.pop('incremental',False )\n EMPTY_DICT={}\n logging._acquireLock()\n try :\n if incremental:\n handlers=config.get('handlers',EMPTY_DICT)\n for name in handlers:\n if name not in logging._handlers:\n raise ValueError('No handler found with '\n 'name %r'%name)\n else :\n try :\n handler=logging._handlers[name]\n handler_config=handlers[name]\n level=handler_config.get('level',None )\n if level:\n handler.setLevel(logging._checkLevel(level))\n except Exception as e:\n raise ValueError('Unable to configure handler '\n '%r: %s'%(name,e))\n loggers=config.get('loggers',EMPTY_DICT)\n for name in loggers:\n try :\n self.configure_logger(name,loggers[name],True )\n except Exception as e:\n raise ValueError('Unable to configure logger '\n '%r: %s'%(name,e))\n root=config.get('root',None )\n if root:\n try :\n self.configure_root(root,True )\n except Exception as e:\n raise ValueError('Unable to configure root '\n 'logger: %s'%e)\n else :\n disable_existing=config.pop('disable_existing_loggers',True )\n \n logging._handlers.clear()\n del logging._handlerList[:]\n \n \n formatters=config.get('formatters',EMPTY_DICT)\n for name in formatters:\n try :\n formatters[name]=self.configure_formatter(\n formatters[name])\n except Exception as e:\n raise ValueError('Unable to configure '\n 'formatter %r: %s'%(name,e))\n \n filters=config.get('filters',EMPTY_DICT)\n for name in filters:\n try :\n filters[name]=self.configure_filter(filters[name])\n except Exception as e:\n raise ValueError('Unable to configure '\n 'filter %r: %s'%(name,e))\n \n \n \n \n handlers=config.get('handlers',EMPTY_DICT)\n deferred=[]\n for name in sorted(handlers):\n try :\n handler=self.configure_handler(handlers[name])\n handler.name=name\n handlers[name]=handler\n except Exception as e:\n if 'target not configured yet'in str(e):\n deferred.append(name)\n else :\n raise ValueError('Unable to configure handler '\n '%r: %s'%(name,e))\n \n \n for name in deferred:\n try :\n handler=self.configure_handler(handlers[name])\n handler.name=name\n handlers[name]=handler\n except Exception as e:\n raise ValueError('Unable to configure handler '\n '%r: %s'%(name,e))\n \n \n \n \n \n \n \n \n \n \n \n root=logging.root\n existing=list(root.manager.loggerDict.keys())\n \n \n \n \n existing.sort()\n \n \n child_loggers=[]\n \n loggers=config.get('loggers',EMPTY_DICT)\n for name in loggers:\n if name in existing:\n i=existing.index(name)+1\n prefixed=name+\".\"\n pflen=len(prefixed)\n num_existing=len(existing)\n while i L\",chunk)[0]\n chunk=self.connection.recv(slen)\n while len(chunk)0:\n mode='a'\n BaseRotatingHandler.__init__(self,filename,mode,encoding,delay)\n self.maxBytes=maxBytes\n self.backupCount=backupCount\n \n def doRollover(self):\n ''\n\n \n if self.stream:\n self.stream.close()\n self.stream=None\n if self.backupCount >0:\n for i in range(self.backupCount -1,0,-1):\n sfn=self.rotation_filename(\"%s.%d\"%(self.baseFilename,i))\n dfn=self.rotation_filename(\"%s.%d\"%(self.baseFilename,\n i+1))\n if os.path.exists(sfn):\n if os.path.exists(dfn):\n os.remove(dfn)\n os.rename(sfn,dfn)\n dfn=self.rotation_filename(self.baseFilename+\".1\")\n if os.path.exists(dfn):\n os.remove(dfn)\n self.rotate(self.baseFilename,dfn)\n if not self.delay:\n self.stream=self._open()\n \n def shouldRollover(self,record):\n ''\n\n\n\n\n \n if self.stream is None :\n self.stream=self._open()\n if self.maxBytes >0:\n msg=\"%s\\n\"%self.format(record)\n self.stream.seek(0,2)\n if self.stream.tell()+len(msg)>=self.maxBytes:\n return 1\n 
return 0\n \nclass TimedRotatingFileHandler(BaseRotatingHandler):\n ''\n\n\n\n\n\n \n def __init__(self,filename,when='h',interval=1,backupCount=0,encoding=None ,delay=False ,utc=False ):\n BaseRotatingHandler.__init__(self,filename,'a',encoding,delay)\n self.when=when.upper()\n self.backupCount=backupCount\n self.utc=utc\n \n \n \n \n \n \n \n \n \n \n \n \n if self.when =='S':\n self.interval=1\n self.suffix=\"%Y-%m-%d_%H-%M-%S\"\n self.extMatch=r\"^\\d{4}-\\d{2}-\\d{2}_\\d{2}-\\d{2}-\\d{2}(\\.\\w+)?$\"\n elif self.when =='M':\n self.interval=60\n self.suffix=\"%Y-%m-%d_%H-%M\"\n self.extMatch=r\"^\\d{4}-\\d{2}-\\d{2}_\\d{2}-\\d{2}(\\.\\w+)?$\"\n elif self.when =='H':\n self.interval=60 *60\n self.suffix=\"%Y-%m-%d_%H\"\n self.extMatch=r\"^\\d{4}-\\d{2}-\\d{2}_\\d{2}(\\.\\w+)?$\"\n elif self.when =='D'or self.when =='MIDNIGHT':\n self.interval=60 *60 *24\n self.suffix=\"%Y-%m-%d\"\n self.extMatch=r\"^\\d{4}-\\d{2}-\\d{2}(\\.\\w+)?$\"\n elif self.when.startswith('W'):\n self.interval=60 *60 *24 *7\n if len(self.when)!=2:\n raise ValueError(\"You must specify a day for weekly rollover from 0 to 6 (0 is Monday): %s\"%self.when)\n if self.when[1]<'0'or self.when[1]>'6':\n raise ValueError(\"Invalid day specified for weekly rollover: %s\"%self.when)\n self.dayOfWeek=int(self.when[1])\n self.suffix=\"%Y-%m-%d\"\n self.extMatch=r\"^\\d{4}-\\d{2}-\\d{2}(\\.\\w+)?$\"\n else :\n raise ValueError(\"Invalid rollover interval specified: %s\"%self.when)\n \n self.extMatch=re.compile(self.extMatch,re.ASCII)\n self.interval=self.interval *interval\n if os.path.exists(filename):\n t=os.stat(filename)[ST_MTIME]\n else :\n t=int(time.time())\n self.rolloverAt=self.computeRollover(t)\n \n def computeRollover(self,currentTime):\n ''\n\n \n result=currentTime+self.interval\n \n \n \n \n \n \n \n if self.when =='MIDNIGHT'or self.when.startswith('W'):\n \n if self.utc:\n t=time.gmtime(currentTime)\n else :\n t=time.localtime(currentTime)\n currentHour=t[3]\n currentMinute=t[4]\n currentSecond=t[5]\n \n r=_MIDNIGHT -((currentHour *60+currentMinute)*60+\n currentSecond)\n result=currentTime+r\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n if self.when.startswith('W'):\n day=t[6]\n if day !=self.dayOfWeek:\n if day =self.rolloverAt:\n return 1\n return 0\n \n def getFilesToDelete(self):\n ''\n\n\n\n \n dirName,baseName=os.path.split(self.baseFilename)\n fileNames=os.listdir(dirName)\n result=[]\n prefix=baseName+\".\"\n plen=len(prefix)\n for fileName in fileNames:\n if fileName[:plen]==prefix:\n suffix=fileName[plen:]\n if self.extMatch.match(suffix):\n result.append(os.path.join(dirName,fileName))\n result.sort()\n if len(result)0:\n for s in self.getFilesToDelete():\n os.remove(s)\n if not self.delay:\n self.stream=self._open()\n newRolloverAt=self.computeRollover(currentTime)\n while newRolloverAt <=currentTime:\n newRolloverAt=newRolloverAt+self.interval\n \n if (self.when =='MIDNIGHT'or self.when.startswith('W'))and not self.utc:\n dstAtRollover=time.localtime(newRolloverAt)[-1]\n if dstNow !=dstAtRollover:\n if not dstNow:\n addend=-3600\n else :\n addend=3600\n newRolloverAt +=addend\n self.rolloverAt=newRolloverAt\n \nclass WatchedFileHandler(logging.FileHandler):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n def __init__(self,filename,mode='a',encoding=None ,delay=False ):\n logging.FileHandler.__init__(self,filename,mode,encoding,delay)\n self.dev,self.ino=-1,-1\n self._statstream()\n \n def _statstream(self):\n if self.stream:\n sres=os.fstat(self.stream.fileno())\n 
self.dev,self.ino=sres[ST_DEV],sres[ST_INO]\n \n def emit(self,record):\n ''\n\n\n\n\n\n \n \n \n \n \n try :\n \n sres=os.stat(self.baseFilename)\n except OSError as err:\n if err.errno ==errno.ENOENT:\n sres=None\n else :\n raise\n \n if not sres or sres[ST_DEV]!=self.dev or sres[ST_INO]!=self.ino:\n if self.stream is not None :\n \n self.stream.flush()\n self.stream.close()\n \n self.stream=self._open()\n self._statstream()\n logging.FileHandler.emit(self,record)\n \n \nclass SocketHandler(logging.Handler):\n ''\n\n\n\n\n\n\n\n\n\n \n \n def __init__(self,host,port):\n ''\n\n\n\n\n\n \n logging.Handler.__init__(self)\n self.host=host\n self.port=port\n self.sock=None\n self.closeOnError=False\n self.retryTime=None\n \n \n \n self.retryStart=1.0\n self.retryMax=30.0\n self.retryFactor=2.0\n \n def makeSocket(self,timeout=1):\n ''\n\n\n \n s=socket.socket(socket.AF_INET,socket.SOCK_STREAM)\n if hasattr(s,'settimeout'):\n s.settimeout(timeout)\n try :\n s.connect((self.host,self.port))\n return s\n except socket.error:\n s.close()\n raise\n \n def createSocket(self):\n ''\n\n\n\n \n now=time.time()\n \n \n \n if self.retryTime is None :\n attempt=True\n else :\n attempt=(now >=self.retryTime)\n if attempt:\n try :\n self.sock=self.makeSocket()\n self.retryTime=None\n except socket.error:\n \n if self.retryTime is None :\n self.retryPeriod=self.retryStart\n else :\n self.retryPeriod=self.retryPeriod *self.retryFactor\n if self.retryPeriod >self.retryMax:\n self.retryPeriod=self.retryMax\n self.retryTime=now+self.retryPeriod\n \n def send(self,s):\n ''\n\n\n\n\n \n if self.sock is None :\n self.createSocket()\n \n \n \n if self.sock:\n try :\n if hasattr(self.sock,\"sendall\"):\n self.sock.sendall(s)\n else :\n sentsofar=0\n left=len(s)\n while left >0:\n sent=self.sock.send(s[sentsofar:])\n sentsofar=sentsofar+sent\n left=left -sent\n except socket.error:\n self.sock.close()\n self.sock=None\n \n def makePickle(self,record):\n ''\n\n\n \n ei=record.exc_info\n if ei:\n \n dummy=self.format(record)\n \n \n \n d=dict(record.__dict__)\n d['msg']=record.getMessage()\n d['args']=None\n d['exc_info']=None\n s=pickle.dumps(d,1)\n slen=struct.pack(\">L\",len(s))\n return slen+s\n \n def handleError(self,record):\n ''\n\n\n\n\n\n \n if self.closeOnError and self.sock:\n self.sock.close()\n self.sock=None\n else :\n logging.Handler.handleError(self,record)\n \n def emit(self,record):\n ''\n\n\n\n\n\n\n \n try :\n s=self.makePickle(record)\n self.send(s)\n except (KeyboardInterrupt,SystemExit):\n raise\n except :\n self.handleError(record)\n \n def close(self):\n ''\n\n \n self.acquire()\n try :\n if self.sock:\n self.sock.close()\n self.sock=None\n logging.Handler.close(self)\n finally :\n self.release()\n \nclass DatagramHandler(SocketHandler):\n ''\n\n\n\n\n\n\n\n\n \n def __init__(self,host,port):\n ''\n\n \n SocketHandler.__init__(self,host,port)\n self.closeOnError=False\n \n def makeSocket(self):\n ''\n\n\n \n s=socket.socket(socket.AF_INET,socket.SOCK_DGRAM)\n return s\n \n def send(self,s):\n ''\n\n\n\n\n\n \n if self.sock is None :\n self.createSocket()\n self.sock.sendto(s,(self.host,self.port))\n \nclass SysLogHandler(logging.Handler):\n ''\n\n\n\n\n\n \n \n \n \n \n \n \n \n \n \n \n \n LOG_EMERG=0\n LOG_ALERT=1\n LOG_CRIT=2\n LOG_ERR=3\n LOG_WARNING=4\n LOG_NOTICE=5\n LOG_INFO=6\n LOG_DEBUG=7\n \n \n LOG_KERN=0\n LOG_USER=1\n LOG_MAIL=2\n LOG_DAEMON=3\n LOG_AUTH=4\n LOG_SYSLOG=5\n LOG_LPR=6\n LOG_NEWS=7\n LOG_UUCP=8\n LOG_CRON=9\n LOG_AUTHPRIV=10\n LOG_FTP=11\n \n \n LOG_LOCAL0=16\n 
LOG_LOCAL1=17\n LOG_LOCAL2=18\n LOG_LOCAL3=19\n LOG_LOCAL4=20\n LOG_LOCAL5=21\n LOG_LOCAL6=22\n LOG_LOCAL7=23\n \n priority_names={\n \"alert\":LOG_ALERT,\n \"crit\":LOG_CRIT,\n \"critical\":LOG_CRIT,\n \"debug\":LOG_DEBUG,\n \"emerg\":LOG_EMERG,\n \"err\":LOG_ERR,\n \"error\":LOG_ERR,\n \"info\":LOG_INFO,\n \"notice\":LOG_NOTICE,\n \"panic\":LOG_EMERG,\n \"warn\":LOG_WARNING,\n \"warning\":LOG_WARNING,\n }\n \n facility_names={\n \"auth\":LOG_AUTH,\n \"authpriv\":LOG_AUTHPRIV,\n \"cron\":LOG_CRON,\n \"daemon\":LOG_DAEMON,\n \"ftp\":LOG_FTP,\n \"kern\":LOG_KERN,\n \"lpr\":LOG_LPR,\n \"mail\":LOG_MAIL,\n \"news\":LOG_NEWS,\n \"security\":LOG_AUTH,\n \"syslog\":LOG_SYSLOG,\n \"user\":LOG_USER,\n \"uucp\":LOG_UUCP,\n \"local0\":LOG_LOCAL0,\n \"local1\":LOG_LOCAL1,\n \"local2\":LOG_LOCAL2,\n \"local3\":LOG_LOCAL3,\n \"local4\":LOG_LOCAL4,\n \"local5\":LOG_LOCAL5,\n \"local6\":LOG_LOCAL6,\n \"local7\":LOG_LOCAL7,\n }\n \n \n \n \n \n priority_map={\n \"DEBUG\":\"debug\",\n \"INFO\":\"info\",\n \"WARNING\":\"warning\",\n \"ERROR\":\"error\",\n \"CRITICAL\":\"critical\"\n }\n \n def __init__(self,address=('localhost',SYSLOG_UDP_PORT),\n facility=LOG_USER,socktype=None ):\n ''\n\n\n\n\n\n \n logging.Handler.__init__(self)\n \n self.address=address\n self.facility=facility\n self.socktype=socktype\n \n if isinstance(address,str):\n self.unixsocket=True\n self._connect_unixsocket(address)\n else :\n self.unixsocket=False\n if socktype is None :\n socktype=socket.SOCK_DGRAM\n self.socket=socket.socket(socket.AF_INET,socktype)\n if socktype ==socket.SOCK_STREAM:\n self.socket.connect(address)\n self.socktype=socktype\n self.formatter=None\n \n def _connect_unixsocket(self,address):\n use_socktype=self.socktype\n if use_socktype is None :\n use_socktype=socket.SOCK_DGRAM\n self.socket=socket.socket(socket.AF_UNIX,use_socktype)\n try :\n self.socket.connect(address)\n \n self.socktype=use_socktype\n except socket.error:\n self.socket.close()\n if self.socktype is not None :\n \n raise\n use_socktype=socket.SOCK_STREAM\n self.socket=socket.socket(socket.AF_UNIX,use_socktype)\n try :\n self.socket.connect(address)\n \n self.socktype=use_socktype\n except socket.error:\n self.socket.close()\n raise\n \n def encodePriority(self,facility,priority):\n ''\n\n\n\n\n \n if isinstance(facility,str):\n facility=self.facility_names[facility]\n if isinstance(priority,str):\n priority=self.priority_names[priority]\n return (facility <<3)|priority\n \n def close(self):\n ''\n\n \n self.acquire()\n try :\n self.socket.close()\n logging.Handler.close(self)\n finally :\n self.release()\n \n def mapPriority(self,levelName):\n ''\n\n\n\n\n\n \n return self.priority_map.get(levelName,\"warning\")\n \n ident=''\n append_nul=True\n \n def emit(self,record):\n ''\n\n\n\n\n \n msg=self.format(record)\n if self.ident:\n msg=self.ident+msg\n if self.append_nul:\n msg +='\\000'\n ''\n\n\n \n prio='<%d>'%self.encodePriority(self.facility,\n self.mapPriority(record.levelname))\n prio=prio.encode('utf-8')\n \n msg=msg.encode('utf-8')\n msg=prio+msg\n try :\n if self.unixsocket:\n try :\n self.socket.send(msg)\n except socket.error:\n self.socket.close()\n self._connect_unixsocket(self.address)\n self.socket.send(msg)\n elif self.socktype ==socket.SOCK_DGRAM:\n self.socket.sendto(msg,self.address)\n else :\n self.socket.sendall(msg)\n except (KeyboardInterrupt,SystemExit):\n raise\n except :\n self.handleError(record)\n \nclass SMTPHandler(logging.Handler):\n ''\n\n \n def __init__(self,mailhost,fromaddr,toaddrs,subject,\n 
credentials=None ,secure=None ,timeout=5.0):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n logging.Handler.__init__(self)\n if isinstance(mailhost,tuple):\n self.mailhost,self.mailport=mailhost\n else :\n self.mailhost,self.mailport=mailhost,None\n if isinstance(credentials,tuple):\n self.username,self.password=credentials\n else :\n self.username=None\n self.fromaddr=fromaddr\n if isinstance(toaddrs,str):\n toaddrs=[toaddrs]\n self.toaddrs=toaddrs\n self.subject=subject\n self.secure=secure\n self.timeout=timeout\n \n def getSubject(self,record):\n ''\n\n\n\n\n \n return self.subject\n \n def emit(self,record):\n ''\n\n\n\n \n try :\n import smtplib\n from email.utils import formatdate\n port=self.mailport\n if not port:\n port=smtplib.SMTP_PORT\n smtp=smtplib.SMTP(self.mailhost,port,timeout=self.timeout)\n msg=self.format(record)\n msg=\"From: %s\\r\\nTo: %s\\r\\nSubject: %s\\r\\nDate: %s\\r\\n\\r\\n%s\"%(\n self.fromaddr,\n \",\".join(self.toaddrs),\n self.getSubject(record),\n formatdate(),msg)\n if self.username:\n if self.secure is not None :\n smtp.ehlo()\n smtp.starttls(*self.secure)\n smtp.ehlo()\n smtp.login(self.username,self.password)\n smtp.sendmail(self.fromaddr,self.toaddrs,msg)\n smtp.quit()\n except (KeyboardInterrupt,SystemExit):\n raise\n except :\n self.handleError(record)\n \nclass NTEventLogHandler(logging.Handler):\n ''\n\n\n\n\n\n\n\n \n def __init__(self,appname,dllname=None ,logtype=\"Application\"):\n logging.Handler.__init__(self)\n try :\n import win32evtlogutil,win32evtlog\n self.appname=appname\n self._welu=win32evtlogutil\n if not dllname:\n dllname=os.path.split(self._welu.__file__)\n dllname=os.path.split(dllname[0])\n dllname=os.path.join(dllname[0],r'win32service.pyd')\n self.dllname=dllname\n self.logtype=logtype\n self._welu.AddSourceToRegistry(appname,dllname,logtype)\n self.deftype=win32evtlog.EVENTLOG_ERROR_TYPE\n self.typemap={\n logging.DEBUG:win32evtlog.EVENTLOG_INFORMATION_TYPE,\n logging.INFO:win32evtlog.EVENTLOG_INFORMATION_TYPE,\n logging.WARNING:win32evtlog.EVENTLOG_WARNING_TYPE,\n logging.ERROR:win32evtlog.EVENTLOG_ERROR_TYPE,\n logging.CRITICAL:win32evtlog.EVENTLOG_ERROR_TYPE,\n }\n except ImportError:\n print(\"The Python Win32 extensions for NT (service, event \"\\\n \"logging) appear not to be available.\")\n self._welu=None\n \n def getMessageID(self,record):\n ''\n\n\n\n\n\n \n return 1\n \n def getEventCategory(self,record):\n ''\n\n\n\n\n \n return 0\n \n def getEventType(self,record):\n ''\n\n\n\n\n\n\n\n\n \n return self.typemap.get(record.levelno,self.deftype)\n \n def emit(self,record):\n ''\n\n\n\n\n \n if self._welu:\n try :\n id=self.getMessageID(record)\n cat=self.getEventCategory(record)\n type=self.getEventType(record)\n msg=self.format(record)\n self._welu.ReportEvent(self.appname,id,cat,type,[msg])\n except (KeyboardInterrupt,SystemExit):\n raise\n except :\n self.handleError(record)\n \n def close(self):\n ''\n\n\n\n\n\n\n\n \n \n logging.Handler.close(self)\n \nclass HTTPHandler(logging.Handler):\n ''\n\n\n \n def __init__(self,host,url,method=\"GET\",secure=False ,credentials=None ):\n ''\n\n\n \n logging.Handler.__init__(self)\n method=method.upper()\n if method not in [\"GET\",\"POST\"]:\n raise ValueError(\"method must be GET or POST\")\n self.host=host\n self.url=url\n self.method=method\n self.secure=secure\n self.credentials=credentials\n \n def mapLogRecord(self,record):\n ''\n\n\n\n \n return record.__dict__\n \n def emit(self,record):\n ''\n\n\n\n \n try :\n import http.client,urllib.parse\n host=self.host\n if 
self.secure:\n h=http.client.HTTPSConnection(host)\n else :\n h=http.client.HTTPConnection(host)\n url=self.url\n data=urllib.parse.urlencode(self.mapLogRecord(record))\n if self.method ==\"GET\":\n if (url.find('?')>=0):\n sep='&'\n else :\n sep='?'\n url=url+\"%c%s\"%(sep,data)\n h.putrequest(self.method,url)\n \n \n i=host.find(\":\")\n if i >=0:\n host=host[:i]\n h.putheader(\"Host\",host)\n if self.method ==\"POST\":\n h.putheader(\"Content-type\",\n \"application/x-www-form-urlencoded\")\n h.putheader(\"Content-length\",str(len(data)))\n if self.credentials:\n import base64\n s=('u%s:%s'%self.credentials).encode('utf-8')\n s='Basic '+base64.b64encode(s).strip()\n h.putheader('Authorization',s)\n h.endheaders()\n if self.method ==\"POST\":\n h.send(data.encode('utf-8'))\n h.getresponse()\n except (KeyboardInterrupt,SystemExit):\n raise\n except :\n self.handleError(record)\n \nclass BufferingHandler(logging.Handler):\n ''\n\n\n\n \n def __init__(self,capacity):\n ''\n\n \n logging.Handler.__init__(self)\n self.capacity=capacity\n self.buffer=[]\n \n def shouldFlush(self,record):\n ''\n\n\n\n\n \n return (len(self.buffer)>=self.capacity)\n \n def emit(self,record):\n ''\n\n\n\n\n \n self.buffer.append(record)\n if self.shouldFlush(record):\n self.flush()\n \n def flush(self):\n ''\n\n\n\n \n self.acquire()\n try :\n self.buffer=[]\n finally :\n self.release()\n \n def close(self):\n ''\n\n\n\n \n self.flush()\n logging.Handler.close(self)\n \nclass MemoryHandler(BufferingHandler):\n ''\n\n\n\n \n def __init__(self,capacity,flushLevel=logging.ERROR,target=None ):\n ''\n\n\n\n\n\n \n BufferingHandler.__init__(self,capacity)\n self.flushLevel=flushLevel\n self.target=target\n \n def shouldFlush(self,record):\n ''\n\n \n return (len(self.buffer)>=self.capacity)or\\\n (record.levelno >=self.flushLevel)\n \n def setTarget(self,target):\n ''\n\n \n self.target=target\n \n def flush(self):\n ''\n\n\n\n\n\n \n self.acquire()\n try :\n if self.target:\n for record in self.buffer:\n self.target.handle(record)\n self.buffer=[]\n finally :\n self.release()\n \n def close(self):\n ''\n\n \n self.flush()\n self.acquire()\n try :\n self.target=None\n BufferingHandler.close(self)\n finally :\n self.release()\n \n \nclass QueueHandler(logging.Handler):\n ''\n\n\n\n\n\n\n\n \n \n def __init__(self,queue):\n ''\n\n \n logging.Handler.__init__(self)\n self.queue=queue\n \n def enqueue(self,record):\n ''\n\n\n\n\n\n \n self.queue.put_nowait(record)\n \n def prepare(self,record):\n ''\n\n\n\n\n\n\n\n\n\n\n \n \n \n \n \n \n \n self.format(record)\n record.msg=record.message\n record.args=None\n record.exc_info=None\n return record\n \n def emit(self,record):\n ''\n\n\n\n \n try :\n self.enqueue(self.prepare(record))\n except (KeyboardInterrupt,SystemExit):\n raise\n except :\n self.handleError(record)\n \nif threading:\n class QueueListener(object):\n ''\n\n\n\n \n _sentinel=None\n \n def __init__(self,queue,*handlers):\n ''\n\n\n \n self.queue=queue\n self.handlers=handlers\n self._stop=threading.Event()\n self._thread=None\n \n def dequeue(self,block):\n ''\n\n\n\n\n \n return self.queue.get(block)\n \n def start(self):\n ''\n\n\n\n\n \n self._thread=t=threading.Thread(target=self._monitor)\n t.setDaemon(True )\n t.start()\n \n def prepare(self,record):\n ''\n\n\n\n\n\n \n return record\n \n def handle(self,record):\n ''\n\n\n\n\n \n record=self.prepare(record)\n for handler in self.handlers:\n handler.handle(record)\n \n def _monitor(self):\n ''\n\n\n\n\n\n \n q=self.queue\n 
has_task_done=hasattr(q,'task_done')\n while not self._stop.isSet():\n try :\n record=self.dequeue(True )\n if record is self._sentinel:\n break\n self.handle(record)\n if has_task_done:\n q.task_done()\n except queue.Empty:\n pass\n \n while True :\n try :\n record=self.dequeue(False )\n if record is self._sentinel:\n break\n self.handle(record)\n if has_task_done:\n q.task_done()\n except queue.Empty:\n break\n \n def enqueue_sentinel(self):\n ''\n\n\n\n\n\n \n self.queue.put_nowait(self._sentinel)\n \n def stop(self):\n ''\n\n\n\n\n\n \n self._stop.set()\n self.enqueue_sentinel()\n self._thread.join()\n self._thread=None\n", ["base64", "codecs", "email.utils", "errno", "http.client", "logging", "logging.brython_handlers", "os", "pickle", "queue", "re", "smtplib", "socket", "stat", "struct", "threading", "time", "urllib.parse", "win32evtlog", "win32evtlogutil"]], "logging": [".py", "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\"\"\"\nLogging package for Python. Based on PEP 282 and comments thereto in\ncomp.lang.python.\n\nCopyright (C) 2001-2013 Vinay Sajip. All Rights Reserved.\n\nTo use, simply 'import logging' and log away!\n\"\"\"\n\nimport sys,os,time,io,traceback,warnings,weakref\nfrom string import Template\nfrom browser import console\n\n__all__=['BASIC_FORMAT','BufferingFormatter','CRITICAL','DEBUG','ERROR',\n'FATAL','FileHandler','Filter','Formatter','Handler','INFO',\n'LogRecord','Logger','LoggerAdapter','NOTSET','NullHandler',\n'StreamHandler','ConsoleHandler','WARN','WARNING','addLevelName','basicConfig',\n'captureWarnings','critical','debug','disable','error',\n'exception','fatal','getLevelName','getLogger','getLoggerClass',\n'info','log','makeLogRecord','setLoggerClass','warn','warning',\n'getLogRecordFactory','setLogRecordFactory','lastResort']\n\ntry :\n import threading\nexcept ImportError:\n threading=None\n \n__author__=\"Vinay Sajip \"\n__status__=\"production\"\n__version__=\"0.5.1.2\"\n__date__=\"07 February 2010\"\n\n\n\n\n\n\n\n\n\nif hasattr(sys,'frozen'):\n _srcfile=\"logging%s__init__%s\"%(os.sep,__file__[-4:])\nelse :\n _srcfile=__file__\n_srcfile=os.path.normcase(_srcfile)\n\n\nif hasattr(sys,'_getframe'):\n currentframe=lambda :sys._getframe(3)\nelse :\n def currentframe():\n ''\n try :\n raise Exception\n except :\n return sys.exc_info()[2].tb_frame.f_back\n \n \n \n \n \n \n \n \n \n \n \n_startTime=time.time()\n\n\n\n\n\nraiseExceptions=True\n\n\n\n\nlogThreads=True\n\n\n\n\nlogMultiprocessing=True\n\n\n\n\nlogProcesses=True\n\n\n\n\n\n\n\n\n\n\n\n\nCRITICAL=50\nFATAL=CRITICAL\nERROR=40\nWARNING=30\nWARN=WARNING\nINFO=20\nDEBUG=10\nNOTSET=0\n\n_levelNames={\nCRITICAL:'CRITICAL',\nERROR:'ERROR',\nWARNING:'WARNING',\nINFO:'INFO',\nDEBUG:'DEBUG',\nNOTSET:'NOTSET',\n'CRITICAL':CRITICAL,\n'ERROR':ERROR,\n'WARN':WARNING,\n'WARNING':WARNING,\n'INFO':INFO,\n'DEBUG':DEBUG,\n'NOTSET':NOTSET,\n}\n\ndef getLevelName(level):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n return _levelNames.get(level,(\"Level %s\"%level))\n \ndef addLevelName(level,levelName):\n ''\n\n\n\n \n _acquireLock()\n try :\n _levelNames[level]=levelName\n _levelNames[levelName]=level\n finally :\n _releaseLock()\n \ndef _checkLevel(level):\n if isinstance(level,int):\n rv=level\n elif str(level)==level:\n if level not in _levelNames:\n raise ValueError(\"Unknown level: %r\"%level)\n rv=_levelNames[level]\n else :\n raise TypeError(\"Level not an integer or a valid string: %r\"%level)\n return rv\n \n \n \n \n \n \n \n \n \n \n \n \n \nif threading:\n _lock=threading.RLock()\nelse :\n _lock=None\n \n \ndef 
_acquireLock():\n ''\n\n\n\n \n if _lock:\n _lock.acquire()\n \ndef _releaseLock():\n ''\n\n \n if _lock:\n _lock.release()\n \n \n \n \n \nclass LogRecord(object):\n ''\n\n\n\n\n\n\n\n\n\n \n def __init__(self,name,level,pathname,lineno,\n msg,args,exc_info,func=None ,sinfo=None ,**kwargs):\n ''\n\n \n ct=time.time()\n self.name=name\n self.msg=msg\n \n \n \n \n \n \n \n \n \n \n \n \n \n if args and len(args)==1 and isinstance(args[0],dict)and args[0]:\n args=args[0]\n self.args=args\n self.levelname=getLevelName(level)\n self.levelno=level\n self.pathname=pathname\n try :\n self.filename=os.path.basename(pathname)\n self.module=os.path.splitext(self.filename)[0]\n except (TypeError,ValueError,AttributeError):\n self.filename=pathname\n self.module=\"Unknown module\"\n self.exc_info=exc_info\n self.exc_text=None\n self.stack_info=sinfo\n self.lineno=lineno\n self.funcName=func\n self.created=ct\n self.msecs=(ct -int(ct))*1000\n self.relativeCreated=(self.created -_startTime)*1000\n if logThreads and threading:\n self.thread=threading.get_ident()\n self.threadName=threading.current_thread().name\n else :\n self.thread=None\n self.threadName=None\n if not logMultiprocessing:\n self.processName=None\n else :\n self.processName='MainProcess'\n mp=sys.modules.get('multiprocessing')\n if mp is not None :\n \n \n \n \n try :\n self.processName=mp.current_process().name\n except Exception:\n pass\n if logProcesses and hasattr(os,'getpid'):\n self.process=os.getpid()\n else :\n self.process=None\n \n def __str__(self):\n return ''%(self.name,self.levelno,\n self.pathname,self.lineno,self.msg)\n \n def getMessage(self):\n ''\n\n\n\n\n \n msg=str(self.msg)\n if self.args:\n msg=msg %self.args\n return msg\n \n \n \n \n_logRecordFactory=LogRecord\n\ndef setLogRecordFactory(factory):\n ''\n\n\n\n\n \n global _logRecordFactory\n _logRecordFactory=factory\n \ndef getLogRecordFactory():\n ''\n\n \n \n return _logRecordFactory\n \ndef makeLogRecord(dict):\n ''\n\n\n\n\n \n rv=_logRecordFactory(None ,None ,\"\",0,\"\",(),None ,None )\n rv.__dict__.update(dict)\n return rv\n \n \n \n \n \nclass PercentStyle(object):\n\n default_format='%(message)s'\n asctime_format='%(asctime)s'\n asctime_search='%(asctime)'\n \n def __init__(self,fmt):\n self._fmt=fmt or self.default_format\n \n def usesTime(self):\n return self._fmt.find(self.asctime_search)>=0\n \n def format(self,record):\n return self._fmt %record.__dict__\n \nclass StrFormatStyle(PercentStyle):\n default_format='{message}'\n asctime_format='{asctime}'\n asctime_search='{asctime'\n \n def format(self,record):\n return self._fmt.format(**record.__dict__)\n \n \nclass StringTemplateStyle(PercentStyle):\n default_format='${message}'\n asctime_format='${asctime}'\n asctime_search='${asctime}'\n \n def __init__(self,fmt):\n self._fmt=fmt or self.default_format\n self._tpl=Template(self._fmt)\n \n def usesTime(self):\n fmt=self._fmt\n return fmt.find('$asctime')>=0 or fmt.find(self.asctime_format)>=0\n \n def format(self,record):\n return self._tpl.substitute(**record.__dict__)\n \n_STYLES={\n'%':PercentStyle,\n'{':StrFormatStyle,\n'$':StringTemplateStyle\n}\n\nclass Formatter(object):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n converter=time.localtime\n \n def __init__(self,fmt=None ,datefmt=None ,style='%'):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n if style not in _STYLES:\n raise ValueError('Style must be one of: %s'%','.join(\n _STYLES.keys()))\n self._style=_STYLES[style](fmt)\n self._fmt=self._style._fmt\n 
self.datefmt=datefmt\n \n default_time_format='%Y-%m-%d %H:%M:%S'\n default_msec_format='%s,%03d'\n \n def formatTime(self,record,datefmt=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n ct=self.converter(record.created)\n if datefmt:\n s=time.strftime(datefmt,ct)\n else :\n t=time.strftime(self.default_time_format,ct)\n s=self.default_msec_format %(t,record.msecs)\n return s\n \n def formatException(self,ei):\n ''\n\n\n\n\n \n sio=io.StringIO()\n tb=ei[2]\n \n \n \n traceback.print_exc(file=sio)\n s=sio.getvalue()\n sio.close()\n if s[-1:]==\"\\n\":\n s=s[:-1]\n return s\n \n def usesTime(self):\n ''\n\n \n return self._style.usesTime()\n \n def formatMessage(self,record):\n return self._style.format(record)\n \n def formatStack(self,stack_info):\n ''\n\n\n\n\n\n\n\n\n \n return stack_info\n \n def format(self,record):\n ''\n\n\n\n\n\n\n\n\n\n\n \n record.message=record.getMessage()\n if self.usesTime():\n record.asctime=self.formatTime(record,self.datefmt)\n s=self.formatMessage(record)\n if record.exc_info:\n \n \n if not record.exc_text:\n record.exc_text=self.formatException(record.exc_info)\n if record.exc_text:\n if s[-1:]!=\"\\n\":\n s=s+\"\\n\"\n s=s+record.exc_text\n if record.stack_info:\n if s[-1:]!=\"\\n\":\n s=s+\"\\n\"\n s=s+self.formatStack(record.stack_info)\n return s\n \n \n \n \n_defaultFormatter=Formatter()\n\nclass BufferingFormatter(object):\n ''\n\n \n def __init__(self,linefmt=None ):\n ''\n\n\n \n if linefmt:\n self.linefmt=linefmt\n else :\n self.linefmt=_defaultFormatter\n \n def formatHeader(self,records):\n ''\n\n \n return \"\"\n \n def formatFooter(self,records):\n ''\n\n \n return \"\"\n \n def format(self,records):\n ''\n\n \n rv=\"\"\n if len(records)>0:\n rv=rv+self.formatHeader(records)\n for record in records:\n rv=rv+self.linefmt.format(record)\n rv=rv+self.formatFooter(records)\n return rv\n \n \n \n \n \nclass Filter(object):\n ''\n\n\n\n\n\n\n\n\n \n def __init__(self,name=''):\n ''\n\n\n\n\n\n \n self.name=name\n self.nlen=len(name)\n \n def filter(self,record):\n ''\n\n\n\n\n \n if self.nlen ==0:\n return True\n elif self.name ==record.name:\n return True\n elif record.name.find(self.name,0,self.nlen)!=0:\n return False\n return (record.name[self.nlen]==\".\")\n \nclass Filterer(object):\n ''\n\n\n \n def __init__(self):\n ''\n\n \n self.filters=[]\n \n def addFilter(self,filter):\n ''\n\n \n if not (filter in self.filters):\n self.filters.append(filter)\n \n def removeFilter(self,filter):\n ''\n\n \n if filter in self.filters:\n self.filters.remove(filter)\n \n def filter(self,record):\n ''\n\n\n\n\n\n\n\n\n\n \n rv=True\n for f in self.filters:\n if hasattr(f,'filter'):\n result=f.filter(record)\n else :\n result=f(record)\n if not result:\n rv=False\n break\n return rv\n \n \n \n \n \n_handlers=weakref.WeakValueDictionary()\n_handlerList=[]\n\ndef _removeHandlerRef(wr):\n ''\n\n \n \n \n \n if (_acquireLock is not None and _handlerList is not None and\n _releaseLock is not None ):\n _acquireLock()\n try :\n if wr in _handlerList:\n _handlerList.remove(wr)\n finally :\n _releaseLock()\n \ndef _addHandlerRef(handler):\n ''\n\n \n _acquireLock()\n try :\n _handlerList.append(weakref.ref(handler,_removeHandlerRef))\n finally :\n _releaseLock()\n \nclass Handler(Filterer):\n ''\n\n\n\n\n\n\n \n def __init__(self,level=NOTSET):\n ''\n\n\n \n Filterer.__init__(self)\n self._name=None\n self.level=_checkLevel(level)\n self.formatter=None\n \n _addHandlerRef(self)\n self.createLock()\n \n def get_name(self):\n return self._name\n \n def 
set_name(self,name):\n _acquireLock()\n try :\n if self._name in _handlers:\n del _handlers[self._name]\n self._name=name\n if name:\n _handlers[name]=self\n finally :\n _releaseLock()\n \n name=property(get_name,set_name)\n \n def createLock(self):\n ''\n\n \n if threading:\n self.lock=threading.RLock()\n else :\n self.lock=None\n \n def acquire(self):\n ''\n\n \n if self.lock:\n self.lock.acquire()\n \n def release(self):\n ''\n\n \n if self.lock:\n self.lock.release()\n \n def setLevel(self,level):\n ''\n\n \n self.level=_checkLevel(level)\n \n def format(self,record):\n ''\n\n\n\n\n \n if self.formatter:\n fmt=self.formatter\n else :\n fmt=_defaultFormatter\n return fmt.format(record)\n \n def emit(self,record):\n ''\n\n\n\n\n \n raise NotImplementedError('emit must be implemented '\n 'by Handler subclasses')\n \n def handle(self,record):\n ''\n\n\n\n\n\n\n \n rv=self.filter(record)\n if rv:\n self.acquire()\n try :\n self.emit(record)\n finally :\n self.release()\n return rv\n \n def setFormatter(self,fmt):\n ''\n\n \n self.formatter=fmt\n \n def flush(self):\n ''\n\n\n\n\n \n pass\n \n def close(self):\n ''\n\n\n\n\n\n\n \n \n _acquireLock()\n try :\n if self._name and self._name in _handlers:\n del _handlers[self._name]\n finally :\n _releaseLock()\n \n def handleError(self,record):\n ''\n\n\n\n\n\n\n\n\n\n \n if raiseExceptions and sys.stderr:\n try :\n traceback.print_exc(file=sys.stderr)\n sys.stderr.write('Logged from file %s, line %s\\n'%(\n record.filename,record.lineno))\n except IOError:\n pass\n \nclass StreamHandler(Handler):\n ''\n\n\n\n \n \n terminator='\\n'\n \n def __init__(self,stream=None ):\n ''\n\n\n\n \n Handler.__init__(self)\n if stream is None :\n stream=sys.stderr\n self.stream=stream\n \n def flush(self):\n ''\n\n \n self.acquire()\n try :\n if self.stream and hasattr(self.stream,\"flush\"):\n self.stream.flush()\n finally :\n self.release()\n \n def emit(self,record):\n ''\n\n\n\n\n\n\n\n\n \n try :\n msg=self.format(record)\n stream=self.stream\n stream.write(msg)\n stream.write(self.terminator)\n self.flush()\n except (KeyboardInterrupt,SystemExit):\n raise\n except :\n self.handleError(record)\n \nclass FileHandler(StreamHandler):\n ''\n\n \n def __init__(self,filename,mode='a',encoding=None ,delay=False ):\n ''\n\n \n \n \n self.baseFilename=os.path.abspath(filename)\n self.mode=mode\n self.encoding=encoding\n self.delay=delay\n if delay:\n \n \n Handler.__init__(self)\n self.stream=None\n else :\n StreamHandler.__init__(self,self._open())\n \n def close(self):\n ''\n\n \n self.acquire()\n try :\n if self.stream:\n self.flush()\n if hasattr(self.stream,\"close\"):\n self.stream.close()\n StreamHandler.close(self)\n self.stream=None\n finally :\n self.release()\n \n def _open(self):\n ''\n\n\n \n return open(self.baseFilename,self.mode,encoding=self.encoding)\n \n def emit(self,record):\n ''\n\n\n\n\n \n if self.stream is None :\n self.stream=self._open()\n StreamHandler.emit(self,record)\n \nclass _StderrHandler(StreamHandler):\n ''\n\n\n\n \n def __init__(self,level=NOTSET):\n ''\n\n \n Handler.__init__(self,level)\n \n @property\n def stream(self):\n return sys.stderr\n \n \nclass ConsoleHandler(Handler):\n ''\n\n\n \n \n def emit(self,record):\n ''\n\n\n\n\n\n\n \n try :\n msg=self.format(record)\n console.log(msg)\n except :\n self.handleError(record)\n \n_defaultLastResort=ConsoleHandler(WARNING)\nlastResort=_defaultLastResort\n\n\n\n\n\nclass PlaceHolder(object):\n ''\n\n\n\n \n def __init__(self,alogger):\n ''\n\n \n self.loggerMap={alogger:None 
}\n \n def append(self,alogger):\n ''\n\n \n if alogger not in self.loggerMap:\n self.loggerMap[alogger]=None\n \n \n \n \n_loggerClass=None\n\ndef setLoggerClass(klass):\n ''\n\n\n\n \n if klass !=Logger:\n if not issubclass(klass,Logger):\n raise TypeError(\"logger not derived from logging.Logger: \"\n +klass.__name__)\n global _loggerClass\n _loggerClass=klass\n \ndef getLoggerClass():\n ''\n\n \n \n return _loggerClass\n \nclass Manager(object):\n ''\n\n\n \n def __init__(self,rootnode):\n ''\n\n \n self.root=rootnode\n self.disable=0\n self.emittedNoHandlerWarning=False\n self.loggerDict={}\n self.loggerClass=None\n self.logRecordFactory=None\n \n def getLogger(self,name):\n ''\n\n\n\n\n\n\n\n\n \n rv=None\n if not isinstance(name,str):\n raise TypeError('A logger name must be a string')\n _acquireLock()\n try :\n if name in self.loggerDict:\n rv=self.loggerDict[name]\n if isinstance(rv,PlaceHolder):\n ph=rv\n rv=(self.loggerClass or _loggerClass)(name)\n rv.manager=self\n self.loggerDict[name]=rv\n self._fixupChildren(ph,rv)\n self._fixupParents(rv)\n else :\n rv=(self.loggerClass or _loggerClass)(name)\n rv.manager=self\n self.loggerDict[name]=rv\n self._fixupParents(rv)\n finally :\n _releaseLock()\n return rv\n \n def setLoggerClass(self,klass):\n ''\n\n \n if klass !=Logger:\n if not issubclass(klass,Logger):\n raise TypeError(\"logger not derived from logging.Logger: \"\n +klass.__name__)\n self.loggerClass=klass\n \n def setLogRecordFactory(self,factory):\n ''\n\n\n \n self.logRecordFactory=factory\n \n def _fixupParents(self,alogger):\n ''\n\n\n \n name=alogger.name\n i=name.rfind(\".\")\n rv=None\n while (i >0)and not rv:\n substr=name[:i]\n if substr not in self.loggerDict:\n self.loggerDict[substr]=PlaceHolder(alogger)\n else :\n obj=self.loggerDict[substr]\n if isinstance(obj,Logger):\n rv=obj\n else :\n assert isinstance(obj,PlaceHolder)\n obj.append(alogger)\n i=name.rfind(\".\",0,i -1)\n if not rv:\n rv=self.root\n alogger.parent=rv\n \n def _fixupChildren(self,ph,alogger):\n ''\n\n\n \n name=alogger.name\n namelen=len(name)\n for c in ph.loggerMap.keys():\n \n if c.parent.name[:namelen]!=name:\n alogger.parent=c.parent\n c.parent=alogger\n \n \n \n \n \nclass Logger(Filterer):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n def __init__(self,name,level=NOTSET):\n ''\n\n \n Filterer.__init__(self)\n self.name=name\n self.level=_checkLevel(level)\n self.parent=None\n self.propagate=True\n self.handlers=[]\n self.disabled=False\n \n def setLevel(self,level):\n ''\n\n \n self.level=_checkLevel(level)\n \n def debug(self,msg,*args,**kwargs):\n ''\n\n\n\n\n\n\n \n if self.isEnabledFor(DEBUG):\n self._log(DEBUG,msg,args,**kwargs)\n \n def info(self,msg,*args,**kwargs):\n ''\n\n\n\n\n\n\n \n if self.isEnabledFor(INFO):\n self._log(INFO,msg,args,**kwargs)\n \n def warning(self,msg,*args,**kwargs):\n ''\n\n\n\n\n\n\n \n if self.isEnabledFor(WARNING):\n self._log(WARNING,msg,args,**kwargs)\n \n def warn(self,msg,*args,**kwargs):\n warnings.warn(\"The 'warn' method is deprecated, \"\n \"use 'warning' instead\",DeprecationWarning,2)\n self.warning(msg,*args,**kwargs)\n \n def error(self,msg,*args,**kwargs):\n ''\n\n\n\n\n\n\n \n if self.isEnabledFor(ERROR):\n self._log(ERROR,msg,args,**kwargs)\n \n def exception(self,msg,*args,**kwargs):\n ''\n\n \n kwargs['exc_info']=True\n self.error(msg,*args,**kwargs)\n \n def critical(self,msg,*args,**kwargs):\n ''\n\n\n\n\n\n\n \n if self.isEnabledFor(CRITICAL):\n self._log(CRITICAL,msg,args,**kwargs)\n \n fatal=critical\n \n def 
log(self,level,msg,*args,**kwargs):\n ''\n\n\n\n\n\n\n \n if not isinstance(level,int):\n if raiseExceptions:\n raise TypeError(\"level must be an integer\")\n else :\n return\n if self.isEnabledFor(level):\n self._log(level,msg,args,**kwargs)\n \n def findCaller(self,stack_info=False ):\n ''\n\n\n \n f=currentframe()\n \n \n if f is not None :\n f=f.f_back\n rv=\"(unknown file)\",0,\"(unknown function)\",None\n while hasattr(f,\"f_code\"):\n co=f.f_code\n filename=os.path.normcase(co.co_filename)\n if filename ==_srcfile:\n f=f.f_back\n continue\n sinfo=None\n if stack_info:\n sio=io.StringIO()\n sio.write('Stack (most recent call last):\\n')\n traceback.print_stack(f,file=sio)\n sinfo=sio.getvalue()\n if sinfo[-1]=='\\n':\n sinfo=sinfo[:-1]\n sio.close()\n rv=(co.co_filename,f.f_lineno,co.co_name,sinfo)\n break\n return rv\n \n def makeRecord(self,name,level,fn,lno,msg,args,exc_info,\n func=None ,extra=None ,sinfo=None ):\n ''\n\n\n \n rv=_logRecordFactory(name,level,fn,lno,msg,args,exc_info,func,\n sinfo)\n if extra is not None :\n for key in extra:\n if (key in [\"message\",\"asctime\"])or (key in rv.__dict__):\n raise KeyError(\"Attempt to overwrite %r in LogRecord\"%key)\n rv.__dict__[key]=extra[key]\n return rv\n \n def _log(self,level,msg,args,exc_info=None ,extra=None ,stack_info=False ):\n ''\n\n\n \n sinfo=None\n if _srcfile:\n \n \n \n try :\n fn,lno,func,sinfo=self.findCaller(stack_info)\n except ValueError:\n fn,lno,func=\"(unknown file)\",0,\"(unknown function)\"\n else :\n fn,lno,func=\"(unknown file)\",0,\"(unknown function)\"\n if exc_info:\n if not isinstance(exc_info,tuple):\n exc_info=sys.exc_info()\n record=self.makeRecord(self.name,level,fn,lno,msg,args,\n exc_info,func,extra,sinfo)\n self.handle(record)\n \n def handle(self,record):\n ''\n\n\n\n\n \n if (not self.disabled)and self.filter(record):\n self.callHandlers(record)\n \n def addHandler(self,hdlr):\n ''\n\n \n _acquireLock()\n try :\n if not (hdlr in self.handlers):\n self.handlers.append(hdlr)\n finally :\n _releaseLock()\n \n def removeHandler(self,hdlr):\n ''\n\n \n _acquireLock()\n try :\n if hdlr in self.handlers:\n self.handlers.remove(hdlr)\n finally :\n _releaseLock()\n \n def hasHandlers(self):\n ''\n\n\n\n\n\n\n\n \n c=self\n rv=False\n while c:\n if c.handlers:\n rv=True\n break\n if not c.propagate:\n break\n else :\n c=c.parent\n return rv\n \n def callHandlers(self,record):\n ''\n\n\n\n\n\n\n\n \n c=self\n found=0\n while c:\n for hdlr in c.handlers:\n found=found+1\n if record.levelno >=hdlr.level:\n hdlr.handle(record)\n if not c.propagate:\n c=None\n else :\n c=c.parent\n if (found ==0):\n if lastResort:\n if record.levelno >=lastResort.level:\n lastResort.handle(record)\n elif raiseExceptions and not self.manager.emittedNoHandlerWarning:\n sys.stderr.write(\"No handlers could be found for logger\"\n \" \\\"%s\\\"\\n\"%self.name)\n self.manager.emittedNoHandlerWarning=True\n \n def getEffectiveLevel(self):\n ''\n\n\n\n\n \n logger=self\n while logger:\n if logger.level:\n return logger.level\n logger=logger.parent\n return NOTSET\n \n def isEnabledFor(self,level):\n ''\n\n \n if self.manager.disable >=level:\n return False\n return level >=self.getEffectiveLevel()\n \n def getChild(self,suffix):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n if self.root is not self:\n suffix='.'.join((self.name,suffix))\n return self.manager.getLogger(suffix)\n \nclass RootLogger(Logger):\n ''\n\n\n\n \n def __init__(self,level):\n ''\n\n \n Logger.__init__(self,\"root\",level)\n \n_loggerClass=Logger\n\nclass 
LoggerAdapter(object):\n ''\n\n\n \n \n def __init__(self,logger,extra):\n ''\n\n\n\n\n\n\n\n\n \n self.logger=logger\n self.extra=extra\n \n def process(self,msg,kwargs):\n ''\n\n\n\n\n\n\n\n \n kwargs[\"extra\"]=self.extra\n return msg,kwargs\n \n \n \n \n def debug(self,msg,*args,**kwargs):\n ''\n\n \n self.log(DEBUG,msg,*args,**kwargs)\n \n def info(self,msg,*args,**kwargs):\n ''\n\n \n self.log(INFO,msg,*args,**kwargs)\n \n def warning(self,msg,*args,**kwargs):\n ''\n\n \n self.log(WARNING,msg,*args,**kwargs)\n \n def warn(self,msg,*args,**kwargs):\n warnings.warn(\"The 'warn' method is deprecated, \"\n \"use 'warning' instead\",DeprecationWarning,2)\n self.warning(msg,*args,**kwargs)\n \n def error(self,msg,*args,**kwargs):\n ''\n\n \n self.log(ERROR,msg,*args,**kwargs)\n \n def exception(self,msg,*args,**kwargs):\n ''\n\n \n kwargs[\"exc_info\"]=True\n self.log(ERROR,msg,*args,**kwargs)\n \n def critical(self,msg,*args,**kwargs):\n ''\n\n \n self.log(CRITICAL,msg,*args,**kwargs)\n \n def log(self,level,msg,*args,**kwargs):\n ''\n\n\n \n if self.isEnabledFor(level):\n msg,kwargs=self.process(msg,kwargs)\n self.logger._log(level,msg,args,**kwargs)\n \n def isEnabledFor(self,level):\n ''\n\n \n if self.logger.manager.disable >=level:\n return False\n return level >=self.getEffectiveLevel()\n \n def setLevel(self,level):\n ''\n\n \n self.logger.setLevel(level)\n \n def getEffectiveLevel(self):\n ''\n\n \n return self.logger.getEffectiveLevel()\n \n def hasHandlers(self):\n ''\n\n \n return self.logger.hasHandlers()\n \nroot=RootLogger(WARNING)\nLogger.root=root\nLogger.manager=Manager(Logger.root)\n\n\n\n\n\nBASIC_FORMAT=\"%(levelname)s:%(name)s:%(message)s\"\n\ndef basicConfig(**kwargs):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n \n _acquireLock()\n try :\n if len(root.handlers)==0:\n handlers=kwargs.get(\"handlers\")\n if handlers is None :\n if \"stream\"in kwargs and \"filename\"in kwargs:\n raise ValueError(\"'stream' and 'filename' should not be \"\n \"specified together\")\n else :\n if \"stream\"in kwargs or \"filename\"in kwargs:\n raise ValueError(\"'stream' or 'filename' should not be \"\n \"specified together with 'handlers'\")\n if handlers is None :\n filename=kwargs.get(\"filename\")\n if filename:\n mode=kwargs.get(\"filemode\",'a')\n h=FileHandler(filename,mode)\n else :\n stream=kwargs.get(\"stream\")\n if stream:\n h=StreamHandler(stream)\n else :\n h=ConsoleHandler()\n handlers=[h]\n fs=kwargs.get(\"format\",BASIC_FORMAT)\n dfs=kwargs.get(\"datefmt\",None )\n style=kwargs.get(\"style\",'%')\n fmt=Formatter(fs,dfs,style)\n for h in handlers:\n if h.formatter is None :\n h.setFormatter(fmt)\n root.addHandler(h)\n level=kwargs.get(\"level\")\n if level is not None :\n root.setLevel(level)\n finally :\n _releaseLock()\n \n \n \n \n \n \ndef getLogger(name=None ):\n ''\n\n\n\n \n if name:\n return Logger.manager.getLogger(name)\n else :\n return root\n \ndef critical(msg,*args,**kwargs):\n ''\n\n\n\n \n if len(root.handlers)==0:\n basicConfig()\n root.critical(msg,*args,**kwargs)\n \nfatal=critical\n\ndef error(msg,*args,**kwargs):\n ''\n\n\n\n \n if len(root.handlers)==0:\n basicConfig()\n root.error(msg,*args,**kwargs)\n \ndef exception(msg,*args,**kwargs):\n ''\n\n\n\n \n kwargs['exc_info']=True\n error(msg,*args,**kwargs)\n \ndef warning(msg,*args,**kwargs):\n ''\n\n\n\n \n if len(root.handlers)==0:\n basicConfig()\n root.warning(msg,*args,**kwargs)\n \ndef warn(msg,*args,**kwargs):\n warnings.warn(\"The 
'warn' function is deprecated, \"\n \"use 'warning' instead\",DeprecationWarning,2)\n warning(msg,*args,**kwargs)\n \ndef info(msg,*args,**kwargs):\n ''\n\n\n\n \n if len(root.handlers)==0:\n basicConfig()\n root.info(msg,*args,**kwargs)\n \ndef debug(msg,*args,**kwargs):\n ''\n\n\n\n \n if len(root.handlers)==0:\n basicConfig()\n root.debug(msg,*args,**kwargs)\n \ndef log(level,msg,*args,**kwargs):\n ''\n\n\n\n \n if len(root.handlers)==0:\n basicConfig()\n root.log(level,msg,*args,**kwargs)\n \ndef disable(level):\n ''\n\n \n root.manager.disable=level\n \ndef shutdown(handlerList=_handlerList):\n ''\n\n\n\n\n \n for wr in reversed(handlerList[:]):\n \n \n try :\n h=wr()\n if h:\n try :\n h.acquire()\n h.flush()\n h.close()\n except (IOError,ValueError):\n \n \n \n \n pass\n finally :\n h.release()\n except :\n if raiseExceptions:\n raise\n \n \n \nimport atexit\natexit.register(shutdown)\n\n\n\nclass NullHandler(Handler):\n ''\n\n\n\n\n\n\n\n \n def handle(self,record):\n ''\n \n def emit(self,record):\n ''\n \n def createLock(self):\n self.lock=None\n \n \n \n_warnings_showwarning=None\n\ndef _showwarning(message,category,filename,lineno,file=None ,line=None ):\n ''\n\n\n\n\n\n \n if file is not None :\n if _warnings_showwarning is not None :\n _warnings_showwarning(message,category,filename,lineno,file,line)\n else :\n s=warnings.formatwarning(message,category,filename,lineno,line)\n logger=getLogger(\"py.warnings\")\n if not logger.handlers:\n logger.addHandler(NullHandler())\n logger.warning(\"%s\",s)\n \ndef captureWarnings(capture):\n ''\n\n\n\n \n global _warnings_showwarning\n if capture:\n if _warnings_showwarning is None :\n _warnings_showwarning=warnings.showwarning\n warnings.showwarning=_showwarning\n else :\n if _warnings_showwarning is not None :\n warnings.showwarning=_warnings_showwarning\n _warnings_showwarning=None\n", ["atexit", "browser", "io", "os", "string", "sys", "threading", "time", "traceback", "warnings", "weakref"], 1], "multiprocessing.connection": [".py", "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n__all__=['Client','Listener','Pipe']\n\nfrom queue import Queue\n\n\nfamilies=[None ]\n\n\nclass Listener(object):\n\n def __init__(self,address=None ,family=None ,backlog=1):\n self._backlog_queue=Queue(backlog)\n \n def accept(self):\n return Connection(*self._backlog_queue.get())\n \n def close(self):\n self._backlog_queue=None\n \n address=property(lambda self:self._backlog_queue)\n \n def __enter__(self):\n return self\n \n def __exit__(self,exc_type,exc_value,exc_tb):\n self.close()\n \n \ndef Client(address):\n _in,_out=Queue(),Queue()\n address.put((_out,_in))\n return Connection(_in,_out)\n \n \ndef Pipe(duplex=True ):\n a,b=Queue(),Queue()\n return Connection(a,b),Connection(b,a)\n \n \nclass Connection(object):\n\n def __init__(self,_in,_out):\n self._out=_out\n self._in=_in\n self.send=self.send_bytes=_out.put\n self.recv=self.recv_bytes=_in.get\n \n def poll(self,timeout=0.0):\n if self._in.qsize()>0:\n return True\n if timeout <=0.0:\n return False\n self._in.not_empty.acquire()\n self._in.not_empty.wait(timeout)\n self._in.not_empty.release()\n return self._in.qsize()>0\n \n def close(self):\n pass\n \n def __enter__(self):\n return self\n \n def __exit__(self,exc_type,exc_value,exc_tb):\n self.close()\n", ["queue"]], "multiprocessing.pool": [".py", "\n\n\n\n\n\n\n\n\n__all__=['Pool']\n\n\n\n\n\nimport threading\nimport queue\nimport itertools\nimport collections\nimport time\n\nfrom multiprocessing import 
Process,cpu_count,TimeoutError\nfrom multiprocessing.util import Finalize,debug\n\n\n\n\n\nRUN=0\nCLOSE=1\nTERMINATE=2\n\n\n\n\n\njob_counter=itertools.count()\n\ndef mapstar(args):\n return list(map(*args))\n \ndef starmapstar(args):\n return list(itertools.starmap(args[0],args[1]))\n \n \n \n \n \nclass MaybeEncodingError(Exception):\n ''\n \n \n def __init__(self,exc,value):\n self.exc=repr(exc)\n self.value=repr(value)\n super(MaybeEncodingError,self).__init__(self.exc,self.value)\n \n def __str__(self):\n return \"Error sending result: '%s'. Reason: '%s'\"%(self.value,\n self.exc)\n \n def __repr__(self):\n return \"\"%str(self)\n \n \ndef worker(inqueue,outqueue,initializer=None ,initargs=(),maxtasks=None ):\n assert maxtasks is None or (type(maxtasks)==int and maxtasks >0)\n put=outqueue.put\n get=inqueue.get\n if hasattr(inqueue,'_writer'):\n inqueue._writer.close()\n outqueue._reader.close()\n \n if initializer is not None :\n initializer(*initargs)\n \n completed=0\n while maxtasks is None or (maxtasks and completed 1\n task_batches=Pool._get_tasks(func,iterable,chunksize)\n result=IMapIterator(self._cache)\n self._taskqueue.put((((result._job,i,mapstar,(x,),{})\n for i,x in enumerate(task_batches)),result._set_length))\n return (item for chunk in result for item in chunk)\n \n def imap_unordered(self,func,iterable,chunksize=1):\n ''\n\n \n if self._state !=RUN:\n raise ValueError(\"Pool not running\")\n if chunksize ==1:\n result=IMapUnorderedIterator(self._cache)\n self._taskqueue.put((((result._job,i,func,(x,),{})\n for i,x in enumerate(iterable)),result._set_length))\n return result\n else :\n assert chunksize >1\n task_batches=Pool._get_tasks(func,iterable,chunksize)\n result=IMapUnorderedIterator(self._cache)\n self._taskqueue.put((((result._job,i,mapstar,(x,),{})\n for i,x in enumerate(task_batches)),result._set_length))\n return (item for chunk in result for item in chunk)\n \n def apply_async(self,func,args=(),kwds={},callback=None ,\n error_callback=None ):\n ''\n\n \n if self._state !=RUN:\n raise ValueError(\"Pool not running\")\n result=ApplyResult(self._cache,callback,error_callback)\n self._taskqueue.put(([(result._job,None ,func,args,kwds)],None ))\n return result\n \n def map_async(self,func,iterable,chunksize=None ,callback=None ,\n error_callback=None ):\n ''\n\n \n return self._map_async(func,iterable,mapstar,chunksize,callback,\n error_callback)\n \n def _map_async(self,func,iterable,mapper,chunksize=None ,callback=None ,\n error_callback=None ):\n ''\n\n \n if self._state !=RUN:\n raise ValueError(\"Pool not running\")\n if not hasattr(iterable,'__len__'):\n iterable=list(iterable)\n \n if chunksize is None :\n chunksize,extra=divmod(len(iterable),len(self._pool)*4)\n if extra:\n chunksize +=1\n if len(iterable)==0:\n chunksize=0\n \n task_batches=Pool._get_tasks(func,iterable,chunksize)\n result=MapResult(self._cache,chunksize,len(iterable),callback,\n error_callback=error_callback)\n self._taskqueue.put((((result._job,i,mapper,(x,),{})\n for i,x in enumerate(task_batches)),None ))\n return result\n \n @staticmethod\n def _handle_workers(pool):\n thread=threading.current_thread()\n \n \n \n while thread._state ==RUN or (pool._cache and thread._state !=TERMINATE):\n pool._maintain_pool()\n time.sleep(0.1)\n \n pool._taskqueue.put(None )\n debug('worker handler exiting')\n \n @staticmethod\n def _handle_tasks(taskqueue,put,outqueue,pool):\n thread=threading.current_thread()\n \n for taskseq,set_length in iter(taskqueue.get,None ):\n i=-1\n for i,task in 
enumerate(taskseq):\n if thread._state:\n debug('task handler found thread._state != RUN')\n break\n try :\n put(task)\n except IOError:\n debug('could not put task on queue')\n break\n else :\n if set_length:\n debug('doing set_length()')\n set_length(i+1)\n continue\n break\n else :\n debug('task handler got sentinel')\n \n \n try :\n \n debug('task handler sending sentinel to result handler')\n outqueue.put(None )\n \n \n debug('task handler sending sentinel to workers')\n for p in pool:\n put(None )\n except IOError:\n debug('task handler got IOError when sending sentinels')\n \n debug('task handler exiting')\n \n @staticmethod\n def _handle_results(outqueue,get,cache):\n thread=threading.current_thread()\n \n while 1:\n try :\n task=get()\n except (IOError,EOFError):\n debug('result handler got EOFError/IOError -- exiting')\n return\n \n if thread._state:\n assert thread._state ==TERMINATE\n debug('result handler found thread._state=TERMINATE')\n break\n \n if task is None :\n debug('result handler got sentinel')\n break\n \n job,i,obj=task\n try :\n cache[job]._set(i,obj)\n except KeyError:\n pass\n \n while cache and thread._state !=TERMINATE:\n try :\n task=get()\n except (IOError,EOFError):\n debug('result handler got EOFError/IOError -- exiting')\n return\n \n if task is None :\n debug('result handler ignoring extra sentinel')\n continue\n job,i,obj=task\n try :\n cache[job]._set(i,obj)\n except KeyError:\n pass\n \n if hasattr(outqueue,'_reader'):\n debug('ensuring that outqueue is not full')\n \n \n \n try :\n for i in range(10):\n if not outqueue._reader.poll():\n break\n get()\n except (IOError,EOFError):\n pass\n \n debug('result handler exiting: len(cache)=%s, thread._state=%s',\n len(cache),thread._state)\n \n @staticmethod\n def _get_tasks(func,it,size):\n it=iter(it)\n while 1:\n x=tuple(itertools.islice(it,size))\n if not x:\n return\n yield (func,x)\n \n def __reduce__(self):\n raise NotImplementedError(\n 'pool objects cannot be passed between processes or pickled'\n )\n \n def close(self):\n debug('closing pool')\n if self._state ==RUN:\n self._state=CLOSE\n self._worker_handler._state=CLOSE\n \n def terminate(self):\n debug('terminating pool')\n self._state=TERMINATE\n self._worker_handler._state=TERMINATE\n self._terminate()\n \n def join(self):\n debug('joining pool')\n assert self._state in (CLOSE,TERMINATE)\n self._worker_handler.join()\n self._task_handler.join()\n self._result_handler.join()\n for p in self._pool:\n p.join()\n \n @staticmethod\n def _help_stuff_finish(inqueue,task_handler,size):\n \n debug('removing tasks from inqueue until task handler finished')\n inqueue._rlock.acquire()\n while task_handler.is_alive()and inqueue._reader.poll():\n inqueue._reader.recv()\n time.sleep(0)\n \n @classmethod\n def _terminate_pool(cls,taskqueue,inqueue,outqueue,pool,\n worker_handler,task_handler,result_handler,cache):\n \n debug('finalizing pool')\n \n worker_handler._state=TERMINATE\n task_handler._state=TERMINATE\n \n debug('helping task handler/workers to finish')\n cls._help_stuff_finish(inqueue,task_handler,len(pool))\n \n assert result_handler.is_alive()or len(cache)==0\n \n result_handler._state=TERMINATE\n outqueue.put(None )\n \n \n \n debug('joining worker handler')\n if threading.current_thread()is not worker_handler:\n worker_handler.join()\n \n \n if pool and hasattr(pool[0],'terminate'):\n debug('terminating workers')\n for p in pool:\n if p.exitcode is None :\n p.terminate()\n \n debug('joining task handler')\n if threading.current_thread()is not 
task_handler:\n task_handler.join()\n \n debug('joining result handler')\n if threading.current_thread()is not result_handler:\n result_handler.join()\n \n if pool and hasattr(pool[0],'terminate'):\n debug('joining pool workers')\n for p in pool:\n if p.is_alive():\n \n debug('cleaning up worker %d'%p.pid)\n p.join()\n \n def __enter__(self):\n return self\n \n def __exit__(self,exc_type,exc_val,exc_tb):\n self.terminate()\n \n \n \n \n \nclass ApplyResult(object):\n\n def __init__(self,cache,callback,error_callback):\n self._event=threading.Event()\n self._job=next(job_counter)\n self._cache=cache\n self._callback=callback\n self._error_callback=error_callback\n cache[self._job]=self\n \n def ready(self):\n return self._event.is_set()\n \n def successful(self):\n assert self.ready()\n return self._success\n \n def wait(self,timeout=None ):\n self._event.wait(timeout)\n \n def get(self,timeout=None ):\n self.wait(timeout)\n if not self.ready():\n raise TimeoutError\n if self._success:\n return self._value\n else :\n raise self._value\n \n def _set(self,i,obj):\n self._success,self._value=obj\n if self._callback and self._success:\n self._callback(self._value)\n if self._error_callback and not self._success:\n self._error_callback(self._value)\n self._event.set()\n del self._cache[self._job]\n \nAsyncResult=ApplyResult\n\n\n\n\n\nclass MapResult(ApplyResult):\n\n def __init__(self,cache,chunksize,length,callback,error_callback):\n ApplyResult.__init__(self,cache,callback,\n error_callback=error_callback)\n self._success=True\n self._value=[None ]*length\n self._chunksize=chunksize\n if chunksize <=0:\n self._number_left=0\n self._event.set()\n del cache[self._job]\n else :\n self._number_left=length //chunksize+bool(length %chunksize)\n \n def _set(self,i,success_result):\n success,result=success_result\n if success:\n self._value[i *self._chunksize:(i+1)*self._chunksize]=result\n self._number_left -=1\n if self._number_left ==0:\n if self._callback:\n self._callback(self._value)\n del self._cache[self._job]\n self._event.set()\n else :\n self._success=False\n self._value=result\n if self._error_callback:\n self._error_callback(self._value)\n del self._cache[self._job]\n self._event.set()\n \n \n \n \n \nclass IMapIterator(object):\n\n def __init__(self,cache):\n self._cond=threading.Condition(threading.Lock())\n self._job=next(job_counter)\n self._cache=cache\n self._items=collections.deque()\n self._index=0\n self._length=None\n self._unsorted={}\n cache[self._job]=self\n \n def __iter__(self):\n return self\n \n def next(self,timeout=None ):\n self._cond.acquire()\n try :\n try :\n item=self._items.popleft()\n except IndexError:\n if self._index ==self._length:\n raise StopIteration\n self._cond.wait(timeout)\n try :\n item=self._items.popleft()\n except IndexError:\n if self._index ==self._length:\n raise StopIteration\n raise TimeoutError\n finally :\n self._cond.release()\n \n success,value=item\n if success:\n return value\n raise value\n \n __next__=next\n \n def _set(self,i,obj):\n self._cond.acquire()\n try :\n if self._index ==i:\n self._items.append(obj)\n self._index +=1\n while self._index in self._unsorted:\n obj=self._unsorted.pop(self._index)\n self._items.append(obj)\n self._index +=1\n self._cond.notify()\n else :\n self._unsorted[i]=obj\n \n if self._index ==self._length:\n del self._cache[self._job]\n finally :\n self._cond.release()\n \n def _set_length(self,length):\n self._cond.acquire()\n try :\n self._length=length\n if self._index ==self._length:\n 
self._cond.notify()\n del self._cache[self._job]\n finally :\n self._cond.release()\n \n \n \n \n \nclass IMapUnorderedIterator(IMapIterator):\n\n def _set(self,i,obj):\n self._cond.acquire()\n try :\n self._items.append(obj)\n self._index +=1\n self._cond.notify()\n if self._index ==self._length:\n del self._cache[self._job]\n finally :\n self._cond.release()\n \n \n \n \n \nclass ThreadPool(Pool):\n\n from .dummy import Process\n \n def __init__(self,processes=None ,initializer=None ,initargs=()):\n Pool.__init__(self,processes,initializer,initargs)\n \n def _setup_queues(self):\n self._inqueue=queue.Queue()\n self._outqueue=queue.Queue()\n self._quick_put=self._inqueue.put\n self._quick_get=self._outqueue.get\n \n @staticmethod\n def _help_stuff_finish(inqueue,task_handler,size):\n \n inqueue.not_empty.acquire()\n try :\n inqueue.queue.clear()\n inqueue.queue.extend([None ]*size)\n inqueue.not_empty.notify_all()\n finally :\n inqueue.not_empty.release()\n", ["collections", "itertools", "multiprocessing", "multiprocessing.Process", "multiprocessing.dummy", "multiprocessing.queues", "multiprocessing.util", "queue", "threading", "time"]], "multiprocessing.process": [".py", "\n\n\n\n\n\n\n\n\n__all__=['Process','current_process','active_children']\n\n\n\n\n\nimport os\nimport sys\nimport signal\nimport itertools\nfrom _weakrefset import WeakSet\n\n\nfrom _multiprocessing import Process\n\n\n\n\ntry :\n ORIGINAL_DIR=os.path.abspath(os.getcwd())\nexcept OSError:\n ORIGINAL_DIR=None\n \n \n \n \n \ndef current_process():\n ''\n\n \n return _current_process\n \ndef active_children():\n ''\n\n \n _cleanup()\n return list(_current_process._children)\n \n \n \n \n \ndef _cleanup():\n\n for p in list(_current_process._children):\n if p._popen.poll()is not None :\n _current_process._children.discard(p)\n \n \n \n \n \n \n \n \n \n \n \n \nclass AuthenticationString(bytes):\n def __reduce__(self):\n from .forking import Popen\n if not Popen.thread_is_spawning():\n raise TypeError(\n 'Pickling an AuthenticationString object is '\n 'disallowed for security reasons'\n )\n return AuthenticationString,(bytes(self),)\n \n \n \n \n \nclass _MainProcess(Process):\n\n def __init__(self):\n self._identity=()\n self._daemonic=False\n self._name='MainProcess'\n self._parent_pid=None\n self._popen=None\n self._counter=itertools.count(1)\n self._children=set()\n self._authkey=AuthenticationString(os.urandom(32))\n self._tempdir=None\n \n_current_process=_MainProcess()\ndel _MainProcess\n\n\n\n\n\n_exitcode_to_name={}\n\nfor name,signum in list(signal.__dict__.items()):\n if name[:3]=='SIG'and '_'not in name:\n _exitcode_to_name[-signum]=name\n \n \n_dangling=WeakSet()\n", ["_multiprocessing", "_weakrefset", "itertools", "multiprocessing.forking", "os", "signal", "sys"]], "multiprocessing.util": [".py", "\n\n\n\n\n\n\n\n\nimport sys\nimport functools\nimport os\nimport itertools\nimport weakref\nimport atexit\nimport threading\n\nfrom subprocess import _args_from_interpreter_flags\n\nfrom multiprocessing.process import current_process,active_children\n\n__all__=[\n'sub_debug','debug','info','sub_warning','get_logger',\n'log_to_stderr','get_temp_dir','register_after_fork',\n'is_exiting','Finalize','ForkAwareThreadLock','ForkAwareLocal',\n'SUBDEBUG','SUBWARNING',\n]\n\n\n\n\n\nNOTSET=0\nSUBDEBUG=5\nDEBUG=10\nINFO=20\nSUBWARNING=25\n\nLOGGER_NAME='multiprocessing'\nDEFAULT_LOGGING_FORMAT='[%(levelname)s/%(processName)s] %(message)s'\n\n_logger=None\n_log_to_stderr=False\n\ndef sub_debug(msg,*args):\n if _logger:\n 
_logger.log(SUBDEBUG,msg,*args)\n \ndef debug(msg,*args):\n if _logger:\n _logger.log(DEBUG,msg,*args)\n \ndef info(msg,*args):\n if _logger:\n _logger.log(INFO,msg,*args)\n \ndef sub_warning(msg,*args):\n if _logger:\n _logger.log(SUBWARNING,msg,*args)\n \ndef get_logger():\n ''\n\n \n global _logger\n import logging\n \n logging._acquireLock()\n try :\n if not _logger:\n \n _logger=logging.getLogger(LOGGER_NAME)\n _logger.propagate=0\n logging.addLevelName(SUBDEBUG,'SUBDEBUG')\n logging.addLevelName(SUBWARNING,'SUBWARNING')\n \n \n if hasattr(atexit,'unregister'):\n atexit.unregister(_exit_function)\n atexit.register(_exit_function)\n else :\n atexit._exithandlers.remove((_exit_function,(),{}))\n atexit._exithandlers.append((_exit_function,(),{}))\n \n finally :\n logging._releaseLock()\n \n return _logger\n \ndef log_to_stderr(level=None ):\n ''\n\n \n global _log_to_stderr\n import logging\n \n logger=get_logger()\n formatter=logging.Formatter(DEFAULT_LOGGING_FORMAT)\n handler=logging.StreamHandler()\n handler.setFormatter(formatter)\n logger.addHandler(handler)\n \n if level:\n logger.setLevel(level)\n _log_to_stderr=True\n return _logger\n \n \n \n \n \ndef get_temp_dir():\n\n if current_process()._tempdir is None :\n import shutil,tempfile\n tempdir=tempfile.mkdtemp(prefix='pymp-')\n info('created temp directory %s',tempdir)\n Finalize(None ,shutil.rmtree,args=[tempdir],exitpriority=-100)\n current_process()._tempdir=tempdir\n return current_process()._tempdir\n \n \n \n \n \n_afterfork_registry=weakref.WeakValueDictionary()\n_afterfork_counter=itertools.count()\n\ndef _run_after_forkers():\n items=list(_afterfork_registry.items())\n items.sort()\n for (index,ident,func),obj in items:\n try :\n func(obj)\n except Exception as e:\n info('after forker raised exception %s',e)\n \ndef register_after_fork(obj,func):\n _afterfork_registry[(next(_afterfork_counter),id(obj),func)]=obj\n \n \n \n \n \n_finalizer_registry={}\n_finalizer_counter=itertools.count()\n\n\nclass Finalize(object):\n ''\n\n \n def __init__(self,obj,callback,args=(),kwargs=None ,exitpriority=None ):\n assert exitpriority is None or type(exitpriority)is int\n \n if obj is not None :\n self._weakref=weakref.ref(obj,self)\n else :\n assert exitpriority is not None\n \n self._callback=callback\n self._args=args\n self._kwargs=kwargs or {}\n self._key=(exitpriority,next(_finalizer_counter))\n self._pid=os.getpid()\n \n _finalizer_registry[self._key]=self\n \n def __call__(self,wr=None ,\n \n \n _finalizer_registry=_finalizer_registry,\n sub_debug=sub_debug,getpid=os.getpid):\n ''\n\n \n try :\n del _finalizer_registry[self._key]\n except KeyError:\n sub_debug('finalizer no longer registered')\n else :\n if self._pid !=getpid():\n sub_debug('finalizer ignored because different process')\n res=None\n else :\n sub_debug('finalizer calling %s with args %s and kwargs %s',\n self._callback,self._args,self._kwargs)\n res=self._callback(*self._args,**self._kwargs)\n self._weakref=self._callback=self._args=\\\n self._kwargs=self._key=None\n return res\n \n def cancel(self):\n ''\n\n \n try :\n del _finalizer_registry[self._key]\n except KeyError:\n pass\n else :\n self._weakref=self._callback=self._args=\\\n self._kwargs=self._key=None\n \n def still_active(self):\n ''\n\n \n return self._key in _finalizer_registry\n \n def __repr__(self):\n try :\n obj=self._weakref()\n except (AttributeError,TypeError):\n obj=None\n \n if obj is None :\n return ''\n \n x=''\n \n \ndef _run_finalizers(minpriority=None ):\n ''\n\n\n\n\n \n if 
_finalizer_registry is None :\n \n \n \n return\n \n if minpriority is None :\n f=lambda p:p[0][0]is not None\n else :\n f=lambda p:p[0][0]is not None and p[0][0]>=minpriority\n \n items=[x for x in list(_finalizer_registry.items())if f(x)]\n items.sort(reverse=True )\n \n for key,finalizer in items:\n sub_debug('calling %s',finalizer)\n try :\n finalizer()\n except Exception:\n import traceback\n traceback.print_exc()\n \n if minpriority is None :\n _finalizer_registry.clear()\n \n \n \n \n \ndef is_exiting():\n ''\n\n \n return _exiting or _exiting is None\n \n_exiting=False\n\ndef _exit_function(info=info,debug=debug,_run_finalizers=_run_finalizers,\nactive_children=active_children,\ncurrent_process=current_process):\n\n\n\n\n global _exiting\n \n if not _exiting:\n _exiting=True\n \n info('process shutting down')\n debug('running all \"atexit\" finalizers with priority >= 0')\n _run_finalizers(0)\n \n if current_process()is not None :\n \n \n \n \n \n \n \n \n \n \n \n \n \n for p in active_children():\n if p._daemonic:\n info('calling terminate() for daemon %s',p.name)\n p._popen.terminate()\n \n for p in active_children():\n info('calling join() for process %s',p.name)\n p.join()\n \n debug('running the remaining \"atexit\" finalizers')\n _run_finalizers()\n \natexit.register(_exit_function)\n\n\n\n\n\nclass ForkAwareThreadLock(object):\n def __init__(self):\n self._reset()\n register_after_fork(self,ForkAwareThreadLock._reset)\n \n def _reset(self):\n self._lock=threading.Lock()\n self.acquire=self._lock.acquire\n self.release=self._lock.release\n \nclass ForkAwareLocal(threading.local):\n def __init__(self):\n register_after_fork(self,lambda obj:obj.__dict__.clear())\n def __reduce__(self):\n return type(self),()\n", ["atexit", "functools", "itertools", "logging", "multiprocessing.process", "os", "shutil", "subprocess", "sys", "tempfile", "threading", "traceback", "weakref"]], "multiprocessing": [".py", "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n__version__='0.70a1'\n\n__all__=[\n'Process','current_process','active_children','freeze_support',\n'Manager','Pipe','cpu_count','log_to_stderr','get_logger',\n'allow_connection_pickling','BufferTooShort','TimeoutError',\n'Lock','RLock','Semaphore','BoundedSemaphore','Condition',\n'Event','Barrier','Queue','SimpleQueue','JoinableQueue','Pool',\n'Value','Array','RawValue','RawArray','SUBDEBUG','SUBWARNING',\n]\n\n__author__='R. 
Oudkerk (r.m.oudkerk@gmail.com)'\n\n\n\n\n\nimport os\nimport sys\n\nfrom multiprocessing.process import Process,current_process,active_children\nfrom multiprocessing.util import SUBDEBUG,SUBWARNING\n\n\n\n\n\nclass ProcessError(Exception):\n pass\n \nclass BufferTooShort(ProcessError):\n pass\n \nclass TimeoutError(ProcessError):\n pass\n \nclass AuthenticationError(ProcessError):\n pass\n \nimport _multiprocessing\n\n\n\n\n\ndef Manager():\n ''\n\n\n\n\n \n from multiprocessing.managers import SyncManager\n m=SyncManager()\n m.start()\n return m\n \n \n \n \n \n \n \n \n \ndef cpu_count():\n ''\n\n \n if sys.platform =='win32':\n try :\n num=int(os.environ['NUMBER_OF_PROCESSORS'])\n except (ValueError,KeyError):\n num=0\n elif 'bsd'in sys.platform or sys.platform =='darwin':\n comm='/sbin/sysctl -n hw.ncpu'\n if sys.platform =='darwin':\n comm='/usr'+comm\n try :\n with os.popen(comm)as p:\n num=int(p.read())\n except ValueError:\n num=0\n else :\n try :\n num=os.sysconf('SC_NPROCESSORS_ONLN')\n except (ValueError,OSError,AttributeError):\n num=0\n \n if num >=1:\n return num\n else :\n raise NotImplementedError('cannot determine number of cpus')\n \ndef freeze_support():\n ''\n\n\n \n if sys.platform =='win32'and getattr(sys,'frozen',False ):\n from multiprocessing.forking import freeze_support\n freeze_support()\n \ndef get_logger():\n ''\n\n \n from multiprocessing.util import get_logger\n return get_logger()\n \ndef log_to_stderr(level=None ):\n ''\n\n \n from multiprocessing.util import log_to_stderr\n return log_to_stderr(level)\n \n \n \n \n \n \n \n \n \n \n \n \n \n \ndef Lock():\n ''\n\n \n from multiprocessing.synchronize import Lock\n return Lock()\n \ndef RLock():\n ''\n\n \n from multiprocessing.synchronize import RLock\n return RLock()\n \ndef Condition(lock=None ):\n ''\n\n \n from multiprocessing.synchronize import Condition\n return Condition(lock)\n \ndef Semaphore(value=1):\n ''\n\n \n from multiprocessing.synchronize import Semaphore\n return Semaphore(value)\n \ndef BoundedSemaphore(value=1):\n ''\n\n \n from multiprocessing.synchronize import BoundedSemaphore\n return BoundedSemaphore(value)\n \ndef Event():\n ''\n\n \n from multiprocessing.synchronize import Event\n return Event()\n \ndef Barrier(parties,action=None ,timeout=None ):\n ''\n\n \n from multiprocessing.synchronize import Barrier\n return Barrier(parties,action,timeout)\n \ndef Queue(maxsize=0):\n ''\n\n \n from multiprocessing.queues import Queue\n return Queue(maxsize)\n \ndef JoinableQueue(maxsize=0):\n ''\n\n \n from multiprocessing.queues import JoinableQueue\n return JoinableQueue(maxsize)\n \ndef SimpleQueue():\n ''\n\n \n from multiprocessing.queues import SimpleQueue\n return SimpleQueue()\n \ndef Pool(processes=None ,initializer=None ,initargs=(),maxtasksperchild=None ):\n ''\n\n \n from multiprocessing.pool import Pool\n return Pool(processes,initializer,initargs,maxtasksperchild)\n \ndef RawValue(typecode_or_type,*args):\n ''\n\n \n from multiprocessing.sharedctypes import RawValue\n return RawValue(typecode_or_type,*args)\n \ndef RawArray(typecode_or_type,size_or_initializer):\n ''\n\n \n from multiprocessing.sharedctypes import RawArray\n return RawArray(typecode_or_type,size_or_initializer)\n \ndef Value(typecode_or_type,*args,lock=True ):\n ''\n\n \n from multiprocessing.sharedctypes import Value\n return Value(typecode_or_type,*args,lock=lock)\n \ndef Array(typecode_or_type,size_or_initializer,*,lock=True ):\n ''\n\n \n from multiprocessing.sharedctypes import Array\n return 
Array(typecode_or_type,size_or_initializer,lock=lock)\n \n \n \n \n \nif sys.platform =='win32':\n\n def set_executable(executable):\n ''\n\n\n\n \n from multiprocessing.forking import set_executable\n set_executable(executable)\n \n __all__ +=['set_executable']\n", ["_multiprocessing", "multiprocessing.forking", "multiprocessing.managers", "multiprocessing.pool", "multiprocessing.process", "multiprocessing.queues", "multiprocessing.sharedctypes", "multiprocessing.synchronize", "multiprocessing.util", "os", "sys"], 1], "multiprocessing.dummy.connection": [".py", "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n__all__=['Client','Listener','Pipe']\n\nfrom queue import Queue\n\n\nfamilies=[None ]\n\n\nclass Listener(object):\n\n def __init__(self,address=None ,family=None ,backlog=1):\n self._backlog_queue=Queue(backlog)\n \n def accept(self):\n return Connection(*self._backlog_queue.get())\n \n def close(self):\n self._backlog_queue=None\n \n address=property(lambda self:self._backlog_queue)\n \n def __enter__(self):\n return self\n \n def __exit__(self,exc_type,exc_value,exc_tb):\n self.close()\n \n \ndef Client(address):\n _in,_out=Queue(),Queue()\n address.put((_out,_in))\n return Connection(_in,_out)\n \n \ndef Pipe(duplex=True ):\n a,b=Queue(),Queue()\n return Connection(a,b),Connection(b,a)\n \n \nclass Connection(object):\n\n def __init__(self,_in,_out):\n self._out=_out\n self._in=_in\n self.send=self.send_bytes=_out.put\n self.recv=self.recv_bytes=_in.get\n \n def poll(self,timeout=0.0):\n if self._in.qsize()>0:\n return True\n if timeout <=0.0:\n return False\n self._in.not_empty.acquire()\n self._in.not_empty.wait(timeout)\n self._in.not_empty.release()\n return self._in.qsize()>0\n \n def close(self):\n pass\n \n def __enter__(self):\n return self\n \n def __exit__(self,exc_type,exc_value,exc_tb):\n self.close()\n", ["queue"]], "multiprocessing.dummy": [".py", "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n__all__=[\n'Process','current_process','active_children','freeze_support',\n'Lock','RLock','Semaphore','BoundedSemaphore','Condition',\n'Event','Barrier','Queue','Manager','Pipe','Pool','JoinableQueue'\n]\n\n\n\n\n\nimport threading\nimport sys\nimport weakref\n\n\n\nfrom multiprocessing.dummy.connection import Pipe\nfrom threading import Lock,RLock,Semaphore,BoundedSemaphore\nfrom threading import Event,Condition,Barrier\nfrom queue import Queue\n\n\n\n\n\nclass DummyProcess(threading.Thread):\n\n def __init__(self,group=None ,target=None ,name=None ,args=(),kwargs={}):\n threading.Thread.__init__(self,group,target,name,args,kwargs)\n self._pid=None\n self._children=weakref.WeakKeyDictionary()\n self._start_called=False\n self._parent=current_process()\n \n def start(self):\n assert self._parent is current_process()\n self._start_called=True\n if hasattr(self._parent,'_children'):\n self._parent._children[self]=None\n threading.Thread.start(self)\n \n @property\n def exitcode(self):\n if self._start_called and not self.is_alive():\n return 0\n else :\n return None\n \n \n \n \n \nProcess=DummyProcess\ncurrent_process=threading.current_thread\ncurrent_process()._children=weakref.WeakKeyDictionary()\n\ndef active_children():\n children=current_process()._children\n for p in list(children):\n if not p.is_alive():\n children.pop(p,None )\n return list(children)\n \ndef freeze_support():\n pass\n \n \n \n \n \nclass Namespace(object):\n def __init__(self,**kwds):\n self.__dict__.update(kwds)\n def __repr__(self):\n 
items=list(self.__dict__.items())\n temp=[]\n for name,value in items:\n if not name.startswith('_'):\n temp.append('%s=%r'%(name,value))\n temp.sort()\n return 'Namespace(%s)'%str.join(', ',temp)\n \ndict=dict\nlist=list\n\n\n\n\n\nclass Value(object):\n def __init__(self,typecode,value,lock=True ):\n self._typecode=typecode\n self._value=value\n def _get(self):\n return self._value\n def _set(self,value):\n self._value=value\n value=property(_get,_set)\n def __repr__(self):\n return '<%r(%r, %r)>'%(type(self).__name__,self._typecode,self._value)\n \ndef Manager():\n return sys.modules[__name__]\n \ndef shutdown():\n pass\n \ndef Pool(processes=None ,initializer=None ,initargs=()):\n from multiprocessing.pool import ThreadPool\n return ThreadPool(processes,initializer,initargs)\n \nJoinableQueue=Queue\n", ["multiprocessing.dummy.connection", "multiprocessing.pool", "queue", "sys", "threading", "weakref"], 1], "pydoc_data.topics": [".py", "\n\ntopics={'assert':'\\nThe ``assert`` statement\\n************************\\n\\nAssert statements are a convenient way to insert debugging assertions\\ninto a program:\\n\\n assert_stmt ::= \"assert\" expression [\",\" expression]\\n\\nThe simple form, ``assert expression``, is equivalent to\\n\\n if __debug__:\\n if not expression: raise AssertionError\\n\\nThe extended form, ``assert expression1, expression2``, is equivalent\\nto\\n\\n if __debug__:\\n if not expression1: raise AssertionError(expression2)\\n\\nThese equivalences assume that ``__debug__`` and ``AssertionError``\\nrefer to the built-in variables with those names. In the current\\nimplementation, the built-in variable ``__debug__`` is ``True`` under\\nnormal circumstances, ``False`` when optimization is requested\\n(command line option -O). The current code generator emits no code\\nfor an assert statement when optimization is requested at compile\\ntime. Note that it is unnecessary to include the source code for the\\nexpression that failed in the error message; it will be displayed as\\npart of the stack trace.\\n\\nAssignments to ``__debug__`` are illegal. The value for the built-in\\nvariable is determined when the interpreter starts.\\n',\n'assignment':'\\nAssignment statements\\n*********************\\n\\nAssignment statements are used to (re)bind names to values and to\\nmodify attributes or items of mutable objects:\\n\\n assignment_stmt ::= (target_list \"=\")+ (expression_list | yield_expression)\\n target_list ::= target (\",\" target)* [\",\"]\\n target ::= identifier\\n | \"(\" target_list \")\"\\n | \"[\" target_list \"]\"\\n | attributeref\\n | subscription\\n | slicing\\n | \"*\" target\\n\\n(See section *Primaries* for the syntax definitions for the last three\\nsymbols.)\\n\\nAn assignment statement evaluates the expression list (remember that\\nthis can be a single expression or a comma-separated list, the latter\\nyielding a tuple) and assigns the single resulting object to each of\\nthe target lists, from left to right.\\n\\nAssignment is defined recursively depending on the form of the target\\n(list). When a target is part of a mutable object (an attribute\\nreference, subscription or slicing), the mutable object must\\nultimately perform the assignment and decide about its validity, and\\nmay raise an exception if the assignment is unacceptable. 
The rules\\nobserved by various types and the exceptions raised are given with the\\ndefinition of the object types (see section *The standard type\\nhierarchy*).\\n\\nAssignment of an object to a target list, optionally enclosed in\\nparentheses or square brackets, is recursively defined as follows.\\n\\n* If the target list is a single target: The object is assigned to\\n that target.\\n\\n* If the target list is a comma-separated list of targets: The object\\n must be an iterable with the same number of items as there are\\n targets in the target list, and the items are assigned, from left to\\n right, to the corresponding targets.\\n\\n * If the target list contains one target prefixed with an asterisk,\\n called a \"starred\" target: The object must be a sequence with at\\n least as many items as there are targets in the target list, minus\\n one. The first items of the sequence are assigned, from left to\\n right, to the targets before the starred target. The final items\\n of the sequence are assigned to the targets after the starred\\n target. A list of the remaining items in the sequence is then\\n assigned to the starred target (the list can be empty).\\n\\n * Else: The object must be a sequence with the same number of items\\n as there are targets in the target list, and the items are\\n assigned, from left to right, to the corresponding targets.\\n\\nAssignment of an object to a single target is recursively defined as\\nfollows.\\n\\n* If the target is an identifier (name):\\n\\n * If the name does not occur in a ``global`` or ``nonlocal``\\n statement in the current code block: the name is bound to the\\n object in the current local namespace.\\n\\n * Otherwise: the name is bound to the object in the global namespace\\n or the outer namespace determined by ``nonlocal``, respectively.\\n\\n The name is rebound if it was already bound. This may cause the\\n reference count for the object previously bound to the name to reach\\n zero, causing the object to be deallocated and its destructor (if it\\n has one) to be called.\\n\\n* If the target is a target list enclosed in parentheses or in square\\n brackets: The object must be an iterable with the same number of\\n items as there are targets in the target list, and its items are\\n assigned, from left to right, to the corresponding targets.\\n\\n* If the target is an attribute reference: The primary expression in\\n the reference is evaluated. It should yield an object with\\n assignable attributes; if this is not the case, ``TypeError`` is\\n raised. That object is then asked to assign the assigned object to\\n the given attribute; if it cannot perform the assignment, it raises\\n an exception (usually but not necessarily ``AttributeError``).\\n\\n Note: If the object is a class instance and the attribute reference\\n occurs on both sides of the assignment operator, the RHS expression,\\n ``a.x`` can access either an instance attribute or (if no instance\\n attribute exists) a class attribute. 
The LHS target ``a.x`` is\\n always set as an instance attribute, creating it if necessary.\\n Thus, the two occurrences of ``a.x`` do not necessarily refer to the\\n same attribute: if the RHS expression refers to a class attribute,\\n the LHS creates a new instance attribute as the target of the\\n assignment:\\n\\n class Cls:\\n x = 3 # class variable\\n inst = Cls()\\n inst.x = inst.x + 1 # writes inst.x as 4 leaving Cls.x as 3\\n\\n This description does not necessarily apply to descriptor\\n attributes, such as properties created with ``property()``.\\n\\n* If the target is a subscription: The primary expression in the\\n reference is evaluated. It should yield either a mutable sequence\\n object (such as a list) or a mapping object (such as a dictionary).\\n Next, the subscript expression is evaluated.\\n\\n If the primary is a mutable sequence object (such as a list), the\\n subscript must yield an integer. If it is negative, the sequence\\'s\\n length is added to it. The resulting value must be a nonnegative\\n integer less than the sequence\\'s length, and the sequence is asked\\n to assign the assigned object to its item with that index. If the\\n index is out of range, ``IndexError`` is raised (assignment to a\\n subscripted sequence cannot add new items to a list).\\n\\n If the primary is a mapping object (such as a dictionary), the\\n subscript must have a type compatible with the mapping\\'s key type,\\n and the mapping is then asked to create a key/datum pair which maps\\n the subscript to the assigned object. This can either replace an\\n existing key/value pair with the same key value, or insert a new\\n key/value pair (if no key with the same value existed).\\n\\n For user-defined objects, the ``__setitem__()`` method is called\\n with appropriate arguments.\\n\\n* If the target is a slicing: The primary expression in the reference\\n is evaluated. It should yield a mutable sequence object (such as a\\n list). The assigned object should be a sequence object of the same\\n type. Next, the lower and upper bound expressions are evaluated,\\n insofar they are present; defaults are zero and the sequence\\'s\\n length. The bounds should evaluate to integers. If either bound is\\n negative, the sequence\\'s length is added to it. The resulting\\n bounds are clipped to lie between zero and the sequence\\'s length,\\n inclusive. Finally, the sequence object is asked to replace the\\n slice with the items of the assigned sequence. The length of the\\n slice may be different from the length of the assigned sequence,\\n thus changing the length of the target sequence, if the object\\n allows it.\\n\\n**CPython implementation detail:** In the current implementation, the\\nsyntax for targets is taken to be the same as for expressions, and\\ninvalid syntax is rejected during the code generation phase, causing\\nless detailed error messages.\\n\\nWARNING: Although the definition of assignment implies that overlaps\\nbetween the left-hand side and the right-hand side are \\'safe\\' (for\\nexample ``a, b = b, a`` swaps two variables), overlaps *within* the\\ncollection of assigned-to variables are not safe! 
For instance, the\\nfollowing program prints ``[0, 2]``:\\n\\n x = [0, 1]\\n i = 0\\n i, x[i] = 1, 2\\n print(x)\\n\\nSee also:\\n\\n **PEP 3132** - Extended Iterable Unpacking\\n The specification for the ``*target`` feature.\\n\\n\\nAugmented assignment statements\\n===============================\\n\\nAugmented assignment is the combination, in a single statement, of a\\nbinary operation and an assignment statement:\\n\\n augmented_assignment_stmt ::= augtarget augop (expression_list | yield_expression)\\n augtarget ::= identifier | attributeref | subscription | slicing\\n augop ::= \"+=\" | \"-=\" | \"*=\" | \"/=\" | \"//=\" | \"%=\" | \"**=\"\\n | \">>=\" | \"<<=\" | \"&=\" | \"^=\" | \"|=\"\\n\\n(See section *Primaries* for the syntax definitions for the last three\\nsymbols.)\\n\\nAn augmented assignment evaluates the target (which, unlike normal\\nassignment statements, cannot be an unpacking) and the expression\\nlist, performs the binary operation specific to the type of assignment\\non the two operands, and assigns the result to the original target.\\nThe target is only evaluated once.\\n\\nAn augmented assignment expression like ``x += 1`` can be rewritten as\\n``x = x + 1`` to achieve a similar, but not exactly equal effect. In\\nthe augmented version, ``x`` is only evaluated once. Also, when\\npossible, the actual operation is performed *in-place*, meaning that\\nrather than creating a new object and assigning that to the target,\\nthe old object is modified instead.\\n\\nWith the exception of assigning to tuples and multiple targets in a\\nsingle statement, the assignment done by augmented assignment\\nstatements is handled the same way as normal assignments. Similarly,\\nwith the exception of the possible *in-place* behavior, the binary\\noperation performed by augmented assignment is the same as the normal\\nbinary operations.\\n\\nFor targets which are attribute references, the same *caveat about\\nclass and instance attributes* applies as for regular assignments.\\n',\n'atom-identifiers':'\\nIdentifiers (Names)\\n*******************\\n\\nAn identifier occurring as an atom is a name. See section\\n*Identifiers and keywords* for lexical definition and section *Naming\\nand binding* for documentation of naming and binding.\\n\\nWhen the name is bound to an object, evaluation of the atom yields\\nthat object. When a name is not bound, an attempt to evaluate it\\nraises a ``NameError`` exception.\\n\\n**Private name mangling:** When an identifier that textually occurs in\\na class definition begins with two or more underscore characters and\\ndoes not end in two or more underscores, it is considered a *private\\nname* of that class. Private names are transformed to a longer form\\nbefore code is generated for them. The transformation inserts the\\nclass name in front of the name, with leading underscores removed, and\\na single underscore inserted in front of the class name. For example,\\nthe identifier ``__spam`` occurring in a class named ``Ham`` will be\\ntransformed to ``_Ham__spam``. This transformation is independent of\\nthe syntactical context in which the identifier is used. If the\\ntransformed name is extremely long (longer than 255 characters),\\nimplementation defined truncation may happen. 
If the class name\\nconsists only of underscores, no transformation is done.\\n',\n'atom-literals':\"\\nLiterals\\n********\\n\\nPython supports string and bytes literals and various numeric\\nliterals:\\n\\n literal ::= stringliteral | bytesliteral\\n | integer | floatnumber | imagnumber\\n\\nEvaluation of a literal yields an object of the given type (string,\\nbytes, integer, floating point number, complex number) with the given\\nvalue. The value may be approximated in the case of floating point\\nand imaginary (complex) literals. See section *Literals* for details.\\n\\nAll literals correspond to immutable data types, and hence the\\nobject's identity is less important than its value. Multiple\\nevaluations of literals with the same value (either the same\\noccurrence in the program text or a different occurrence) may obtain\\nthe same object or a different object with the same value.\\n\",\n'attribute-access':'\\nCustomizing attribute access\\n****************************\\n\\nThe following methods can be defined to customize the meaning of\\nattribute access (use of, assignment to, or deletion of ``x.name``)\\nfor class instances.\\n\\nobject.__getattr__(self, name)\\n\\n Called when an attribute lookup has not found the attribute in the\\n usual places (i.e. it is not an instance attribute nor is it found\\n in the class tree for ``self``). ``name`` is the attribute name.\\n This method should return the (computed) attribute value or raise\\n an ``AttributeError`` exception.\\n\\n Note that if the attribute is found through the normal mechanism,\\n ``__getattr__()`` is not called. (This is an intentional asymmetry\\n between ``__getattr__()`` and ``__setattr__()``.) This is done both\\n for efficiency reasons and because otherwise ``__getattr__()``\\n would have no way to access other attributes of the instance. Note\\n that at least for instance variables, you can fake total control by\\n not inserting any values in the instance attribute dictionary (but\\n instead inserting them in another object). See the\\n ``__getattribute__()`` method below for a way to actually get total\\n control over attribute access.\\n\\nobject.__getattribute__(self, name)\\n\\n Called unconditionally to implement attribute accesses for\\n instances of the class. If the class also defines\\n ``__getattr__()``, the latter will not be called unless\\n ``__getattribute__()`` either calls it explicitly or raises an\\n ``AttributeError``. This method should return the (computed)\\n attribute value or raise an ``AttributeError`` exception. In order\\n to avoid infinite recursion in this method, its implementation\\n should always call the base class method with the same name to\\n access any attributes it needs, for example,\\n ``object.__getattribute__(self, name)``.\\n\\n Note: This method may still be bypassed when looking up special methods\\n as the result of implicit invocation via language syntax or\\n built-in functions. See *Special method lookup*.\\n\\nobject.__setattr__(self, name, value)\\n\\n Called when an attribute assignment is attempted. This is called\\n instead of the normal mechanism (i.e. store the value in the\\n instance dictionary). 
*name* is the attribute name, *value* is the\\n value to be assigned to it.\\n\\n If ``__setattr__()`` wants to assign to an instance attribute, it\\n should call the base class method with the same name, for example,\\n ``object.__setattr__(self, name, value)``.\\n\\nobject.__delattr__(self, name)\\n\\n Like ``__setattr__()`` but for attribute deletion instead of\\n assignment. This should only be implemented if ``del obj.name`` is\\n meaningful for the object.\\n\\nobject.__dir__(self)\\n\\n Called when ``dir()`` is called on the object. A sequence must be\\n returned. ``dir()`` converts the returned sequence to a list and\\n sorts it.\\n\\n\\nImplementing Descriptors\\n========================\\n\\nThe following methods only apply when an instance of the class\\ncontaining the method (a so-called *descriptor* class) appears in an\\n*owner* class (the descriptor must be in either the owner\\'s class\\ndictionary or in the class dictionary for one of its parents). In the\\nexamples below, \"the attribute\" refers to the attribute whose name is\\nthe key of the property in the owner class\\' ``__dict__``.\\n\\nobject.__get__(self, instance, owner)\\n\\n Called to get the attribute of the owner class (class attribute\\n access) or of an instance of that class (instance attribute\\n access). *owner* is always the owner class, while *instance* is the\\n instance that the attribute was accessed through, or ``None`` when\\n the attribute is accessed through the *owner*. This method should\\n return the (computed) attribute value or raise an\\n ``AttributeError`` exception.\\n\\nobject.__set__(self, instance, value)\\n\\n Called to set the attribute on an instance *instance* of the owner\\n class to a new value, *value*.\\n\\nobject.__delete__(self, instance)\\n\\n Called to delete the attribute on an instance *instance* of the\\n owner class.\\n\\n\\nInvoking Descriptors\\n====================\\n\\nIn general, a descriptor is an object attribute with \"binding\\nbehavior\", one whose attribute access has been overridden by methods\\nin the descriptor protocol: ``__get__()``, ``__set__()``, and\\n``__delete__()``. If any of those methods are defined for an object,\\nit is said to be a descriptor.\\n\\nThe default behavior for attribute access is to get, set, or delete\\nthe attribute from an object\\'s dictionary. For instance, ``a.x`` has a\\nlookup chain starting with ``a.__dict__[\\'x\\']``, then\\n``type(a).__dict__[\\'x\\']``, and continuing through the base classes of\\n``type(a)`` excluding metaclasses.\\n\\nHowever, if the looked-up value is an object defining one of the\\ndescriptor methods, then Python may override the default behavior and\\ninvoke the descriptor method instead. 
Where this occurs in the\\nprecedence chain depends on which descriptor methods were defined and\\nhow they were called.\\n\\nThe starting point for descriptor invocation is a binding, ``a.x``.\\nHow the arguments are assembled depends on ``a``:\\n\\nDirect Call\\n The simplest and least common call is when user code directly\\n invokes a descriptor method: ``x.__get__(a)``.\\n\\nInstance Binding\\n If binding to an object instance, ``a.x`` is transformed into the\\n call: ``type(a).__dict__[\\'x\\'].__get__(a, type(a))``.\\n\\nClass Binding\\n If binding to a class, ``A.x`` is transformed into the call:\\n ``A.__dict__[\\'x\\'].__get__(None, A)``.\\n\\nSuper Binding\\n If ``a`` is an instance of ``super``, then the binding ``super(B,\\n obj).m()`` searches ``obj.__class__.__mro__`` for the base class\\n ``A`` immediately preceding ``B`` and then invokes the descriptor\\n with the call: ``A.__dict__[\\'m\\'].__get__(obj, obj.__class__)``.\\n\\nFor instance bindings, the precedence of descriptor invocation depends\\non the which descriptor methods are defined. A descriptor can define\\nany combination of ``__get__()``, ``__set__()`` and ``__delete__()``.\\nIf it does not define ``__get__()``, then accessing the attribute will\\nreturn the descriptor object itself unless there is a value in the\\nobject\\'s instance dictionary. If the descriptor defines ``__set__()``\\nand/or ``__delete__()``, it is a data descriptor; if it defines\\nneither, it is a non-data descriptor. Normally, data descriptors\\ndefine both ``__get__()`` and ``__set__()``, while non-data\\ndescriptors have just the ``__get__()`` method. Data descriptors with\\n``__set__()`` and ``__get__()`` defined always override a redefinition\\nin an instance dictionary. In contrast, non-data descriptors can be\\noverridden by instances.\\n\\nPython methods (including ``staticmethod()`` and ``classmethod()``)\\nare implemented as non-data descriptors. Accordingly, instances can\\nredefine and override methods. This allows individual instances to\\nacquire behaviors that differ from other instances of the same class.\\n\\nThe ``property()`` function is implemented as a data descriptor.\\nAccordingly, instances cannot override the behavior of a property.\\n\\n\\n__slots__\\n=========\\n\\nBy default, instances of classes have a dictionary for attribute\\nstorage. This wastes space for objects having very few instance\\nvariables. The space consumption can become acute when creating large\\nnumbers of instances.\\n\\nThe default can be overridden by defining *__slots__* in a class\\ndefinition. The *__slots__* declaration takes a sequence of instance\\nvariables and reserves just enough space in each instance to hold a\\nvalue for each variable. Space is saved because *__dict__* is not\\ncreated for each instance.\\n\\nobject.__slots__\\n\\n This class variable can be assigned a string, iterable, or sequence\\n of strings with variable names used by instances. If defined in a\\n class, *__slots__* reserves space for the declared variables and\\n prevents the automatic creation of *__dict__* and *__weakref__* for\\n each instance.\\n\\n\\nNotes on using *__slots__*\\n--------------------------\\n\\n* When inheriting from a class without *__slots__*, the *__dict__*\\n attribute of that class will always be accessible, so a *__slots__*\\n definition in the subclass is meaningless.\\n\\n* Without a *__dict__* variable, instances cannot be assigned new\\n variables not listed in the *__slots__* definition. 
Attempts to\\n assign to an unlisted variable name raises ``AttributeError``. If\\n dynamic assignment of new variables is desired, then add\\n ``\\'__dict__\\'`` to the sequence of strings in the *__slots__*\\n declaration.\\n\\n* Without a *__weakref__* variable for each instance, classes defining\\n *__slots__* do not support weak references to its instances. If weak\\n reference support is needed, then add ``\\'__weakref__\\'`` to the\\n sequence of strings in the *__slots__* declaration.\\n\\n* *__slots__* are implemented at the class level by creating\\n descriptors (*Implementing Descriptors*) for each variable name. As\\n a result, class attributes cannot be used to set default values for\\n instance variables defined by *__slots__*; otherwise, the class\\n attribute would overwrite the descriptor assignment.\\n\\n* The action of a *__slots__* declaration is limited to the class\\n where it is defined. As a result, subclasses will have a *__dict__*\\n unless they also define *__slots__* (which must only contain names\\n of any *additional* slots).\\n\\n* If a class defines a slot also defined in a base class, the instance\\n variable defined by the base class slot is inaccessible (except by\\n retrieving its descriptor directly from the base class). This\\n renders the meaning of the program undefined. In the future, a\\n check may be added to prevent this.\\n\\n* Nonempty *__slots__* does not work for classes derived from\\n \"variable-length\" built-in types such as ``int``, ``str`` and\\n ``tuple``.\\n\\n* Any non-string iterable may be assigned to *__slots__*. Mappings may\\n also be used; however, in the future, special meaning may be\\n assigned to the values corresponding to each key.\\n\\n* *__class__* assignment works only if both classes have the same\\n *__slots__*.\\n',\n'attribute-references':'\\nAttribute references\\n********************\\n\\nAn attribute reference is a primary followed by a period and a name:\\n\\n attributeref ::= primary \".\" identifier\\n\\nThe primary must evaluate to an object of a type that supports\\nattribute references, which most objects do. This object is then\\nasked to produce the attribute whose name is the identifier (which can\\nbe customized by overriding the ``__getattr__()`` method). If this\\nattribute is not available, the exception ``AttributeError`` is\\nraised. Otherwise, the type and value of the object produced is\\ndetermined by the object. 
Multiple evaluations of the same attribute\\nreference may yield different objects.\\n',\n'augassign':'\\nAugmented assignment statements\\n*******************************\\n\\nAugmented assignment is the combination, in a single statement, of a\\nbinary operation and an assignment statement:\\n\\n augmented_assignment_stmt ::= augtarget augop (expression_list | yield_expression)\\n augtarget ::= identifier | attributeref | subscription | slicing\\n augop ::= \"+=\" | \"-=\" | \"*=\" | \"/=\" | \"//=\" | \"%=\" | \"**=\"\\n | \">>=\" | \"<<=\" | \"&=\" | \"^=\" | \"|=\"\\n\\n(See section *Primaries* for the syntax definitions for the last three\\nsymbols.)\\n\\nAn augmented assignment evaluates the target (which, unlike normal\\nassignment statements, cannot be an unpacking) and the expression\\nlist, performs the binary operation specific to the type of assignment\\non the two operands, and assigns the result to the original target.\\nThe target is only evaluated once.\\n\\nAn augmented assignment expression like ``x += 1`` can be rewritten as\\n``x = x + 1`` to achieve a similar, but not exactly equal effect. In\\nthe augmented version, ``x`` is only evaluated once. Also, when\\npossible, the actual operation is performed *in-place*, meaning that\\nrather than creating a new object and assigning that to the target,\\nthe old object is modified instead.\\n\\nWith the exception of assigning to tuples and multiple targets in a\\nsingle statement, the assignment done by augmented assignment\\nstatements is handled the same way as normal assignments. Similarly,\\nwith the exception of the possible *in-place* behavior, the binary\\noperation performed by augmented assignment is the same as the normal\\nbinary operations.\\n\\nFor targets which are attribute references, the same *caveat about\\nclass and instance attributes* applies as for regular assignments.\\n',\n'binary':'\\nBinary arithmetic operations\\n****************************\\n\\nThe binary arithmetic operations have the conventional priority\\nlevels. Note that some of these operations also apply to certain non-\\nnumeric types. Apart from the power operator, there are only two\\nlevels, one for multiplicative operators and one for additive\\noperators:\\n\\n m_expr ::= u_expr | m_expr \"*\" u_expr | m_expr \"//\" u_expr | m_expr \"/\" u_expr\\n | m_expr \"%\" u_expr\\n a_expr ::= m_expr | a_expr \"+\" m_expr | a_expr \"-\" m_expr\\n\\nThe ``*`` (multiplication) operator yields the product of its\\narguments. The arguments must either both be numbers, or one argument\\nmust be an integer and the other must be a sequence. In the former\\ncase, the numbers are converted to a common type and then multiplied\\ntogether. In the latter case, sequence repetition is performed; a\\nnegative repetition factor yields an empty sequence.\\n\\nThe ``/`` (division) and ``//`` (floor division) operators yield the\\nquotient of their arguments. The numeric arguments are first\\nconverted to a common type. Integer division yields a float, while\\nfloor division of integers results in an integer; the result is that\\nof mathematical division with the \\'floor\\' function applied to the\\nresult. Division by zero raises the ``ZeroDivisionError`` exception.\\n\\nThe ``%`` (modulo) operator yields the remainder from the division of\\nthe first argument by the second. The numeric arguments are first\\nconverted to a common type. A zero right argument raises the\\n``ZeroDivisionError`` exception. 
The arguments may be floating point\\nnumbers, e.g., ``3.14%0.7`` equals ``0.34`` (since ``3.14`` equals\\n``4*0.7 + 0.34``.) The modulo operator always yields a result with\\nthe same sign as its second operand (or zero); the absolute value of\\nthe result is strictly smaller than the absolute value of the second\\noperand [1].\\n\\nThe floor division and modulo operators are connected by the following\\nidentity: ``x == (x//y)*y + (x%y)``. Floor division and modulo are\\nalso connected with the built-in function ``divmod()``: ``divmod(x, y)\\n== (x//y, x%y)``. [2].\\n\\nIn addition to performing the modulo operation on numbers, the ``%``\\noperator is also overloaded by string objects to perform old-style\\nstring formatting (also known as interpolation). The syntax for\\nstring formatting is described in the Python Library Reference,\\nsection *printf-style String Formatting*.\\n\\nThe floor division operator, the modulo operator, and the ``divmod()``\\nfunction are not defined for complex numbers. Instead, convert to a\\nfloating point number using the ``abs()`` function if appropriate.\\n\\nThe ``+`` (addition) operator yields the sum of its arguments. The\\narguments must either both be numbers or both sequences of the same\\ntype. In the former case, the numbers are converted to a common type\\nand then added together. In the latter case, the sequences are\\nconcatenated.\\n\\nThe ``-`` (subtraction) operator yields the difference of its\\narguments. The numeric arguments are first converted to a common\\ntype.\\n',\n'bitwise':'\\nBinary bitwise operations\\n*************************\\n\\nEach of the three bitwise operations has a different priority level:\\n\\n and_expr ::= shift_expr | and_expr \"&\" shift_expr\\n xor_expr ::= and_expr | xor_expr \"^\" and_expr\\n or_expr ::= xor_expr | or_expr \"|\" xor_expr\\n\\nThe ``&`` operator yields the bitwise AND of its arguments, which must\\nbe integers.\\n\\nThe ``^`` operator yields the bitwise XOR (exclusive OR) of its\\narguments, which must be integers.\\n\\nThe ``|`` operator yields the bitwise (inclusive) OR of its arguments,\\nwhich must be integers.\\n',\n'bltin-code-objects':'\\nCode Objects\\n************\\n\\nCode objects are used by the implementation to represent \"pseudo-\\ncompiled\" executable Python code such as a function body. They differ\\nfrom function objects because they don\\'t contain a reference to their\\nglobal execution environment. Code objects are returned by the built-\\nin ``compile()`` function and can be extracted from function objects\\nthrough their ``__code__`` attribute. See also the ``code`` module.\\n\\nA code object can be executed or evaluated by passing it (instead of a\\nsource string) to the ``exec()`` or ``eval()`` built-in functions.\\n\\nSee *The standard type hierarchy* for more information.\\n',\n'bltin-ellipsis-object':'\\nThe Ellipsis Object\\n*******************\\n\\nThis object is commonly used by slicing (see *Slicings*). It supports\\nno special operations. There is exactly one ellipsis object, named\\n``Ellipsis`` (a built-in name). ``type(Ellipsis)()`` produces the\\n``Ellipsis`` singleton.\\n\\nIt is written as ``Ellipsis`` or ``...``.\\n',\n'bltin-null-object':\"\\nThe Null Object\\n***************\\n\\nThis object is returned by functions that don't explicitly return a\\nvalue. It supports no special operations. There is exactly one null\\nobject, named ``None`` (a built-in name). 
``type(None)()`` produces\\nthe same singleton.\\n\\nIt is written as ``None``.\\n\",\n'bltin-type-objects':\"\\nType Objects\\n************\\n\\nType objects represent the various object types. An object's type is\\naccessed by the built-in function ``type()``. There are no special\\noperations on types. The standard module ``types`` defines names for\\nall standard built-in types.\\n\\nTypes are written like this: ````.\\n\",\n'booleans':'\\nBoolean operations\\n******************\\n\\n or_test ::= and_test | or_test \"or\" and_test\\n and_test ::= not_test | and_test \"and\" not_test\\n not_test ::= comparison | \"not\" not_test\\n\\nIn the context of Boolean operations, and also when expressions are\\nused by control flow statements, the following values are interpreted\\nas false: ``False``, ``None``, numeric zero of all types, and empty\\nstrings and containers (including strings, tuples, lists,\\ndictionaries, sets and frozensets). All other values are interpreted\\nas true. User-defined objects can customize their truth value by\\nproviding a ``__bool__()`` method.\\n\\nThe operator ``not`` yields ``True`` if its argument is false,\\n``False`` otherwise.\\n\\nThe expression ``x and y`` first evaluates *x*; if *x* is false, its\\nvalue is returned; otherwise, *y* is evaluated and the resulting value\\nis returned.\\n\\nThe expression ``x or y`` first evaluates *x*; if *x* is true, its\\nvalue is returned; otherwise, *y* is evaluated and the resulting value\\nis returned.\\n\\n(Note that neither ``and`` nor ``or`` restrict the value and type they\\nreturn to ``False`` and ``True``, but rather return the last evaluated\\nargument. This is sometimes useful, e.g., if ``s`` is a string that\\nshould be replaced by a default value if it is empty, the expression\\n``s or \\'foo\\'`` yields the desired value. 
Because ``not`` has to\\ninvent a value anyway, it does not bother to return a value of the\\nsame type as its argument, so e.g., ``not \\'foo\\'`` yields ``False``,\\nnot ``\\'\\'``.)\\n',\n'break':'\\nThe ``break`` statement\\n***********************\\n\\n break_stmt ::= \"break\"\\n\\n``break`` may only occur syntactically nested in a ``for`` or\\n``while`` loop, but not nested in a function or class definition\\nwithin that loop.\\n\\nIt terminates the nearest enclosing loop, skipping the optional\\n``else`` clause if the loop has one.\\n\\nIf a ``for`` loop is terminated by ``break``, the loop control target\\nkeeps its current value.\\n\\nWhen ``break`` passes control out of a ``try`` statement with a\\n``finally`` clause, that ``finally`` clause is executed before really\\nleaving the loop.\\n',\n'callable-types':'\\nEmulating callable objects\\n**************************\\n\\nobject.__call__(self[, args...])\\n\\n Called when the instance is \"called\" as a function; if this method\\n is defined, ``x(arg1, arg2, ...)`` is a shorthand for\\n ``x.__call__(arg1, arg2, ...)``.\\n',\n'calls':'\\nCalls\\n*****\\n\\nA call calls a callable object (e.g., a *function*) with a possibly\\nempty series of *arguments*:\\n\\n call ::= primary \"(\" [argument_list [\",\"] | comprehension] \")\"\\n argument_list ::= positional_arguments [\",\" keyword_arguments]\\n [\",\" \"*\" expression] [\",\" keyword_arguments]\\n [\",\" \"**\" expression]\\n | keyword_arguments [\",\" \"*\" expression]\\n [\",\" keyword_arguments] [\",\" \"**\" expression]\\n | \"*\" expression [\",\" keyword_arguments] [\",\" \"**\" expression]\\n | \"**\" expression\\n positional_arguments ::= expression (\",\" expression)*\\n keyword_arguments ::= keyword_item (\",\" keyword_item)*\\n keyword_item ::= identifier \"=\" expression\\n\\nA trailing comma may be present after the positional and keyword\\narguments but does not affect the semantics.\\n\\nThe primary must evaluate to a callable object (user-defined\\nfunctions, built-in functions, methods of built-in objects, class\\nobjects, methods of class instances, and all objects having a\\n``__call__()`` method are callable). All argument expressions are\\nevaluated before the call is attempted. Please refer to section\\n*Function definitions* for the syntax of formal *parameter* lists.\\n\\nIf keyword arguments are present, they are first converted to\\npositional arguments, as follows. First, a list of unfilled slots is\\ncreated for the formal parameters. If there are N positional\\narguments, they are placed in the first N slots. Next, for each\\nkeyword argument, the identifier is used to determine the\\ncorresponding slot (if the identifier is the same as the first formal\\nparameter name, the first slot is used, and so on). If the slot is\\nalready filled, a ``TypeError`` exception is raised. Otherwise, the\\nvalue of the argument is placed in the slot, filling it (even if the\\nexpression is ``None``, it fills the slot). When all arguments have\\nbeen processed, the slots that are still unfilled are filled with the\\ncorresponding default value from the function definition. (Default\\nvalues are calculated, once, when the function is defined; thus, a\\nmutable object such as a list or dictionary used as default value will\\nbe shared by all calls that don\\'t specify an argument value for the\\ncorresponding slot; this should usually be avoided.) If there are any\\nunfilled slots for which no default value is specified, a\\n``TypeError`` exception is raised. 
Otherwise, the list of filled\\nslots is used as the argument list for the call.\\n\\n**CPython implementation detail:** An implementation may provide\\nbuilt-in functions whose positional parameters do not have names, even\\nif they are \\'named\\' for the purpose of documentation, and which\\ntherefore cannot be supplied by keyword. In CPython, this is the case\\nfor functions implemented in C that use ``PyArg_ParseTuple()`` to\\nparse their arguments.\\n\\nIf there are more positional arguments than there are formal parameter\\nslots, a ``TypeError`` exception is raised, unless a formal parameter\\nusing the syntax ``*identifier`` is present; in this case, that formal\\nparameter receives a tuple containing the excess positional arguments\\n(or an empty tuple if there were no excess positional arguments).\\n\\nIf any keyword argument does not correspond to a formal parameter\\nname, a ``TypeError`` exception is raised, unless a formal parameter\\nusing the syntax ``**identifier`` is present; in this case, that\\nformal parameter receives a dictionary containing the excess keyword\\narguments (using the keywords as keys and the argument values as\\ncorresponding values), or a (new) empty dictionary if there were no\\nexcess keyword arguments.\\n\\nIf the syntax ``*expression`` appears in the function call,\\n``expression`` must evaluate to an iterable. Elements from this\\niterable are treated as if they were additional positional arguments;\\nif there are positional arguments *x1*, ..., *xN*, and ``expression``\\nevaluates to a sequence *y1*, ..., *yM*, this is equivalent to a call\\nwith M+N positional arguments *x1*, ..., *xN*, *y1*, ..., *yM*.\\n\\nA consequence of this is that although the ``*expression`` syntax may\\nappear *after* some keyword arguments, it is processed *before* the\\nkeyword arguments (and the ``**expression`` argument, if any -- see\\nbelow). So:\\n\\n >>> def f(a, b):\\n ... print(a, b)\\n ...\\n >>> f(b=1, *(2,))\\n 2 1\\n >>> f(a=1, *(2,))\\n Traceback (most recent call last):\\n File \"\", line 1, in ?\\n TypeError: f() got multiple values for keyword argument \\'a\\'\\n >>> f(1, *(2,))\\n 1 2\\n\\nIt is unusual for both keyword arguments and the ``*expression``\\nsyntax to be used in the same call, so in practice this confusion does\\nnot arise.\\n\\nIf the syntax ``**expression`` appears in the function call,\\n``expression`` must evaluate to a mapping, the contents of which are\\ntreated as additional keyword arguments. In the case of a keyword\\nappearing in both ``expression`` and as an explicit keyword argument,\\na ``TypeError`` exception is raised.\\n\\nFormal parameters using the syntax ``*identifier`` or ``**identifier``\\ncannot be used as positional argument slots or as keyword argument\\nnames.\\n\\nA call always returns some value, possibly ``None``, unless it raises\\nan exception. How this value is computed depends on the type of the\\ncallable object.\\n\\nIf it is---\\n\\na user-defined function:\\n The code block for the function is executed, passing it the\\n argument list. The first thing the code block will do is bind the\\n formal parameters to the arguments; this is described in section\\n *Function definitions*. 
When the code block executes a ``return``\\n statement, this specifies the return value of the function call.\\n\\na built-in function or method:\\n The result is up to the interpreter; see *Built-in Functions* for\\n the descriptions of built-in functions and methods.\\n\\na class object:\\n A new instance of that class is returned.\\n\\na class instance method:\\n The corresponding user-defined function is called, with an argument\\n list that is one longer than the argument list of the call: the\\n instance becomes the first argument.\\n\\na class instance:\\n The class must define a ``__call__()`` method; the effect is then\\n the same as if that method was called.\\n',\n'class':'\\nClass definitions\\n*****************\\n\\nA class definition defines a class object (see section *The standard\\ntype hierarchy*):\\n\\n classdef ::= [decorators] \"class\" classname [inheritance] \":\" suite\\n inheritance ::= \"(\" [parameter_list] \")\"\\n classname ::= identifier\\n\\nA class definition is an executable statement. The inheritance list\\nusually gives a list of base classes (see *Customizing class creation*\\nfor more advanced uses), so each item in the list should evaluate to a\\nclass object which allows subclassing. Classes without an inheritance\\nlist inherit, by default, from the base class ``object``; hence,\\n\\n class Foo:\\n pass\\n\\nis equivalent to\\n\\n class Foo(object):\\n pass\\n\\nThe class\\'s suite is then executed in a new execution frame (see\\n*Naming and binding*), using a newly created local namespace and the\\noriginal global namespace. (Usually, the suite contains mostly\\nfunction definitions.) When the class\\'s suite finishes execution, its\\nexecution frame is discarded but its local namespace is saved. [4] A\\nclass object is then created using the inheritance list for the base\\nclasses and the saved local namespace for the attribute dictionary.\\nThe class name is bound to this class object in the original local\\nnamespace.\\n\\nClass creation can be customized heavily using *metaclasses*.\\n\\nClasses can also be decorated: just like when decorating functions,\\n\\n @f1(arg)\\n @f2\\n class Foo: pass\\n\\nis equivalent to\\n\\n class Foo: pass\\n Foo = f1(arg)(f2(Foo))\\n\\nThe evaluation rules for the decorator expressions are the same as for\\nfunction decorators. The result must be a class object, which is then\\nbound to the class name.\\n\\n**Programmer\\'s note:** Variables defined in the class definition are\\nclass attributes; they are shared by instances. Instance attributes\\ncan be set in a method with ``self.name = value``. Both class and\\ninstance attributes are accessible through the notation\\n\"``self.name``\", and an instance attribute hides a class attribute\\nwith the same name when accessed in this way. Class attributes can be\\nused as defaults for instance attributes, but using mutable values\\nthere can lead to unexpected results. 
*Descriptors* can be used to\\ncreate instance variables with different implementation details.\\n\\nSee also:\\n\\n **PEP 3115** - Metaclasses in Python 3 **PEP 3129** - Class\\n Decorators\\n\\n-[ Footnotes ]-\\n\\n[1] The exception is propagated to the invocation stack unless there\\n is a ``finally`` clause which happens to raise another exception.\\n That new exception causes the old one to be lost.\\n\\n[2] Currently, control \"flows off the end\" except in the case of an\\n exception or the execution of a ``return``, ``continue``, or\\n ``break`` statement.\\n\\n[3] A string literal appearing as the first statement in the function\\n body is transformed into the function\\'s ``__doc__`` attribute and\\n therefore the function\\'s *docstring*.\\n\\n[4] A string literal appearing as the first statement in the class\\n body is transformed into the namespace\\'s ``__doc__`` item and\\n therefore the class\\'s *docstring*.\\n',\n'comparisons':'\\nComparisons\\n***********\\n\\nUnlike C, all comparison operations in Python have the same priority,\\nwhich is lower than that of any arithmetic, shifting or bitwise\\noperation. Also unlike C, expressions like ``a < b < c`` have the\\ninterpretation that is conventional in mathematics:\\n\\n comparison ::= or_expr ( comp_operator or_expr )*\\n comp_operator ::= \"<\" | \">\" | \"==\" | \">=\" | \"<=\" | \"!=\"\\n | \"is\" [\"not\"] | [\"not\"] \"in\"\\n\\nComparisons yield boolean values: ``True`` or ``False``.\\n\\nComparisons can be chained arbitrarily, e.g., ``x < y <= z`` is\\nequivalent to ``x < y and y <= z``, except that ``y`` is evaluated\\nonly once (but in both cases ``z`` is not evaluated at all when ``x <\\ny`` is found to be false).\\n\\nFormally, if *a*, *b*, *c*, ..., *y*, *z* are expressions and *op1*,\\n*op2*, ..., *opN* are comparison operators, then ``a op1 b op2 c ... y\\nopN z`` is equivalent to ``a op1 b and b op2 c and ... y opN z``,\\nexcept that each expression is evaluated at most once.\\n\\nNote that ``a op1 b op2 c`` doesn\\'t imply any kind of comparison\\nbetween *a* and *c*, so that, e.g., ``x < y > z`` is perfectly legal\\n(though perhaps not pretty).\\n\\nThe operators ``<``, ``>``, ``==``, ``>=``, ``<=``, and ``!=`` compare\\nthe values of two objects. The objects need not have the same type.\\nIf both are numbers, they are converted to a common type. Otherwise,\\nthe ``==`` and ``!=`` operators *always* consider objects of different\\ntypes to be unequal, while the ``<``, ``>``, ``>=`` and ``<=``\\noperators raise a ``TypeError`` when comparing objects of different\\ntypes that do not implement these operators for the given pair of\\ntypes. You can control comparison behavior of objects of non-built-in\\ntypes by defining rich comparison methods like ``__gt__()``, described\\nin section *Basic customization*.\\n\\nComparison of objects of the same type depends on the type:\\n\\n* Numbers are compared arithmetically.\\n\\n* The values ``float(\\'NaN\\')`` and ``Decimal(\\'NaN\\')`` are special. The\\n are identical to themselves, ``x is x`` but are not equal to\\n themselves, ``x != x``. Additionally, comparing any value to a\\n not-a-number value will return ``False``. 
For example, both ``3 <\\n float(\\'NaN\\')`` and ``float(\\'NaN\\') < 3`` will return ``False``.\\n\\n* Bytes objects are compared lexicographically using the numeric\\n values of their elements.\\n\\n* Strings are compared lexicographically using the numeric equivalents\\n (the result of the built-in function ``ord()``) of their characters.\\n [3] String and bytes object can\\'t be compared!\\n\\n* Tuples and lists are compared lexicographically using comparison of\\n corresponding elements. This means that to compare equal, each\\n element must compare equal and the two sequences must be of the same\\n type and have the same length.\\n\\n If not equal, the sequences are ordered the same as their first\\n differing elements. For example, ``[1,2,x] <= [1,2,y]`` has the\\n same value as ``x <= y``. If the corresponding element does not\\n exist, the shorter sequence is ordered first (for example, ``[1,2] <\\n [1,2,3]``).\\n\\n* Mappings (dictionaries) compare equal if and only if they have the\\n same ``(key, value)`` pairs. Order comparisons ``(\\'<\\', \\'<=\\', \\'>=\\',\\n \\'>\\')`` raise ``TypeError``.\\n\\n* Sets and frozensets define comparison operators to mean subset and\\n superset tests. Those relations do not define total orderings (the\\n two sets ``{1,2}`` and {2,3} are not equal, nor subsets of one\\n another, nor supersets of one another). Accordingly, sets are not\\n appropriate arguments for functions which depend on total ordering.\\n For example, ``min()``, ``max()``, and ``sorted()`` produce\\n undefined results given a list of sets as inputs.\\n\\n* Most other objects of built-in types compare unequal unless they are\\n the same object; the choice whether one object is considered smaller\\n or larger than another one is made arbitrarily but consistently\\n within one execution of a program.\\n\\nComparison of objects of the differing types depends on whether either\\nof the types provide explicit support for the comparison. Most\\nnumeric types can be compared with one another. When cross-type\\ncomparison is not supported, the comparison method returns\\n``NotImplemented``.\\n\\nThe operators ``in`` and ``not in`` test for membership. ``x in s``\\nevaluates to true if *x* is a member of *s*, and false otherwise. ``x\\nnot in s`` returns the negation of ``x in s``. All built-in sequences\\nand set types support this as well as dictionary, for which ``in``\\ntests whether a the dictionary has a given key. For container types\\nsuch as list, tuple, set, frozenset, dict, or collections.deque, the\\nexpression ``x in y`` is equivalent to ``any(x is e or x == e for e in\\ny)``.\\n\\nFor the string and bytes types, ``x in y`` is true if and only if *x*\\nis a substring of *y*. An equivalent test is ``y.find(x) != -1``.\\nEmpty strings are always considered to be a substring of any other\\nstring, so ``\"\" in \"abc\"`` will return ``True``.\\n\\nFor user-defined classes which define the ``__contains__()`` method,\\n``x in y`` is true if and only if ``y.__contains__(x)`` is true.\\n\\nFor user-defined classes which do not define ``__contains__()`` but do\\ndefine ``__iter__()``, ``x in y`` is true if some value ``z`` with ``x\\n== z`` is produced while iterating over ``y``. 
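For example (a minimal sketch, not part of the reference text; the classes ``Evens`` and ``Digits`` are invented):

    class Evens:
        def __contains__(self, x):
            return isinstance(x, int) and x % 2 == 0

    class Digits:
        def __iter__(self):        # no __contains__(); ``in`` falls back to iteration
            return iter(range(10))

    print(4 in Evens())    # True  -- answered by Evens.__contains__()
    print(7 in Digits())   # True  -- a matching value was produced while iterating
    print(12 in Digits())  # False -- iteration ended without a match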
If an exception is\\nraised during the iteration, it is as if ``in`` raised that exception.\\n\\nLastly, the old-style iteration protocol is tried: if a class defines\\n``__getitem__()``, ``x in y`` is true if and only if there is a non-\\nnegative integer index *i* such that ``x == y[i]``, and all lower\\ninteger indices do not raise ``IndexError`` exception. (If any other\\nexception is raised, it is as if ``in`` raised that exception).\\n\\nThe operator ``not in`` is defined to have the inverse true value of\\n``in``.\\n\\nThe operators ``is`` and ``is not`` test for object identity: ``x is\\ny`` is true if and only if *x* and *y* are the same object. ``x is\\nnot y`` yields the inverse truth value. [4]\\n',\n'compound':'\\nCompound statements\\n*******************\\n\\nCompound statements contain (groups of) other statements; they affect\\nor control the execution of those other statements in some way. In\\ngeneral, compound statements span multiple lines, although in simple\\nincarnations a whole compound statement may be contained in one line.\\n\\nThe ``if``, ``while`` and ``for`` statements implement traditional\\ncontrol flow constructs. ``try`` specifies exception handlers and/or\\ncleanup code for a group of statements, while the ``with`` statement\\nallows the execution of initialization and finalization code around a\\nblock of code. Function and class definitions are also syntactically\\ncompound statements.\\n\\nCompound statements consist of one or more \\'clauses.\\' A clause\\nconsists of a header and a \\'suite.\\' The clause headers of a\\nparticular compound statement are all at the same indentation level.\\nEach clause header begins with a uniquely identifying keyword and ends\\nwith a colon. A suite is a group of statements controlled by a\\nclause. A suite can be one or more semicolon-separated simple\\nstatements on the same line as the header, following the header\\'s\\ncolon, or it can be one or more indented statements on subsequent\\nlines. Only the latter form of suite can contain nested compound\\nstatements; the following is illegal, mostly because it wouldn\\'t be\\nclear to which ``if`` clause a following ``else`` clause would belong:\\n\\n if test1: if test2: print(x)\\n\\nAlso note that the semicolon binds tighter than the colon in this\\ncontext, so that in the following example, either all or none of the\\n``print()`` calls are executed:\\n\\n if x < y < z: print(x); print(y); print(z)\\n\\nSummarizing:\\n\\n compound_stmt ::= if_stmt\\n | while_stmt\\n | for_stmt\\n | try_stmt\\n | with_stmt\\n | funcdef\\n | classdef\\n suite ::= stmt_list NEWLINE | NEWLINE INDENT statement+ DEDENT\\n statement ::= stmt_list NEWLINE | compound_stmt\\n stmt_list ::= simple_stmt (\";\" simple_stmt)* [\";\"]\\n\\nNote that statements always end in a ``NEWLINE`` possibly followed by\\na ``DEDENT``. 
Also note that optional continuation clauses always\\nbegin with a keyword that cannot start a statement, thus there are no\\nambiguities (the \\'dangling ``else``\\' problem is solved in Python by\\nrequiring nested ``if`` statements to be indented).\\n\\nThe formatting of the grammar rules in the following sections places\\neach clause on a separate line for clarity.\\n\\n\\nThe ``if`` statement\\n====================\\n\\nThe ``if`` statement is used for conditional execution:\\n\\n if_stmt ::= \"if\" expression \":\" suite\\n ( \"elif\" expression \":\" suite )*\\n [\"else\" \":\" suite]\\n\\nIt selects exactly one of the suites by evaluating the expressions one\\nby one until one is found to be true (see section *Boolean operations*\\nfor the definition of true and false); then that suite is executed\\n(and no other part of the ``if`` statement is executed or evaluated).\\nIf all expressions are false, the suite of the ``else`` clause, if\\npresent, is executed.\\n\\n\\nThe ``while`` statement\\n=======================\\n\\nThe ``while`` statement is used for repeated execution as long as an\\nexpression is true:\\n\\n while_stmt ::= \"while\" expression \":\" suite\\n [\"else\" \":\" suite]\\n\\nThis repeatedly tests the expression and, if it is true, executes the\\nfirst suite; if the expression is false (which may be the first time\\nit is tested) the suite of the ``else`` clause, if present, is\\nexecuted and the loop terminates.\\n\\nA ``break`` statement executed in the first suite terminates the loop\\nwithout executing the ``else`` clause\\'s suite. A ``continue``\\nstatement executed in the first suite skips the rest of the suite and\\ngoes back to testing the expression.\\n\\n\\nThe ``for`` statement\\n=====================\\n\\nThe ``for`` statement is used to iterate over the elements of a\\nsequence (such as a string, tuple or list) or other iterable object:\\n\\n for_stmt ::= \"for\" target_list \"in\" expression_list \":\" suite\\n [\"else\" \":\" suite]\\n\\nThe expression list is evaluated once; it should yield an iterable\\nobject. An iterator is created for the result of the\\n``expression_list``. The suite is then executed once for each item\\nprovided by the iterator, in the order of ascending indices. Each\\nitem in turn is assigned to the target list using the standard rules\\nfor assignments (see *Assignment statements*), and then the suite is\\nexecuted. When the items are exhausted (which is immediately when the\\nsequence is empty or an iterator raises a ``StopIteration``\\nexception), the suite in the ``else`` clause, if present, is executed,\\nand the loop terminates.\\n\\nA ``break`` statement executed in the first suite terminates the loop\\nwithout executing the ``else`` clause\\'s suite. A ``continue``\\nstatement executed in the first suite skips the rest of the suite and\\ncontinues with the next item, or with the ``else`` clause if there was\\nno next item.\\n\\nThe suite may assign to the variable(s) in the target list; this does\\nnot affect the next item assigned to it.\\n\\nNames in the target list are not deleted when the loop is finished,\\nbut if the sequence is empty, it will not have been assigned to at all\\nby the loop. 
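For example (an illustrative sketch only; ``needle`` and ``haystack`` are invented names), ``break`` skips the ``else`` clause and the target name stays bound after the loop:

    needle, haystack = 3, [1, 2, 3, 4]
    for item in haystack:
        if item == needle:
            print("found", item)
            break
    else:
        print("not found")       # runs only if the loop was not broken
    print(item)                  # 3 -- the target name is still bound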
Hint: the built-in function ``range()`` returns an\\niterator of integers suitable to emulate the effect of Pascal\\'s ``for\\ni := a to b do``; e.g., ``list(range(3))`` returns the list ``[0, 1,\\n2]``.\\n\\nNote: There is a subtlety when the sequence is being modified by the loop\\n (this can only occur for mutable sequences, i.e. lists). An\\n internal counter is used to keep track of which item is used next,\\n and this is incremented on each iteration. When this counter has\\n reached the length of the sequence the loop terminates. This means\\n that if the suite deletes the current (or a previous) item from the\\n sequence, the next item will be skipped (since it gets the index of\\n the current item which has already been treated). Likewise, if the\\n suite inserts an item in the sequence before the current item, the\\n current item will be treated again the next time through the loop.\\n This can lead to nasty bugs that can be avoided by making a\\n temporary copy using a slice of the whole sequence, e.g.,\\n\\n for x in a[:]:\\n if x < 0: a.remove(x)\\n\\n\\nThe ``try`` statement\\n=====================\\n\\nThe ``try`` statement specifies exception handlers and/or cleanup code\\nfor a group of statements:\\n\\n try_stmt ::= try1_stmt | try2_stmt\\n try1_stmt ::= \"try\" \":\" suite\\n (\"except\" [expression [\"as\" target]] \":\" suite)+\\n [\"else\" \":\" suite]\\n [\"finally\" \":\" suite]\\n try2_stmt ::= \"try\" \":\" suite\\n \"finally\" \":\" suite\\n\\nThe ``except`` clause(s) specify one or more exception handlers. When\\nno exception occurs in the ``try`` clause, no exception handler is\\nexecuted. When an exception occurs in the ``try`` suite, a search for\\nan exception handler is started. This search inspects the except\\nclauses in turn until one is found that matches the exception. An\\nexpression-less except clause, if present, must be last; it matches\\nany exception. For an except clause with an expression, that\\nexpression is evaluated, and the clause matches the exception if the\\nresulting object is \"compatible\" with the exception. An object is\\ncompatible with an exception if it is the class or a base class of the\\nexception object or a tuple containing an item compatible with the\\nexception.\\n\\nIf no except clause matches the exception, the search for an exception\\nhandler continues in the surrounding code and on the invocation stack.\\n[1]\\n\\nIf the evaluation of an expression in the header of an except clause\\nraises an exception, the original search for a handler is canceled and\\na search starts for the new exception in the surrounding code and on\\nthe call stack (it is treated as if the entire ``try`` statement\\nraised the exception).\\n\\nWhen a matching except clause is found, the exception is assigned to\\nthe target specified after the ``as`` keyword in that except clause,\\nif present, and the except clause\\'s suite is executed. All except\\nclauses must have an executable block. When the end of this block is\\nreached, execution continues normally after the entire try statement.\\n(This means that if two nested handlers exist for the same exception,\\nand the exception occurs in the try clause of the inner handler, the\\nouter handler will not handle the exception.)\\n\\nWhen an exception has been assigned using ``as target``, it is cleared\\nat the end of the except clause. 
This is as if\\n\\n except E as N:\\n foo\\n\\nwas translated to\\n\\n except E as N:\\n try:\\n foo\\n finally:\\n del N\\n\\nThis means the exception must be assigned to a different name to be\\nable to refer to it after the except clause. Exceptions are cleared\\nbecause with the traceback attached to them, they form a reference\\ncycle with the stack frame, keeping all locals in that frame alive\\nuntil the next garbage collection occurs.\\n\\nBefore an except clause\\'s suite is executed, details about the\\nexception are stored in the ``sys`` module and can be access via\\n``sys.exc_info()``. ``sys.exc_info()`` returns a 3-tuple consisting of\\nthe exception class, the exception instance and a traceback object\\n(see section *The standard type hierarchy*) identifying the point in\\nthe program where the exception occurred. ``sys.exc_info()`` values\\nare restored to their previous values (before the call) when returning\\nfrom a function that handled an exception.\\n\\nThe optional ``else`` clause is executed if and when control flows off\\nthe end of the ``try`` clause. [2] Exceptions in the ``else`` clause\\nare not handled by the preceding ``except`` clauses.\\n\\nIf ``finally`` is present, it specifies a \\'cleanup\\' handler. The\\n``try`` clause is executed, including any ``except`` and ``else``\\nclauses. If an exception occurs in any of the clauses and is not\\nhandled, the exception is temporarily saved. The ``finally`` clause is\\nexecuted. If there is a saved exception it is re-raised at the end of\\nthe ``finally`` clause. If the ``finally`` clause raises another\\nexception, the saved exception is set as the context of the new\\nexception. If the ``finally`` clause executes a ``return`` or\\n``break`` statement, the saved exception is discarded:\\n\\n def f():\\n try:\\n 1/0\\n finally:\\n return 42\\n\\n >>> f()\\n 42\\n\\nThe exception information is not available to the program during\\nexecution of the ``finally`` clause.\\n\\nWhen a ``return``, ``break`` or ``continue`` statement is executed in\\nthe ``try`` suite of a ``try``...``finally`` statement, the\\n``finally`` clause is also executed \\'on the way out.\\' A ``continue``\\nstatement is illegal in the ``finally`` clause. (The reason is a\\nproblem with the current implementation --- this restriction may be\\nlifted in the future).\\n\\nAdditional information on exceptions can be found in section\\n*Exceptions*, and information on using the ``raise`` statement to\\ngenerate exceptions may be found in section *The raise statement*.\\n\\n\\nThe ``with`` statement\\n======================\\n\\nThe ``with`` statement is used to wrap the execution of a block with\\nmethods defined by a context manager (see section *With Statement\\nContext Managers*). This allows common\\n``try``...``except``...``finally`` usage patterns to be encapsulated\\nfor convenient reuse.\\n\\n with_stmt ::= \"with\" with_item (\",\" with_item)* \":\" suite\\n with_item ::= expression [\"as\" target]\\n\\nThe execution of the ``with`` statement with one \"item\" proceeds as\\nfollows:\\n\\n1. The context expression (the expression given in the ``with_item``)\\n is evaluated to obtain a context manager.\\n\\n2. The context manager\\'s ``__exit__()`` is loaded for later use.\\n\\n3. The context manager\\'s ``__enter__()`` method is invoked.\\n\\n4. 
If a target was included in the ``with`` statement, the return\\n value from ``__enter__()`` is assigned to it.\\n\\n Note: The ``with`` statement guarantees that if the ``__enter__()``\\n method returns without an error, then ``__exit__()`` will always\\n be called. Thus, if an error occurs during the assignment to the\\n target list, it will be treated the same as an error occurring\\n within the suite would be. See step 6 below.\\n\\n5. The suite is executed.\\n\\n6. The context manager\\'s ``__exit__()`` method is invoked. If an\\n exception caused the suite to be exited, its type, value, and\\n traceback are passed as arguments to ``__exit__()``. Otherwise,\\n three ``None`` arguments are supplied.\\n\\n If the suite was exited due to an exception, and the return value\\n from the ``__exit__()`` method was false, the exception is\\n reraised. If the return value was true, the exception is\\n suppressed, and execution continues with the statement following\\n the ``with`` statement.\\n\\n If the suite was exited for any reason other than an exception, the\\n return value from ``__exit__()`` is ignored, and execution proceeds\\n at the normal location for the kind of exit that was taken.\\n\\nWith more than one item, the context managers are processed as if\\nmultiple ``with`` statements were nested:\\n\\n with A() as a, B() as b:\\n suite\\n\\nis equivalent to\\n\\n with A() as a:\\n with B() as b:\\n suite\\n\\nChanged in version 3.1: Support for multiple context expressions.\\n\\nSee also:\\n\\n **PEP 0343** - The \"with\" statement\\n The specification, background, and examples for the Python\\n ``with`` statement.\\n\\n\\nFunction definitions\\n====================\\n\\nA function definition defines a user-defined function object (see\\nsection *The standard type hierarchy*):\\n\\n funcdef ::= [decorators] \"def\" funcname \"(\" [parameter_list] \")\" [\"->\" expression] \":\" suite\\n decorators ::= decorator+\\n decorator ::= \"@\" dotted_name [\"(\" [parameter_list [\",\"]] \")\"] NEWLINE\\n dotted_name ::= identifier (\".\" identifier)*\\n parameter_list ::= (defparameter \",\")*\\n ( \"*\" [parameter] (\",\" defparameter)* [\",\" \"**\" parameter]\\n | \"**\" parameter\\n | defparameter [\",\"] )\\n parameter ::= identifier [\":\" expression]\\n defparameter ::= parameter [\"=\" expression]\\n funcname ::= identifier\\n\\nA function definition is an executable statement. Its execution binds\\nthe function name in the current local namespace to a function object\\n(a wrapper around the executable code for the function). This\\nfunction object contains a reference to the current global namespace\\nas the global namespace to be used when the function is called.\\n\\nThe function definition does not execute the function body; this gets\\nexecuted only when the function is called. [3]\\n\\nA function definition may be wrapped by one or more *decorator*\\nexpressions. Decorator expressions are evaluated when the function is\\ndefined, in the scope that contains the function definition. The\\nresult must be a callable, which is invoked with the function object\\nas the only argument. The returned value is bound to the function name\\ninstead of the function object. Multiple decorators are applied in\\nnested fashion. 
For example, the following code\\n\\n @f1(arg)\\n @f2\\n def func(): pass\\n\\nis equivalent to\\n\\n def func(): pass\\n func = f1(arg)(f2(func))\\n\\nWhen one or more *parameters* have the form *parameter* ``=``\\n*expression*, the function is said to have \"default parameter values.\"\\nFor a parameter with a default value, the corresponding *argument* may\\nbe omitted from a call, in which case the parameter\\'s default value is\\nsubstituted. If a parameter has a default value, all following\\nparameters up until the \"``*``\" must also have a default value ---\\nthis is a syntactic restriction that is not expressed by the grammar.\\n\\n**Default parameter values are evaluated when the function definition\\nis executed.** This means that the expression is evaluated once, when\\nthe function is defined, and that the same \"pre-computed\" value is\\nused for each call. This is especially important to understand when a\\ndefault parameter is a mutable object, such as a list or a dictionary:\\nif the function modifies the object (e.g. by appending an item to a\\nlist), the default value is in effect modified. This is generally not\\nwhat was intended. A way around this is to use ``None`` as the\\ndefault, and explicitly test for it in the body of the function, e.g.:\\n\\n def whats_on_the_telly(penguin=None):\\n if penguin is None:\\n penguin = []\\n penguin.append(\"property of the zoo\")\\n return penguin\\n\\nFunction call semantics are described in more detail in section\\n*Calls*. A function call always assigns values to all parameters\\nmentioned in the parameter list, either from position arguments, from\\nkeyword arguments, or from default values. If the form\\n\"``*identifier``\" is present, it is initialized to a tuple receiving\\nany excess positional parameters, defaulting to the empty tuple. If\\nthe form \"``**identifier``\" is present, it is initialized to a new\\ndictionary receiving any excess keyword arguments, defaulting to a new\\nempty dictionary. Parameters after \"``*``\" or \"``*identifier``\" are\\nkeyword-only parameters and may only be passed used keyword arguments.\\n\\nParameters may have annotations of the form \"``: expression``\"\\nfollowing the parameter name. Any parameter may have an annotation\\neven those of the form ``*identifier`` or ``**identifier``. Functions\\nmay have \"return\" annotation of the form \"``-> expression``\" after the\\nparameter list. These annotations can be any valid Python expression\\nand are evaluated when the function definition is executed.\\nAnnotations may be evaluated in a different order than they appear in\\nthe source code. The presence of annotations does not change the\\nsemantics of a function. The annotation values are available as\\nvalues of a dictionary keyed by the parameters\\' names in the\\n``__annotations__`` attribute of the function object.\\n\\nIt is also possible to create anonymous functions (functions not bound\\nto a name), for immediate use in expressions. This uses lambda forms,\\ndescribed in section *Lambdas*. Note that the lambda form is merely a\\nshorthand for a simplified function definition; a function defined in\\na \"``def``\" statement can be passed around or assigned to another name\\njust like a function defined by a lambda form. The \"``def``\" form is\\nactually more powerful since it allows the execution of multiple\\nstatements and annotations.\\n\\n**Programmer\\'s note:** Functions are first-class objects. 
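For instance (a minimal sketch; ``make_adder`` is an invented name), a function object can be created inside another function, returned, and bound to a new name:

    def make_adder(n):
        def add(x):              # local function; ``n`` is a free variable
            return x + n
        return add

    add_two = make_adder(2)      # the returned function object gets a new name
    print(add_two(40))           # 42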
A \"``def``\"\\nform executed inside a function definition defines a local function\\nthat can be returned or passed around. Free variables used in the\\nnested function can access the local variables of the function\\ncontaining the def. See section *Naming and binding* for details.\\n\\nSee also:\\n\\n **PEP 3107** - Function Annotations\\n The original specification for function annotations.\\n\\n\\nClass definitions\\n=================\\n\\nA class definition defines a class object (see section *The standard\\ntype hierarchy*):\\n\\n classdef ::= [decorators] \"class\" classname [inheritance] \":\" suite\\n inheritance ::= \"(\" [parameter_list] \")\"\\n classname ::= identifier\\n\\nA class definition is an executable statement. The inheritance list\\nusually gives a list of base classes (see *Customizing class creation*\\nfor more advanced uses), so each item in the list should evaluate to a\\nclass object which allows subclassing. Classes without an inheritance\\nlist inherit, by default, from the base class ``object``; hence,\\n\\n class Foo:\\n pass\\n\\nis equivalent to\\n\\n class Foo(object):\\n pass\\n\\nThe class\\'s suite is then executed in a new execution frame (see\\n*Naming and binding*), using a newly created local namespace and the\\noriginal global namespace. (Usually, the suite contains mostly\\nfunction definitions.) When the class\\'s suite finishes execution, its\\nexecution frame is discarded but its local namespace is saved. [4] A\\nclass object is then created using the inheritance list for the base\\nclasses and the saved local namespace for the attribute dictionary.\\nThe class name is bound to this class object in the original local\\nnamespace.\\n\\nClass creation can be customized heavily using *metaclasses*.\\n\\nClasses can also be decorated: just like when decorating functions,\\n\\n @f1(arg)\\n @f2\\n class Foo: pass\\n\\nis equivalent to\\n\\n class Foo: pass\\n Foo = f1(arg)(f2(Foo))\\n\\nThe evaluation rules for the decorator expressions are the same as for\\nfunction decorators. The result must be a class object, which is then\\nbound to the class name.\\n\\n**Programmer\\'s note:** Variables defined in the class definition are\\nclass attributes; they are shared by instances. Instance attributes\\ncan be set in a method with ``self.name = value``. Both class and\\ninstance attributes are accessible through the notation\\n\"``self.name``\", and an instance attribute hides a class attribute\\nwith the same name when accessed in this way. Class attributes can be\\nused as defaults for instance attributes, but using mutable values\\nthere can lead to unexpected results. 
*Descriptors* can be used to\\ncreate instance variables with different implementation details.\\n\\nSee also:\\n\\n **PEP 3115** - Metaclasses in Python 3 **PEP 3129** - Class\\n Decorators\\n\\n-[ Footnotes ]-\\n\\n[1] The exception is propagated to the invocation stack unless there\\n is a ``finally`` clause which happens to raise another exception.\\n That new exception causes the old one to be lost.\\n\\n[2] Currently, control \"flows off the end\" except in the case of an\\n exception or the execution of a ``return``, ``continue``, or\\n ``break`` statement.\\n\\n[3] A string literal appearing as the first statement in the function\\n body is transformed into the function\\'s ``__doc__`` attribute and\\n therefore the function\\'s *docstring*.\\n\\n[4] A string literal appearing as the first statement in the class\\n body is transformed into the namespace\\'s ``__doc__`` item and\\n therefore the class\\'s *docstring*.\\n',\n'context-managers':'\\nWith Statement Context Managers\\n*******************************\\n\\nA *context manager* is an object that defines the runtime context to\\nbe established when executing a ``with`` statement. The context\\nmanager handles the entry into, and the exit from, the desired runtime\\ncontext for the execution of the block of code. Context managers are\\nnormally invoked using the ``with`` statement (described in section\\n*The with statement*), but can also be used by directly invoking their\\nmethods.\\n\\nTypical uses of context managers include saving and restoring various\\nkinds of global state, locking and unlocking resources, closing opened\\nfiles, etc.\\n\\nFor more information on context managers, see *Context Manager Types*.\\n\\nobject.__enter__(self)\\n\\n Enter the runtime context related to this object. The ``with``\\n statement will bind this method\\'s return value to the target(s)\\n specified in the ``as`` clause of the statement, if any.\\n\\nobject.__exit__(self, exc_type, exc_value, traceback)\\n\\n Exit the runtime context related to this object. The parameters\\n describe the exception that caused the context to be exited. If the\\n context was exited without an exception, all three arguments will\\n be ``None``.\\n\\n If an exception is supplied, and the method wishes to suppress the\\n exception (i.e., prevent it from being propagated), it should\\n return a true value. Otherwise, the exception will be processed\\n normally upon exit from this method.\\n\\n Note that ``__exit__()`` methods should not reraise the passed-in\\n exception; this is the caller\\'s responsibility.\\n\\nSee also:\\n\\n **PEP 0343** - The \"with\" statement\\n The specification, background, and examples for the Python\\n ``with`` statement.\\n',\n'continue':'\\nThe ``continue`` statement\\n**************************\\n\\n continue_stmt ::= \"continue\"\\n\\n``continue`` may only occur syntactically nested in a ``for`` or\\n``while`` loop, but not nested in a function or class definition or\\n``finally`` clause within that loop. 
It continues with the next cycle\\nof the nearest enclosing loop.\\n\\nWhen ``continue`` passes control out of a ``try`` statement with a\\n``finally`` clause, that ``finally`` clause is executed before really\\nstarting the next loop cycle.\\n',\n'conversions':'\\nArithmetic conversions\\n**********************\\n\\nWhen a description of an arithmetic operator below uses the phrase\\n\"the numeric arguments are converted to a common type,\" this means\\nthat the operator implementation for built-in types works that way:\\n\\n* If either argument is a complex number, the other is converted to\\n complex;\\n\\n* otherwise, if either argument is a floating point number, the other\\n is converted to floating point;\\n\\n* otherwise, both must be integers and no conversion is necessary.\\n\\nSome additional rules apply for certain operators (e.g., a string left\\nargument to the \\'%\\' operator). Extensions must define their own\\nconversion behavior.\\n',\n'customization':'\\nBasic customization\\n*******************\\n\\nobject.__new__(cls[, ...])\\n\\n Called to create a new instance of class *cls*. ``__new__()`` is a\\n static method (special-cased so you need not declare it as such)\\n that takes the class of which an instance was requested as its\\n first argument. The remaining arguments are those passed to the\\n object constructor expression (the call to the class). The return\\n value of ``__new__()`` should be the new object instance (usually\\n an instance of *cls*).\\n\\n Typical implementations create a new instance of the class by\\n invoking the superclass\\'s ``__new__()`` method using\\n ``super(currentclass, cls).__new__(cls[, ...])`` with appropriate\\n arguments and then modifying the newly-created instance as\\n necessary before returning it.\\n\\n If ``__new__()`` returns an instance of *cls*, then the new\\n instance\\'s ``__init__()`` method will be invoked like\\n ``__init__(self[, ...])``, where *self* is the new instance and the\\n remaining arguments are the same as were passed to ``__new__()``.\\n\\n If ``__new__()`` does not return an instance of *cls*, then the new\\n instance\\'s ``__init__()`` method will not be invoked.\\n\\n ``__new__()`` is intended mainly to allow subclasses of immutable\\n types (like int, str, or tuple) to customize instance creation. It\\n is also commonly overridden in custom metaclasses in order to\\n customize class creation.\\n\\nobject.__init__(self[, ...])\\n\\n Called when the instance is created. The arguments are those\\n passed to the class constructor expression. If a base class has an\\n ``__init__()`` method, the derived class\\'s ``__init__()`` method,\\n if any, must explicitly call it to ensure proper initialization of\\n the base class part of the instance; for example:\\n ``BaseClass.__init__(self, [args...])``. As a special constraint\\n on constructors, no value may be returned; doing so will cause a\\n ``TypeError`` to be raised at runtime.\\n\\nobject.__del__(self)\\n\\n Called when the instance is about to be destroyed. This is also\\n called a destructor. If a base class has a ``__del__()`` method,\\n the derived class\\'s ``__del__()`` method, if any, must explicitly\\n call it to ensure proper deletion of the base class part of the\\n instance. Note that it is possible (though not recommended!) for\\n the ``__del__()`` method to postpone destruction of the instance by\\n creating a new reference to it. It may then be called at a later\\n time when this new reference is deleted. 
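For example (a minimal sketch; the class ``Resource`` is invented and the behaviour shown assumes CPython's reference counting):

    class Resource:
        def __del__(self):
            print("releasing resource")

    r = Resource()
    r2 = r           # a second reference to the same instance
    del r            # nothing printed: one reference is left
    del r2           # "releasing resource" once the count reaches zero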
It is not guaranteed that\\n ``__del__()`` methods are called for objects that still exist when\\n the interpreter exits.\\n\\n Note: ``del x`` doesn\\'t directly call ``x.__del__()`` --- the former\\n decrements the reference count for ``x`` by one, and the latter\\n is only called when ``x``\\'s reference count reaches zero. Some\\n common situations that may prevent the reference count of an\\n object from going to zero include: circular references between\\n objects (e.g., a doubly-linked list or a tree data structure with\\n parent and child pointers); a reference to the object on the\\n stack frame of a function that caught an exception (the traceback\\n stored in ``sys.exc_info()[2]`` keeps the stack frame alive); or\\n a reference to the object on the stack frame that raised an\\n unhandled exception in interactive mode (the traceback stored in\\n ``sys.last_traceback`` keeps the stack frame alive). The first\\n situation can only be remedied by explicitly breaking the cycles;\\n the latter two situations can be resolved by storing ``None`` in\\n ``sys.last_traceback``. Circular references which are garbage are\\n detected when the option cycle detector is enabled (it\\'s on by\\n default), but can only be cleaned up if there are no Python-\\n level ``__del__()`` methods involved. Refer to the documentation\\n for the ``gc`` module for more information about how\\n ``__del__()`` methods are handled by the cycle detector,\\n particularly the description of the ``garbage`` value.\\n\\n Warning: Due to the precarious circumstances under which ``__del__()``\\n methods are invoked, exceptions that occur during their execution\\n are ignored, and a warning is printed to ``sys.stderr`` instead.\\n Also, when ``__del__()`` is invoked in response to a module being\\n deleted (e.g., when execution of the program is done), other\\n globals referenced by the ``__del__()`` method may already have\\n been deleted or in the process of being torn down (e.g. the\\n import machinery shutting down). For this reason, ``__del__()``\\n methods should do the absolute minimum needed to maintain\\n external invariants. Starting with version 1.5, Python\\n guarantees that globals whose name begins with a single\\n underscore are deleted from their module before other globals are\\n deleted; if no other references to such globals exist, this may\\n help in assuring that imported modules are still available at the\\n time when the ``__del__()`` method is called.\\n\\nobject.__repr__(self)\\n\\n Called by the ``repr()`` built-in function to compute the\\n \"official\" string representation of an object. If at all possible,\\n this should look like a valid Python expression that could be used\\n to recreate an object with the same value (given an appropriate\\n environment). If this is not possible, a string of the form\\n ``<...some useful description...>`` should be returned. The return\\n value must be a string object. If a class defines ``__repr__()``\\n but not ``__str__()``, then ``__repr__()`` is also used when an\\n \"informal\" string representation of instances of that class is\\n required.\\n\\n This is typically used for debugging, so it is important that the\\n representation is information-rich and unambiguous.\\n\\nobject.__str__(self)\\n\\n Called by ``str(object)`` and the built-in functions ``format()``\\n and ``print()`` to compute the \"informal\" or nicely printable\\n string representation of an object. 
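For example (an illustrative sketch; the class ``Point`` is invented), a class may define both methods:

    class Point:
        def __init__(self, x, y):
            self.x, self.y = x, y

        def __repr__(self):
            # "official" representation, resembling a constructor call
            return "Point({!r}, {!r})".format(self.x, self.y)

        def __str__(self):
            # "informal" representation used by print() and str()
            return "({}, {})".format(self.x, self.y)

    p = Point(1, 2)
    print(repr(p))   # Point(1, 2)
    print(p)         # (1, 2)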
The return value must be a\\n *string* object.\\n\\n This method differs from ``object.__repr__()`` in that there is no\\n expectation that ``__str__()`` return a valid Python expression: a\\n more convenient or concise representation can be used.\\n\\n The default implementation defined by the built-in type ``object``\\n calls ``object.__repr__()``.\\n\\nobject.__bytes__(self)\\n\\n Called by ``bytes()`` to compute a byte-string representation of an\\n object. This should return a ``bytes`` object.\\n\\nobject.__format__(self, format_spec)\\n\\n Called by the ``format()`` built-in function (and by extension, the\\n ``str.format()`` method of class ``str``) to produce a \"formatted\"\\n string representation of an object. The ``format_spec`` argument is\\n a string that contains a description of the formatting options\\n desired. The interpretation of the ``format_spec`` argument is up\\n to the type implementing ``__format__()``, however most classes\\n will either delegate formatting to one of the built-in types, or\\n use a similar formatting option syntax.\\n\\n See *Format Specification Mini-Language* for a description of the\\n standard formatting syntax.\\n\\n The return value must be a string object.\\n\\nobject.__lt__(self, other)\\nobject.__le__(self, other)\\nobject.__eq__(self, other)\\nobject.__ne__(self, other)\\nobject.__gt__(self, other)\\nobject.__ge__(self, other)\\n\\n These are the so-called \"rich comparison\" methods. The\\n correspondence between operator symbols and method names is as\\n follows: ``xy`` calls ``x.__gt__(y)``, and ``x>=y`` calls\\n ``x.__ge__(y)``.\\n\\n A rich comparison method may return the singleton\\n ``NotImplemented`` if it does not implement the operation for a\\n given pair of arguments. By convention, ``False`` and ``True`` are\\n returned for a successful comparison. However, these methods can\\n return any value, so if the comparison operator is used in a\\n Boolean context (e.g., in the condition of an ``if`` statement),\\n Python will call ``bool()`` on the value to determine if the result\\n is true or false.\\n\\n There are no implied relationships among the comparison operators.\\n The truth of ``x==y`` does not imply that ``x!=y`` is false.\\n Accordingly, when defining ``__eq__()``, one should also define\\n ``__ne__()`` so that the operators will behave as expected. See\\n the paragraph on ``__hash__()`` for some important notes on\\n creating *hashable* objects which support custom comparison\\n operations and are usable as dictionary keys.\\n\\n There are no swapped-argument versions of these methods (to be used\\n when the left argument does not support the operation but the right\\n argument does); rather, ``__lt__()`` and ``__gt__()`` are each\\n other\\'s reflection, ``__le__()`` and ``__ge__()`` are each other\\'s\\n reflection, and ``__eq__()`` and ``__ne__()`` are their own\\n reflection.\\n\\n Arguments to rich comparison methods are never coerced.\\n\\n To automatically generate ordering operations from a single root\\n operation, see ``functools.total_ordering()``.\\n\\nobject.__hash__(self)\\n\\n Called by built-in function ``hash()`` and for operations on\\n members of hashed collections including ``set``, ``frozenset``, and\\n ``dict``. ``__hash__()`` should return an integer. The only\\n required property is that objects which compare equal have the same\\n hash value; it is advised to somehow mix together (e.g. 
using\\n exclusive or) the hash values for the components of the object that\\n also play a part in comparison of objects.\\n\\n If a class does not define an ``__eq__()`` method it should not\\n define a ``__hash__()`` operation either; if it defines\\n ``__eq__()`` but not ``__hash__()``, its instances will not be\\n usable as items in hashable collections. If a class defines\\n mutable objects and implements an ``__eq__()`` method, it should\\n not implement ``__hash__()``, since the implementation of hashable\\n collections requires that a key\\'s hash value is immutable (if the\\n object\\'s hash value changes, it will be in the wrong hash bucket).\\n\\n User-defined classes have ``__eq__()`` and ``__hash__()`` methods\\n by default; with them, all objects compare unequal (except with\\n themselves) and ``x.__hash__()`` returns an appropriate value such\\n that ``x == y`` implies both that ``x is y`` and ``hash(x) ==\\n hash(y)``.\\n\\n A class that overrides ``__eq__()`` and does not define\\n ``__hash__()`` will have its ``__hash__()`` implicitly set to\\n ``None``. When the ``__hash__()`` method of a class is ``None``,\\n instances of the class will raise an appropriate ``TypeError`` when\\n a program attempts to retrieve their hash value, and will also be\\n correctly identified as unhashable when checking ``isinstance(obj,\\n collections.Hashable``).\\n\\n If a class that overrides ``__eq__()`` needs to retain the\\n implementation of ``__hash__()`` from a parent class, the\\n interpreter must be told this explicitly by setting ``__hash__ =\\n .__hash__``.\\n\\n If a class that does not override ``__eq__()`` wishes to suppress\\n hash support, it should include ``__hash__ = None`` in the class\\n definition. A class which defines its own ``__hash__()`` that\\n explicitly raises a ``TypeError`` would be incorrectly identified\\n as hashable by an ``isinstance(obj, collections.Hashable)`` call.\\n\\n Note: By default, the ``__hash__()`` values of str, bytes and datetime\\n objects are \"salted\" with an unpredictable random value.\\n Although they remain constant within an individual Python\\n process, they are not predictable between repeated invocations of\\n Python.This is intended to provide protection against a denial-\\n of-service caused by carefully-chosen inputs that exploit the\\n worst case performance of a dict insertion, O(n^2) complexity.\\n See http://www.ocert.org/advisories/ocert-2011-003.html for\\n details.Changing hash values affects the iteration order of\\n dicts, sets and other mappings. Python has never made guarantees\\n about this ordering (and it typically varies between 32-bit and\\n 64-bit builds).See also ``PYTHONHASHSEED``.\\n\\n Changed in version 3.3: Hash randomization is enabled by default.\\n\\nobject.__bool__(self)\\n\\n Called to implement truth value testing and the built-in operation\\n ``bool()``; should return ``False`` or ``True``. When this method\\n is not defined, ``__len__()`` is called, if it is defined, and the\\n object is considered true if its result is nonzero. If a class\\n defines neither ``__len__()`` nor ``__bool__()``, all its instances\\n are considered true.\\n',\n'debugger':'\\n``pdb`` --- The Python Debugger\\n*******************************\\n\\nThe module ``pdb`` defines an interactive source code debugger for\\nPython programs. 
It supports setting (conditional) breakpoints and\\nsingle stepping at the source line level, inspection of stack frames,\\nsource code listing, and evaluation of arbitrary Python code in the\\ncontext of any stack frame. It also supports post-mortem debugging\\nand can be called under program control.\\n\\nThe debugger is extensible -- it is actually defined as the class\\n``Pdb``. This is currently undocumented but easily understood by\\nreading the source. The extension interface uses the modules ``bdb``\\nand ``cmd``.\\n\\nThe debugger\\'s prompt is ``(Pdb)``. Typical usage to run a program\\nunder control of the debugger is:\\n\\n >>> import pdb\\n >>> import mymodule\\n >>> pdb.run(\\'mymodule.test()\\')\\n > (0)?()\\n (Pdb) continue\\n > (1)?()\\n (Pdb) continue\\n NameError: \\'spam\\'\\n > (1)?()\\n (Pdb)\\n\\nChanged in version 3.3: Tab-completion via the ``readline`` module is\\navailable for commands and command arguments, e.g. the current global\\nand local names are offered as arguments of the ``print`` command.\\n\\n``pdb.py`` can also be invoked as a script to debug other scripts.\\nFor example:\\n\\n python3 -m pdb myscript.py\\n\\nWhen invoked as a script, pdb will automatically enter post-mortem\\ndebugging if the program being debugged exits abnormally. After post-\\nmortem debugging (or after normal exit of the program), pdb will\\nrestart the program. Automatic restarting preserves pdb\\'s state (such\\nas breakpoints) and in most cases is more useful than quitting the\\ndebugger upon program\\'s exit.\\n\\nNew in version 3.2: ``pdb.py`` now accepts a ``-c`` option that\\nexecutes commands as if given in a ``.pdbrc`` file, see *Debugger\\nCommands*.\\n\\nThe typical usage to break into the debugger from a running program is\\nto insert\\n\\n import pdb; pdb.set_trace()\\n\\nat the location you want to break into the debugger. You can then\\nstep through the code following this statement, and continue running\\nwithout the debugger using the ``continue`` command.\\n\\nThe typical usage to inspect a crashed program is:\\n\\n >>> import pdb\\n >>> import mymodule\\n >>> mymodule.test()\\n Traceback (most recent call last):\\n File \"\", line 1, in ?\\n File \"./mymodule.py\", line 4, in test\\n test2()\\n File \"./mymodule.py\", line 3, in test2\\n print(spam)\\n NameError: spam\\n >>> pdb.pm()\\n > ./mymodule.py(3)test2()\\n -> print(spam)\\n (Pdb)\\n\\nThe module defines the following functions; each enters the debugger\\nin a slightly different way:\\n\\npdb.run(statement, globals=None, locals=None)\\n\\n Execute the *statement* (given as a string or a code object) under\\n debugger control. The debugger prompt appears before any code is\\n executed; you can set breakpoints and type ``continue``, or you can\\n step through the statement using ``step`` or ``next`` (all these\\n commands are explained below). The optional *globals* and *locals*\\n arguments specify the environment in which the code is executed; by\\n default the dictionary of the module ``__main__`` is used. (See\\n the explanation of the built-in ``exec()`` or ``eval()``\\n functions.)\\n\\npdb.runeval(expression, globals=None, locals=None)\\n\\n Evaluate the *expression* (given as a string or a code object)\\n under debugger control. When ``runeval()`` returns, it returns the\\n value of the expression. Otherwise this function is similar to\\n ``run()``.\\n\\npdb.runcall(function, *args, **kwds)\\n\\n Call the *function* (a function or method object, not a string)\\n with the given arguments. 
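For instance (a sketch only; ``divide`` is an invented function and the debugger output shown is abbreviated):

    >>> import pdb
    >>> def divide(a, b):
    ...     return a / b
    ...
    >>> pdb.runcall(divide, 6, 3)
    > <stdin>(2)divide()
    (Pdb) continue
    2.0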
When ``runcall()`` returns, it returns\\n whatever the function call returned. The debugger prompt appears\\n as soon as the function is entered.\\n\\npdb.set_trace()\\n\\n Enter the debugger at the calling stack frame. This is useful to\\n hard-code a breakpoint at a given point in a program, even if the\\n code is not otherwise being debugged (e.g. when an assertion\\n fails).\\n\\npdb.post_mortem(traceback=None)\\n\\n Enter post-mortem debugging of the given *traceback* object. If no\\n *traceback* is given, it uses the one of the exception that is\\n currently being handled (an exception must be being handled if the\\n default is to be used).\\n\\npdb.pm()\\n\\n Enter post-mortem debugging of the traceback found in\\n ``sys.last_traceback``.\\n\\nThe ``run*`` functions and ``set_trace()`` are aliases for\\ninstantiating the ``Pdb`` class and calling the method of the same\\nname. If you want to access further features, you have to do this\\nyourself:\\n\\nclass class pdb.Pdb(completekey=\\'tab\\', stdin=None, stdout=None, skip=None, nosigint=False)\\n\\n ``Pdb`` is the debugger class.\\n\\n The *completekey*, *stdin* and *stdout* arguments are passed to the\\n underlying ``cmd.Cmd`` class; see the description there.\\n\\n The *skip* argument, if given, must be an iterable of glob-style\\n module name patterns. The debugger will not step into frames that\\n originate in a module that matches one of these patterns. [1]\\n\\n By default, Pdb sets a handler for the SIGINT signal (which is sent\\n when the user presses Ctrl-C on the console) when you give a\\n ``continue`` command. This allows you to break into the debugger\\n again by pressing Ctrl-C. If you want Pdb not to touch the SIGINT\\n handler, set *nosigint* tot true.\\n\\n Example call to enable tracing with *skip*:\\n\\n import pdb; pdb.Pdb(skip=[\\'django.*\\']).set_trace()\\n\\n New in version 3.1: The *skip* argument.\\n\\n New in version 3.2: The *nosigint* argument. Previously, a SIGINT\\n handler was never set by Pdb.\\n\\n run(statement, globals=None, locals=None)\\n runeval(expression, globals=None, locals=None)\\n runcall(function, *args, **kwds)\\n set_trace()\\n\\n See the documentation for the functions explained above.\\n\\n\\nDebugger Commands\\n=================\\n\\nThe commands recognized by the debugger are listed below. Most\\ncommands can be abbreviated to one or two letters as indicated; e.g.\\n``h(elp)`` means that either ``h`` or ``help`` can be used to enter\\nthe help command (but not ``he`` or ``hel``, nor ``H`` or ``Help`` or\\n``HELP``). Arguments to commands must be separated by whitespace\\n(spaces or tabs). Optional arguments are enclosed in square brackets\\n(``[]``) in the command syntax; the square brackets must not be typed.\\nAlternatives in the command syntax are separated by a vertical bar\\n(``|``).\\n\\nEntering a blank line repeats the last command entered. Exception: if\\nthe last command was a ``list`` command, the next 11 lines are listed.\\n\\nCommands that the debugger doesn\\'t recognize are assumed to be Python\\nstatements and are executed in the context of the program being\\ndebugged. Python statements can also be prefixed with an exclamation\\npoint (``!``). This is a powerful way to inspect the program being\\ndebugged; it is even possible to change a variable or call a function.\\nWhen an exception occurs in such a statement, the exception name is\\nprinted but the debugger\\'s state is not changed.\\n\\nThe debugger supports *aliases*. 
Aliases can have parameters which\\nallows one a certain level of adaptability to the context under\\nexamination.\\n\\nMultiple commands may be entered on a single line, separated by\\n``;;``. (A single ``;`` is not used as it is the separator for\\nmultiple commands in a line that is passed to the Python parser.) No\\nintelligence is applied to separating the commands; the input is split\\nat the first ``;;`` pair, even if it is in the middle of a quoted\\nstring.\\n\\nIf a file ``.pdbrc`` exists in the user\\'s home directory or in the\\ncurrent directory, it is read in and executed as if it had been typed\\nat the debugger prompt. This is particularly useful for aliases. If\\nboth files exist, the one in the home directory is read first and\\naliases defined there can be overridden by the local file.\\n\\nChanged in version 3.2: ``.pdbrc`` can now contain commands that\\ncontinue debugging, such as ``continue`` or ``next``. Previously,\\nthese commands had no effect.\\n\\nh(elp) [command]\\n\\n Without argument, print the list of available commands. With a\\n *command* as argument, print help about that command. ``help pdb``\\n displays the full documentation (the docstring of the ``pdb``\\n module). Since the *command* argument must be an identifier,\\n ``help exec`` must be entered to get help on the ``!`` command.\\n\\nw(here)\\n\\n Print a stack trace, with the most recent frame at the bottom. An\\n arrow indicates the current frame, which determines the context of\\n most commands.\\n\\nd(own) [count]\\n\\n Move the current frame *count* (default one) levels down in the\\n stack trace (to a newer frame).\\n\\nu(p) [count]\\n\\n Move the current frame *count* (default one) levels up in the stack\\n trace (to an older frame).\\n\\nb(reak) [([filename:]lineno | function) [, condition]]\\n\\n With a *lineno* argument, set a break there in the current file.\\n With a *function* argument, set a break at the first executable\\n statement within that function. The line number may be prefixed\\n with a filename and a colon, to specify a breakpoint in another\\n file (probably one that hasn\\'t been loaded yet). The file is\\n searched on ``sys.path``. Note that each breakpoint is assigned a\\n number to which all the other breakpoint commands refer.\\n\\n If a second argument is present, it is an expression which must\\n evaluate to true before the breakpoint is honored.\\n\\n Without argument, list all breaks, including for each breakpoint,\\n the number of times that breakpoint has been hit, the current\\n ignore count, and the associated condition if any.\\n\\ntbreak [([filename:]lineno | function) [, condition]]\\n\\n Temporary breakpoint, which is removed automatically when it is\\n first hit. The arguments are the same as for ``break``.\\n\\ncl(ear) [filename:lineno | bpnumber [bpnumber ...]]\\n\\n With a *filename:lineno* argument, clear all the breakpoints at\\n this line. With a space separated list of breakpoint numbers, clear\\n those breakpoints. Without argument, clear all breaks (but first\\n ask confirmation).\\n\\ndisable [bpnumber [bpnumber ...]]\\n\\n Disable the breakpoints given as a space separated list of\\n breakpoint numbers. Disabling a breakpoint means it cannot cause\\n the program to stop execution, but unlike clearing a breakpoint, it\\n remains in the list of breakpoints and can be (re-)enabled.\\n\\nenable [bpnumber [bpnumber ...]]\\n\\n Enable the breakpoints specified.\\n\\nignore bpnumber [count]\\n\\n Set the ignore count for the given breakpoint number. 
If count is\\n omitted, the ignore count is set to 0. A breakpoint becomes active\\n when the ignore count is zero. When non-zero, the count is\\n decremented each time the breakpoint is reached and the breakpoint\\n is not disabled and any associated condition evaluates to true.\\n\\ncondition bpnumber [condition]\\n\\n Set a new *condition* for the breakpoint, an expression which must\\n evaluate to true before the breakpoint is honored. If *condition*\\n is absent, any existing condition is removed; i.e., the breakpoint\\n is made unconditional.\\n\\ncommands [bpnumber]\\n\\n Specify a list of commands for breakpoint number *bpnumber*. The\\n commands themselves appear on the following lines. Type a line\\n containing just ``end`` to terminate the commands. An example:\\n\\n (Pdb) commands 1\\n (com) print some_variable\\n (com) end\\n (Pdb)\\n\\n To remove all commands from a breakpoint, type commands and follow\\n it immediately with ``end``; that is, give no commands.\\n\\n With no *bpnumber* argument, commands refers to the last breakpoint\\n set.\\n\\n You can use breakpoint commands to start your program up again.\\n Simply use the continue command, or step, or any other command that\\n resumes execution.\\n\\n Specifying any command resuming execution (currently continue,\\n step, next, return, jump, quit and their abbreviations) terminates\\n the command list (as if that command was immediately followed by\\n end). This is because any time you resume execution (even with a\\n simple next or step), you may encounter another breakpoint--which\\n could have its own command list, leading to ambiguities about which\\n list to execute.\\n\\n If you use the \\'silent\\' command in the command list, the usual\\n message about stopping at a breakpoint is not printed. This may be\\n desirable for breakpoints that are to print a specific message and\\n then continue. If none of the other commands print anything, you\\n see no sign that the breakpoint was reached.\\n\\ns(tep)\\n\\n Execute the current line, stop at the first possible occasion\\n (either in a function that is called or on the next line in the\\n current function).\\n\\nn(ext)\\n\\n Continue execution until the next line in the current function is\\n reached or it returns. (The difference between ``next`` and\\n ``step`` is that ``step`` stops inside a called function, while\\n ``next`` executes called functions at (nearly) full speed, only\\n stopping at the next line in the current function.)\\n\\nunt(il) [lineno]\\n\\n Without argument, continue execution until the line with a number\\n greater than the current one is reached.\\n\\n With a line number, continue execution until a line with a number\\n greater or equal to that is reached. In both cases, also stop when\\n the current frame returns.\\n\\n Changed in version 3.2: Allow giving an explicit line number.\\n\\nr(eturn)\\n\\n Continue execution until the current function returns.\\n\\nc(ont(inue))\\n\\n Continue execution, only stop when a breakpoint is encountered.\\n\\nj(ump) lineno\\n\\n Set the next line that will be executed. Only available in the\\n bottom-most frame. This lets you jump back and execute code again,\\n or jump forward to skip code that you don\\'t want to run.\\n\\n It should be noted that not all jumps are allowed -- for instance\\n it is not possible to jump into the middle of a ``for`` loop or out\\n of a ``finally`` clause.\\n\\nl(ist) [first[, last]]\\n\\n List source code for the current file. 
Without arguments, list 11\\n lines around the current line or continue the previous listing.\\n With ``.`` as argument, list 11 lines around the current line.\\n With one argument, list 11 lines around at that line. With two\\n arguments, list the given range; if the second argument is less\\n than the first, it is interpreted as a count.\\n\\n The current line in the current frame is indicated by ``->``. If\\n an exception is being debugged, the line where the exception was\\n originally raised or propagated is indicated by ``>>``, if it\\n differs from the current line.\\n\\n New in version 3.2: The ``>>`` marker.\\n\\nll | longlist\\n\\n List all source code for the current function or frame.\\n Interesting lines are marked as for ``list``.\\n\\n New in version 3.2.\\n\\na(rgs)\\n\\n Print the argument list of the current function.\\n\\np(rint) expression\\n\\n Evaluate the *expression* in the current context and print its\\n value.\\n\\npp expression\\n\\n Like the ``print`` command, except the value of the expression is\\n pretty-printed using the ``pprint`` module.\\n\\nwhatis expression\\n\\n Print the type of the *expression*.\\n\\nsource expression\\n\\n Try to get source code for the given object and display it.\\n\\n New in version 3.2.\\n\\ndisplay [expression]\\n\\n Display the value of the expression if it changed, each time\\n execution stops in the current frame.\\n\\n Without expression, list all display expressions for the current\\n frame.\\n\\n New in version 3.2.\\n\\nundisplay [expression]\\n\\n Do not display the expression any more in the current frame.\\n Without expression, clear all display expressions for the current\\n frame.\\n\\n New in version 3.2.\\n\\ninteract\\n\\n Start an interative interpreter (using the ``code`` module) whose\\n global namespace contains all the (global and local) names found in\\n the current scope.\\n\\n New in version 3.2.\\n\\nalias [name [command]]\\n\\n Create an alias called *name* that executes *command*. The command\\n must *not* be enclosed in quotes. Replaceable parameters can be\\n indicated by ``%1``, ``%2``, and so on, while ``%*`` is replaced by\\n all the parameters. If no command is given, the current alias for\\n *name* is shown. If no arguments are given, all aliases are listed.\\n\\n Aliases may be nested and can contain anything that can be legally\\n typed at the pdb prompt. Note that internal pdb commands *can* be\\n overridden by aliases. Such a command is then hidden until the\\n alias is removed. Aliasing is recursively applied to the first\\n word of the command line; all other words in the line are left\\n alone.\\n\\n As an example, here are two useful aliases (especially when placed\\n in the ``.pdbrc`` file):\\n\\n # Print instance variables (usage \"pi classInst\")\\n alias pi for k in %1.__dict__.keys(): print(\"%1.\",k,\"=\",%1.__dict__[k])\\n # Print instance variables in self\\n alias ps pi self\\n\\nunalias name\\n\\n Delete the specified alias.\\n\\n! statement\\n\\n Execute the (one-line) *statement* in the context of the current\\n stack frame. The exclamation point can be omitted unless the first\\n word of the statement resembles a debugger command. To set a\\n global variable, you can prefix the assignment command with a\\n ``global`` statement on the same line, e.g.:\\n\\n (Pdb) global list_options; list_options = [\\'-l\\']\\n (Pdb)\\n\\nrun [args ...]\\nrestart [args ...]\\n\\n Restart the debugged Python program. 
If an argument is supplied,\\n it is split with ``shlex`` and the result is used as the new\\n ``sys.argv``. History, breakpoints, actions and debugger options\\n are preserved. ``restart`` is an alias for ``run``.\\n\\nq(uit)\\n\\n Quit from the debugger. The program being executed is aborted.\\n\\n-[ Footnotes ]-\\n\\n[1] Whether a frame is considered to originate in a certain module is\\n determined by the ``__name__`` in the frame globals.\\n',\n'del':'\\nThe ``del`` statement\\n*********************\\n\\n del_stmt ::= \"del\" target_list\\n\\nDeletion is recursively defined very similar to the way assignment is\\ndefined. Rather than spelling it out in full details, here are some\\nhints.\\n\\nDeletion of a target list recursively deletes each target, from left\\nto right.\\n\\nDeletion of a name removes the binding of that name from the local or\\nglobal namespace, depending on whether the name occurs in a ``global``\\nstatement in the same code block. If the name is unbound, a\\n``NameError`` exception will be raised.\\n\\nDeletion of attribute references, subscriptions and slicings is passed\\nto the primary object involved; deletion of a slicing is in general\\nequivalent to assignment of an empty slice of the right type (but even\\nthis is determined by the sliced object).\\n\\nChanged in version 3.2: Previously it was illegal to delete a name\\nfrom the local namespace if it occurs as a free variable in a nested\\nblock.\\n',\n'dict':'\\nDictionary displays\\n*******************\\n\\nA dictionary display is a possibly empty series of key/datum pairs\\nenclosed in curly braces:\\n\\n dict_display ::= \"{\" [key_datum_list | dict_comprehension] \"}\"\\n key_datum_list ::= key_datum (\",\" key_datum)* [\",\"]\\n key_datum ::= expression \":\" expression\\n dict_comprehension ::= expression \":\" expression comp_for\\n\\nA dictionary display yields a new dictionary object.\\n\\nIf a comma-separated sequence of key/datum pairs is given, they are\\nevaluated from left to right to define the entries of the dictionary:\\neach key object is used as a key into the dictionary to store the\\ncorresponding datum. This means that you can specify the same key\\nmultiple times in the key/datum list, and the final dictionary\\'s value\\nfor that key will be the last one given.\\n\\nA dict comprehension, in contrast to list and set comprehensions,\\nneeds two expressions separated with a colon followed by the usual\\n\"for\" and \"if\" clauses. When the comprehension is run, the resulting\\nkey and value elements are inserted in the new dictionary in the order\\nthey are produced.\\n\\nRestrictions on the types of the key values are listed earlier in\\nsection *The standard type hierarchy*. (To summarize, the key type\\nshould be *hashable*, which excludes all mutable objects.) Clashes\\nbetween duplicate keys are not detected; the last datum (textually\\nrightmost in the display) stored for a given key value prevails.\\n',\n'dynamic-features':'\\nInteraction with dynamic features\\n*********************************\\n\\nThere are several cases where Python statements are illegal when used\\nin conjunction with nested scopes that contain free variables.\\n\\nIf a variable is referenced in an enclosing scope, it is illegal to\\ndelete the name. 
An error will be reported at compile time.\\n\\nIf the wild card form of import --- ``import *`` --- is used in a\\nfunction and the function contains or is a nested block with free\\nvariables, the compiler will raise a ``SyntaxError``.\\n\\nThe ``eval()`` and ``exec()`` functions do not have access to the full\\nenvironment for resolving names. Names may be resolved in the local\\nand global namespaces of the caller. Free variables are not resolved\\nin the nearest enclosing namespace, but in the global namespace. [1]\\nThe ``exec()`` and ``eval()`` functions have optional arguments to\\noverride the global and local namespace. If only one namespace is\\nspecified, it is used for both.\\n',\n'else':'\\nThe ``if`` statement\\n********************\\n\\nThe ``if`` statement is used for conditional execution:\\n\\n if_stmt ::= \"if\" expression \":\" suite\\n ( \"elif\" expression \":\" suite )*\\n [\"else\" \":\" suite]\\n\\nIt selects exactly one of the suites by evaluating the expressions one\\nby one until one is found to be true (see section *Boolean operations*\\nfor the definition of true and false); then that suite is executed\\n(and no other part of the ``if`` statement is executed or evaluated).\\nIf all expressions are false, the suite of the ``else`` clause, if\\npresent, is executed.\\n',\n'exceptions':'\\nExceptions\\n**********\\n\\nExceptions are a means of breaking out of the normal flow of control\\nof a code block in order to handle errors or other exceptional\\nconditions. An exception is *raised* at the point where the error is\\ndetected; it may be *handled* by the surrounding code block or by any\\ncode block that directly or indirectly invoked the code block where\\nthe error occurred.\\n\\nThe Python interpreter raises an exception when it detects a run-time\\nerror (such as division by zero). A Python program can also\\nexplicitly raise an exception with the ``raise`` statement. Exception\\nhandlers are specified with the ``try`` ... ``except`` statement. The\\n``finally`` clause of such a statement can be used to specify cleanup\\ncode which does not handle the exception, but is executed whether an\\nexception occurred or not in the preceding code.\\n\\nPython uses the \"termination\" model of error handling: an exception\\nhandler can find out what happened and continue execution at an outer\\nlevel, but it cannot repair the cause of the error and retry the\\nfailing operation (except by re-entering the offending piece of code\\nfrom the top).\\n\\nWhen an exception is not handled at all, the interpreter terminates\\nexecution of the program, or returns to its interactive main loop. In\\neither case, it prints a stack backtrace, except when the exception is\\n``SystemExit``.\\n\\nExceptions are identified by class instances. The ``except`` clause\\nis selected depending on the class of the instance: it must reference\\nthe class of the instance or a base class thereof. The instance can\\nbe received by the handler and can carry additional information about\\nthe exceptional condition.\\n\\nNote: Exception messages are not part of the Python API. 
Their contents\\n may change from one version of Python to the next without warning\\n and should not be relied on by code which will run under multiple\\n versions of the interpreter.\\n\\nSee also the description of the ``try`` statement in section *The try\\nstatement* and ``raise`` statement in section *The raise statement*.\\n\\n-[ Footnotes ]-\\n\\n[1] This limitation occurs because the code that is executed by these\\n operations is not available at the time the module is compiled.\\n',\n'execmodel':'\\nExecution model\\n***************\\n\\n\\nNaming and binding\\n==================\\n\\n*Names* refer to objects. Names are introduced by name binding\\noperations. Each occurrence of a name in the program text refers to\\nthe *binding* of that name established in the innermost function block\\ncontaining the use.\\n\\nA *block* is a piece of Python program text that is executed as a\\nunit. The following are blocks: a module, a function body, and a class\\ndefinition. Each command typed interactively is a block. A script\\nfile (a file given as standard input to the interpreter or specified\\non the interpreter command line the first argument) is a code block.\\nA script command (a command specified on the interpreter command line\\nwith the \\'**-c**\\' option) is a code block. The string argument passed\\nto the built-in functions ``eval()`` and ``exec()`` is a code block.\\n\\nA code block is executed in an *execution frame*. A frame contains\\nsome administrative information (used for debugging) and determines\\nwhere and how execution continues after the code block\\'s execution has\\ncompleted.\\n\\nA *scope* defines the visibility of a name within a block. If a local\\nvariable is defined in a block, its scope includes that block. If the\\ndefinition occurs in a function block, the scope extends to any blocks\\ncontained within the defining one, unless a contained block introduces\\na different binding for the name. The scope of names defined in a\\nclass block is limited to the class block; it does not extend to the\\ncode blocks of methods -- this includes comprehensions and generator\\nexpressions since they are implemented using a function scope. This\\nmeans that the following will fail:\\n\\n class A:\\n a = 42\\n b = list(a + i for i in range(10))\\n\\nWhen a name is used in a code block, it is resolved using the nearest\\nenclosing scope. The set of all such scopes visible to a code block\\nis called the block\\'s *environment*.\\n\\nIf a name is bound in a block, it is a local variable of that block,\\nunless declared as ``nonlocal``. If a name is bound at the module\\nlevel, it is a global variable. (The variables of the module code\\nblock are local and global.) If a variable is used in a code block\\nbut not defined there, it is a *free variable*.\\n\\nWhen a name is not found at all, a ``NameError`` exception is raised.\\nIf the name refers to a local variable that has not been bound, a\\n``UnboundLocalError`` exception is raised. ``UnboundLocalError`` is a\\nsubclass of ``NameError``.\\n\\nThe following constructs bind names: formal parameters to functions,\\n``import`` statements, class and function definitions (these bind the\\nclass or function name in the defining block), and targets that are\\nidentifiers if occurring in an assignment, ``for`` loop header, or\\nafter ``as`` in a ``with`` statement or ``except`` clause. The\\n``import`` statement of the form ``from ... 
import *`` binds all names\\ndefined in the imported module, except those beginning with an\\nunderscore. This form may only be used at the module level.\\n\\nA target occurring in a ``del`` statement is also considered bound for\\nthis purpose (though the actual semantics are to unbind the name).\\n\\nEach assignment or import statement occurs within a block defined by a\\nclass or function definition or at the module level (the top-level\\ncode block).\\n\\nIf a name binding operation occurs anywhere within a code block, all\\nuses of the name within the block are treated as references to the\\ncurrent block. This can lead to errors when a name is used within a\\nblock before it is bound. This rule is subtle. Python lacks\\ndeclarations and allows name binding operations to occur anywhere\\nwithin a code block. The local variables of a code block can be\\ndetermined by scanning the entire text of the block for name binding\\noperations.\\n\\nIf the ``global`` statement occurs within a block, all uses of the\\nname specified in the statement refer to the binding of that name in\\nthe top-level namespace. Names are resolved in the top-level\\nnamespace by searching the global namespace, i.e. the namespace of the\\nmodule containing the code block, and the builtins namespace, the\\nnamespace of the module ``builtins``. The global namespace is\\nsearched first. If the name is not found there, the builtins\\nnamespace is searched. The global statement must precede all uses of\\nthe name.\\n\\nThe builtins namespace associated with the execution of a code block\\nis actually found by looking up the name ``__builtins__`` in its\\nglobal namespace; this should be a dictionary or a module (in the\\nlatter case the module\\'s dictionary is used). By default, when in the\\n``__main__`` module, ``__builtins__`` is the built-in module\\n``builtins``; when in any other module, ``__builtins__`` is an alias\\nfor the dictionary of the ``builtins`` module itself.\\n``__builtins__`` can be set to a user-created dictionary to create a\\nweak form of restricted execution.\\n\\n**CPython implementation detail:** Users should not touch\\n``__builtins__``; it is strictly an implementation detail. Users\\nwanting to override values in the builtins namespace should ``import``\\nthe ``builtins`` module and modify its attributes appropriately.\\n\\nThe namespace for a module is automatically created the first time a\\nmodule is imported. The main module for a script is always called\\n``__main__``.\\n\\nThe ``global`` statement has the same scope as a name binding\\noperation in the same block. If the nearest enclosing scope for a\\nfree variable contains a global statement, the free variable is\\ntreated as a global.\\n\\nA class definition is an executable statement that may use and define\\nnames. These references follow the normal rules for name resolution.\\nThe namespace of the class definition becomes the attribute dictionary\\nof the class. Names defined at the class scope are not visible in\\nmethods.\\n\\n\\nInteraction with dynamic features\\n---------------------------------\\n\\nThere are several cases where Python statements are illegal when used\\nin conjunction with nested scopes that contain free variables.\\n\\nIf a variable is referenced in an enclosing scope, it is illegal to\\ndelete the name. 
An error will be reported at compile time.\\n\\nIf the wild card form of import --- ``import *`` --- is used in a\\nfunction and the function contains or is a nested block with free\\nvariables, the compiler will raise a ``SyntaxError``.\\n\\nThe ``eval()`` and ``exec()`` functions do not have access to the full\\nenvironment for resolving names. Names may be resolved in the local\\nand global namespaces of the caller. Free variables are not resolved\\nin the nearest enclosing namespace, but in the global namespace. [1]\\nThe ``exec()`` and ``eval()`` functions have optional arguments to\\noverride the global and local namespace. If only one namespace is\\nspecified, it is used for both.\\n\\n\\nExceptions\\n==========\\n\\nExceptions are a means of breaking out of the normal flow of control\\nof a code block in order to handle errors or other exceptional\\nconditions. An exception is *raised* at the point where the error is\\ndetected; it may be *handled* by the surrounding code block or by any\\ncode block that directly or indirectly invoked the code block where\\nthe error occurred.\\n\\nThe Python interpreter raises an exception when it detects a run-time\\nerror (such as division by zero). A Python program can also\\nexplicitly raise an exception with the ``raise`` statement. Exception\\nhandlers are specified with the ``try`` ... ``except`` statement. The\\n``finally`` clause of such a statement can be used to specify cleanup\\ncode which does not handle the exception, but is executed whether an\\nexception occurred or not in the preceding code.\\n\\nPython uses the \"termination\" model of error handling: an exception\\nhandler can find out what happened and continue execution at an outer\\nlevel, but it cannot repair the cause of the error and retry the\\nfailing operation (except by re-entering the offending piece of code\\nfrom the top).\\n\\nWhen an exception is not handled at all, the interpreter terminates\\nexecution of the program, or returns to its interactive main loop. In\\neither case, it prints a stack backtrace, except when the exception is\\n``SystemExit``.\\n\\nExceptions are identified by class instances. The ``except`` clause\\nis selected depending on the class of the instance: it must reference\\nthe class of the instance or a base class thereof. The instance can\\nbe received by the handler and can carry additional information about\\nthe exceptional condition.\\n\\nNote: Exception messages are not part of the Python API. Their contents\\n may change from one version of Python to the next without warning\\n and should not be relied on by code which will run under multiple\\n versions of the interpreter.\\n\\nSee also the description of the ``try`` statement in section *The try\\nstatement* and ``raise`` statement in section *The raise statement*.\\n\\n-[ Footnotes ]-\\n\\n[1] This limitation occurs because the code that is executed by these\\n operations is not available at the time the module is compiled.\\n',\n'exprlists':'\\nExpression lists\\n****************\\n\\n expression_list ::= expression ( \",\" expression )* [\",\"]\\n\\nAn expression list containing at least one comma yields a tuple. The\\nlength of the tuple is the number of expressions in the list. The\\nexpressions are evaluated from left to right.\\n\\nThe trailing comma is required only to create a single tuple (a.k.a. a\\n*singleton*); it is optional in all other cases. A single expression\\nwithout a trailing comma doesn\\'t create a tuple, but rather yields the\\nvalue of that expression. 
(To create an empty tuple, use an empty pair\\nof parentheses: ``()``.)\\n',\n'floating':'\\nFloating point literals\\n***********************\\n\\nFloating point literals are described by the following lexical\\ndefinitions:\\n\\n floatnumber ::= pointfloat | exponentfloat\\n pointfloat ::= [intpart] fraction | intpart \".\"\\n exponentfloat ::= (intpart | pointfloat) exponent\\n intpart ::= digit+\\n fraction ::= \".\" digit+\\n exponent ::= (\"e\" | \"E\") [\"+\" | \"-\"] digit+\\n\\nNote that the integer and exponent parts are always interpreted using\\nradix 10. For example, ``077e010`` is legal, and denotes the same\\nnumber as ``77e10``. The allowed range of floating point literals is\\nimplementation-dependent. Some examples of floating point literals:\\n\\n 3.14 10. .001 1e100 3.14e-10 0e0\\n\\nNote that numeric literals do not include a sign; a phrase like ``-1``\\nis actually an expression composed of the unary operator ``-`` and the\\nliteral ``1``.\\n',\n'for':'\\nThe ``for`` statement\\n*********************\\n\\nThe ``for`` statement is used to iterate over the elements of a\\nsequence (such as a string, tuple or list) or other iterable object:\\n\\n for_stmt ::= \"for\" target_list \"in\" expression_list \":\" suite\\n [\"else\" \":\" suite]\\n\\nThe expression list is evaluated once; it should yield an iterable\\nobject. An iterator is created for the result of the\\n``expression_list``. The suite is then executed once for each item\\nprovided by the iterator, in the order of ascending indices. Each\\nitem in turn is assigned to the target list using the standard rules\\nfor assignments (see *Assignment statements*), and then the suite is\\nexecuted. When the items are exhausted (which is immediately when the\\nsequence is empty or an iterator raises a ``StopIteration``\\nexception), the suite in the ``else`` clause, if present, is executed,\\nand the loop terminates.\\n\\nA ``break`` statement executed in the first suite terminates the loop\\nwithout executing the ``else`` clause\\'s suite. A ``continue``\\nstatement executed in the first suite skips the rest of the suite and\\ncontinues with the next item, or with the ``else`` clause if there was\\nno next item.\\n\\nThe suite may assign to the variable(s) in the target list; this does\\nnot affect the next item assigned to it.\\n\\nNames in the target list are not deleted when the loop is finished,\\nbut if the sequence is empty, it will not have been assigned to at all\\nby the loop. Hint: the built-in function ``range()`` returns an\\niterator of integers suitable to emulate the effect of Pascal\\'s ``for\\ni := a to b do``; e.g., ``list(range(3))`` returns the list ``[0, 1,\\n2]``.\\n\\nNote: There is a subtlety when the sequence is being modified by the loop\\n (this can only occur for mutable sequences, i.e. lists). An\\n internal counter is used to keep track of which item is used next,\\n and this is incremented on each iteration. When this counter has\\n reached the length of the sequence the loop terminates. This means\\n that if the suite deletes the current (or a previous) item from the\\n sequence, the next item will be skipped (since it gets the index of\\n the current item which has already been treated). 
Likewise, if the\\n suite inserts an item in the sequence before the current item, the\\n current item will be treated again the next time through the loop.\\n This can lead to nasty bugs that can be avoided by making a\\n temporary copy using a slice of the whole sequence, e.g.,\\n\\n for x in a[:]:\\n if x < 0: a.remove(x)\\n',\n'formatstrings':'\\nFormat String Syntax\\n********************\\n\\nThe ``str.format()`` method and the ``Formatter`` class share the same\\nsyntax for format strings (although in the case of ``Formatter``,\\nsubclasses can define their own format string syntax).\\n\\nFormat strings contain \"replacement fields\" surrounded by curly braces\\n``{}``. Anything that is not contained in braces is considered literal\\ntext, which is copied unchanged to the output. If you need to include\\na brace character in the literal text, it can be escaped by doubling:\\n``{{`` and ``}}``.\\n\\nThe grammar for a replacement field is as follows:\\n\\n replacement_field ::= \"{\" [field_name] [\"!\" conversion] [\":\" format_spec] \"}\"\\n field_name ::= arg_name (\".\" attribute_name | \"[\" element_index \"]\")*\\n arg_name ::= [identifier | integer]\\n attribute_name ::= identifier\\n element_index ::= integer | index_string\\n index_string ::= +\\n conversion ::= \"r\" | \"s\" | \"a\"\\n format_spec ::= \\n\\nIn less formal terms, the replacement field can start with a\\n*field_name* that specifies the object whose value is to be formatted\\nand inserted into the output instead of the replacement field. The\\n*field_name* is optionally followed by a *conversion* field, which is\\npreceded by an exclamation point ``\\'!\\'``, and a *format_spec*, which\\nis preceded by a colon ``\\':\\'``. These specify a non-default format\\nfor the replacement value.\\n\\nSee also the *Format Specification Mini-Language* section.\\n\\nThe *field_name* itself begins with an *arg_name* that is either a\\nnumber or a keyword. If it\\'s a number, it refers to a positional\\nargument, and if it\\'s a keyword, it refers to a named keyword\\nargument. If the numerical arg_names in a format string are 0, 1, 2,\\n... in sequence, they can all be omitted (not just some) and the\\nnumbers 0, 1, 2, ... will be automatically inserted in that order.\\nBecause *arg_name* is not quote-delimited, it is not possible to\\nspecify arbitrary dictionary keys (e.g., the strings ``\\'10\\'`` or\\n``\\':-]\\'``) within a format string. The *arg_name* can be followed by\\nany number of index or attribute expressions. An expression of the\\nform ``\\'.name\\'`` selects the named attribute using ``getattr()``,\\nwhile an expression of the form ``\\'[index]\\'`` does an index lookup\\nusing ``__getitem__()``.\\n\\nChanged in version 3.1: The positional argument specifiers can be\\nomitted, so ``\\'{} {}\\'`` is equivalent to ``\\'{0} {1}\\'``.\\n\\nSome simple format string examples:\\n\\n \"First, thou shalt count to {0}\" # References first positional argument\\n \"Bring me a {}\" # Implicitly references the first positional argument\\n \"From {} to {}\" # Same as \"From {0} to {1}\"\\n \"My quest is {name}\" # References keyword argument \\'name\\'\\n \"Weight in tons {0.weight}\" # \\'weight\\' attribute of first positional arg\\n \"Units destroyed: {players[0]}\" # First element of keyword argument \\'players\\'.\\n\\nThe *conversion* field causes a type coercion before formatting.\\nNormally, the job of formatting a value is done by the\\n``__format__()`` method of the value itself. 
However, in some cases\\nit is desirable to force a type to be formatted as a string,\\noverriding its own definition of formatting. By converting the value\\nto a string before calling ``__format__()``, the normal formatting\\nlogic is bypassed.\\n\\nThree conversion flags are currently supported: ``\\'!s\\'`` which calls\\n``str()`` on the value, ``\\'!r\\'`` which calls ``repr()`` and ``\\'!a\\'``\\nwhich calls ``ascii()``.\\n\\nSome examples:\\n\\n \"Harold\\'s a clever {0!s}\" # Calls str() on the argument first\\n \"Bring out the holy {name!r}\" # Calls repr() on the argument first\\n \"More {!a}\" # Calls ascii() on the argument first\\n\\nThe *format_spec* field contains a specification of how the value\\nshould be presented, including such details as field width, alignment,\\npadding, decimal precision and so on. Each value type can define its\\nown \"formatting mini-language\" or interpretation of the *format_spec*.\\n\\nMost built-in types support a common formatting mini-language, which\\nis described in the next section.\\n\\nA *format_spec* field can also include nested replacement fields\\nwithin it. These nested replacement fields can contain only a field\\nname; conversion flags and format specifications are not allowed. The\\nreplacement fields within the format_spec are substituted before the\\n*format_spec* string is interpreted. This allows the formatting of a\\nvalue to be dynamically specified.\\n\\nSee the *Format examples* section for some examples.\\n\\n\\nFormat Specification Mini-Language\\n==================================\\n\\n\"Format specifications\" are used within replacement fields contained\\nwithin a format string to define how individual values are presented\\n(see *Format String Syntax*). They can also be passed directly to the\\nbuilt-in ``format()`` function. Each formattable type may define how\\nthe format specification is to be interpreted.\\n\\nMost built-in types implement the following options for format\\nspecifications, although some of the formatting options are only\\nsupported by the numeric types.\\n\\nA general convention is that an empty format string (``\"\"``) produces\\nthe same result as if you had called ``str()`` on the value. A non-\\nempty format string typically modifies the result.\\n\\nThe general form of a *standard format specifier* is:\\n\\n format_spec ::= [[fill]align][sign][#][0][width][,][.precision][type]\\n fill ::= \\n align ::= \"<\" | \">\" | \"=\" | \"^\"\\n sign ::= \"+\" | \"-\" | \" \"\\n width ::= integer\\n precision ::= integer\\n type ::= \"b\" | \"c\" | \"d\" | \"e\" | \"E\" | \"f\" | \"F\" | \"g\" | \"G\" | \"n\" | \"o\" | \"s\" | \"x\" | \"X\" | \"%\"\\n\\nThe *fill* character can be any character other than \\'{\\' or \\'}\\'. The\\npresence of a fill character is signaled by the character following\\nit, which must be one of the alignment options. If the second\\ncharacter of *format_spec* is not a valid alignment option, then it is\\nassumed that both the fill character and the alignment option are\\nabsent.\\n\\nThe meaning of the various alignment options is as follows:\\n\\n +-----------+------------------------------------------------------------+\\n | Option | Meaning |\\n +===========+============================================================+\\n | ``\\'<\\'`` | Forces the field to be left-aligned within the available |\\n | | space (this is the default for most objects). 
|\\n +-----------+------------------------------------------------------------+\\n | ``\\'>\\'`` | Forces the field to be right-aligned within the available |\\n | | space (this is the default for numbers). |\\n +-----------+------------------------------------------------------------+\\n | ``\\'=\\'`` | Forces the padding to be placed after the sign (if any) |\\n | | but before the digits. This is used for printing fields |\\n | | in the form \\'+000000120\\'. This alignment option is only |\\n | | valid for numeric types. |\\n +-----------+------------------------------------------------------------+\\n | ``\\'^\\'`` | Forces the field to be centered within the available |\\n | | space. |\\n +-----------+------------------------------------------------------------+\\n\\nNote that unless a minimum field width is defined, the field width\\nwill always be the same size as the data to fill it, so that the\\nalignment option has no meaning in this case.\\n\\nThe *sign* option is only valid for number types, and can be one of\\nthe following:\\n\\n +-----------+------------------------------------------------------------+\\n | Option | Meaning |\\n +===========+============================================================+\\n | ``\\'+\\'`` | indicates that a sign should be used for both positive as |\\n | | well as negative numbers. |\\n +-----------+------------------------------------------------------------+\\n | ``\\'-\\'`` | indicates that a sign should be used only for negative |\\n | | numbers (this is the default behavior). |\\n +-----------+------------------------------------------------------------+\\n | space | indicates that a leading space should be used on positive |\\n | | numbers, and a minus sign on negative numbers. |\\n +-----------+------------------------------------------------------------+\\n\\nThe ``\\'#\\'`` option causes the \"alternate form\" to be used for the\\nconversion. The alternate form is defined differently for different\\ntypes. This option is only valid for integer, float, complex and\\nDecimal types. For integers, when binary, octal, or hexadecimal output\\nis used, this option adds the prefix respective ``\\'0b\\'``, ``\\'0o\\'``, or\\n``\\'0x\\'`` to the output value. For floats, complex and Decimal the\\nalternate form causes the result of the conversion to always contain a\\ndecimal-point character, even if no digits follow it. Normally, a\\ndecimal-point character appears in the result of these conversions\\nonly if a digit follows it. In addition, for ``\\'g\\'`` and ``\\'G\\'``\\nconversions, trailing zeros are not removed from the result.\\n\\nThe ``\\',\\'`` option signals the use of a comma for a thousands\\nseparator. For a locale aware separator, use the ``\\'n\\'`` integer\\npresentation type instead.\\n\\nChanged in version 3.1: Added the ``\\',\\'`` option (see also **PEP\\n378**).\\n\\n*width* is a decimal integer defining the minimum field width. If not\\nspecified, then the field width will be determined by the content.\\n\\nPreceding the *width* field by a zero (``\\'0\\'``) character enables\\nsign-aware zero-padding for numeric types. 
This is equivalent to a\\n*fill* character of ``\\'0\\'`` with an *alignment* type of ``\\'=\\'``.\\n\\nThe *precision* is a decimal number indicating how many digits should\\nbe displayed after the decimal point for a floating point value\\nformatted with ``\\'f\\'`` and ``\\'F\\'``, or before and after the decimal\\npoint for a floating point value formatted with ``\\'g\\'`` or ``\\'G\\'``.\\nFor non-number types the field indicates the maximum field size - in\\nother words, how many characters will be used from the field content.\\nThe *precision* is not allowed for integer values.\\n\\nFinally, the *type* determines how the data should be presented.\\n\\nThe available string presentation types are:\\n\\n +-----------+------------------------------------------------------------+\\n | Type | Meaning |\\n +===========+============================================================+\\n | ``\\'s\\'`` | String format. This is the default type for strings and |\\n | | may be omitted. |\\n +-----------+------------------------------------------------------------+\\n | None | The same as ``\\'s\\'``. |\\n +-----------+------------------------------------------------------------+\\n\\nThe available integer presentation types are:\\n\\n +-----------+------------------------------------------------------------+\\n | Type | Meaning |\\n +===========+============================================================+\\n | ``\\'b\\'`` | Binary format. Outputs the number in base 2. |\\n +-----------+------------------------------------------------------------+\\n | ``\\'c\\'`` | Character. Converts the integer to the corresponding |\\n | | unicode character before printing. |\\n +-----------+------------------------------------------------------------+\\n | ``\\'d\\'`` | Decimal Integer. Outputs the number in base 10. |\\n +-----------+------------------------------------------------------------+\\n | ``\\'o\\'`` | Octal format. Outputs the number in base 8. |\\n +-----------+------------------------------------------------------------+\\n | ``\\'x\\'`` | Hex format. Outputs the number in base 16, using lower- |\\n | | case letters for the digits above 9. |\\n +-----------+------------------------------------------------------------+\\n | ``\\'X\\'`` | Hex format. Outputs the number in base 16, using upper- |\\n | | case letters for the digits above 9. |\\n +-----------+------------------------------------------------------------+\\n | ``\\'n\\'`` | Number. This is the same as ``\\'d\\'``, except that it uses |\\n | | the current locale setting to insert the appropriate |\\n | | number separator characters. |\\n +-----------+------------------------------------------------------------+\\n | None | The same as ``\\'d\\'``. |\\n +-----------+------------------------------------------------------------+\\n\\nIn addition to the above presentation types, integers can be formatted\\nwith the floating point presentation types listed below (except\\n``\\'n\\'`` and None). When doing so, ``float()`` is used to convert the\\ninteger to a floating point number before formatting.\\n\\nThe available presentation types for floating point and decimal values\\nare:\\n\\n +-----------+------------------------------------------------------------+\\n | Type | Meaning |\\n +===========+============================================================+\\n | ``\\'e\\'`` | Exponent notation. Prints the number in scientific |\\n | | notation using the letter \\'e\\' to indicate the exponent. 
|\\n +-----------+------------------------------------------------------------+\\n | ``\\'E\\'`` | Exponent notation. Same as ``\\'e\\'`` except it uses an upper |\\n | | case \\'E\\' as the separator character. |\\n +-----------+------------------------------------------------------------+\\n | ``\\'f\\'`` | Fixed point. Displays the number as a fixed-point number. |\\n +-----------+------------------------------------------------------------+\\n | ``\\'F\\'`` | Fixed point. Same as ``\\'f\\'``, but converts ``nan`` to |\\n | | ``NAN`` and ``inf`` to ``INF``. |\\n +-----------+------------------------------------------------------------+\\n | ``\\'g\\'`` | General format. For a given precision ``p >= 1``, this |\\n | | rounds the number to ``p`` significant digits and then |\\n | | formats the result in either fixed-point format or in |\\n | | scientific notation, depending on its magnitude. The |\\n | | precise rules are as follows: suppose that the result |\\n | | formatted with presentation type ``\\'e\\'`` and precision |\\n | | ``p-1`` would have exponent ``exp``. Then if ``-4 <= exp |\\n | | < p``, the number is formatted with presentation type |\\n | | ``\\'f\\'`` and precision ``p-1-exp``. Otherwise, the number |\\n | | is formatted with presentation type ``\\'e\\'`` and precision |\\n | | ``p-1``. In both cases insignificant trailing zeros are |\\n | | removed from the significand, and the decimal point is |\\n | | also removed if there are no remaining digits following |\\n | | it. Positive and negative infinity, positive and negative |\\n | | zero, and nans, are formatted as ``inf``, ``-inf``, ``0``, |\\n | | ``-0`` and ``nan`` respectively, regardless of the |\\n | | precision. A precision of ``0`` is treated as equivalent |\\n | | to a precision of ``1``. |\\n +-----------+------------------------------------------------------------+\\n | ``\\'G\\'`` | General format. Same as ``\\'g\\'`` except switches to ``\\'E\\'`` |\\n | | if the number gets too large. The representations of |\\n | | infinity and NaN are uppercased, too. |\\n +-----------+------------------------------------------------------------+\\n | ``\\'n\\'`` | Number. This is the same as ``\\'g\\'``, except that it uses |\\n | | the current locale setting to insert the appropriate |\\n | | number separator characters. |\\n +-----------+------------------------------------------------------------+\\n | ``\\'%\\'`` | Percentage. Multiplies the number by 100 and displays in |\\n | | fixed (``\\'f\\'``) format, followed by a percent sign. |\\n +-----------+------------------------------------------------------------+\\n | None | Similar to ``\\'g\\'``, except with at least one digit past |\\n | | the decimal point and a default precision of 12. This is |\\n | | intended to match ``str()``, except you can add the other |\\n | | format modifiers. |\\n +-----------+------------------------------------------------------------+\\n\\n\\nFormat examples\\n===============\\n\\nThis section contains examples of the new format syntax and comparison\\nwith the old ``%``-formatting.\\n\\nIn most of the cases the syntax is similar to the old\\n``%``-formatting, with the addition of the ``{}`` and with ``:`` used\\ninstead of ``%``. 
For example, ``\\'%03.2f\\'`` can be translated to\\n``\\'{:03.2f}\\'``.\\n\\nThe new format syntax also supports new and different options, shown\\nin the follow examples.\\n\\nAccessing arguments by position:\\n\\n >>> \\'{0}, {1}, {2}\\'.format(\\'a\\', \\'b\\', \\'c\\')\\n \\'a, b, c\\'\\n >>> \\'{}, {}, {}\\'.format(\\'a\\', \\'b\\', \\'c\\') # 3.1+ only\\n \\'a, b, c\\'\\n >>> \\'{2}, {1}, {0}\\'.format(\\'a\\', \\'b\\', \\'c\\')\\n \\'c, b, a\\'\\n >>> \\'{2}, {1}, {0}\\'.format(*\\'abc\\') # unpacking argument sequence\\n \\'c, b, a\\'\\n >>> \\'{0}{1}{0}\\'.format(\\'abra\\', \\'cad\\') # arguments\\' indices can be repeated\\n \\'abracadabra\\'\\n\\nAccessing arguments by name:\\n\\n >>> \\'Coordinates: {latitude}, {longitude}\\'.format(latitude=\\'37.24N\\', longitude=\\'-115.81W\\')\\n \\'Coordinates: 37.24N, -115.81W\\'\\n >>> coord = {\\'latitude\\': \\'37.24N\\', \\'longitude\\': \\'-115.81W\\'}\\n >>> \\'Coordinates: {latitude}, {longitude}\\'.format(**coord)\\n \\'Coordinates: 37.24N, -115.81W\\'\\n\\nAccessing arguments\\' attributes:\\n\\n >>> c = 3-5j\\n >>> (\\'The complex number {0} is formed from the real part {0.real} \\'\\n ... \\'and the imaginary part {0.imag}.\\').format(c)\\n \\'The complex number (3-5j) is formed from the real part 3.0 and the imaginary part -5.0.\\'\\n >>> class Point:\\n ... def __init__(self, x, y):\\n ... self.x, self.y = x, y\\n ... def __str__(self):\\n ... return \\'Point({self.x}, {self.y})\\'.format(self=self)\\n ...\\n >>> str(Point(4, 2))\\n \\'Point(4, 2)\\'\\n\\nAccessing arguments\\' items:\\n\\n >>> coord = (3, 5)\\n >>> \\'X: {0[0]}; Y: {0[1]}\\'.format(coord)\\n \\'X: 3; Y: 5\\'\\n\\nReplacing ``%s`` and ``%r``:\\n\\n >>> \"repr() shows quotes: {!r}; str() doesn\\'t: {!s}\".format(\\'test1\\', \\'test2\\')\\n \"repr() shows quotes: \\'test1\\'; str() doesn\\'t: test2\"\\n\\nAligning the text and specifying a width:\\n\\n >>> \\'{:<30}\\'.format(\\'left aligned\\')\\n \\'left aligned \\'\\n >>> \\'{:>30}\\'.format(\\'right aligned\\')\\n \\' right aligned\\'\\n >>> \\'{:^30}\\'.format(\\'centered\\')\\n \\' centered \\'\\n >>> \\'{:*^30}\\'.format(\\'centered\\') # use \\'*\\' as a fill char\\n \\'***********centered***********\\'\\n\\nReplacing ``%+f``, ``%-f``, and ``% f`` and specifying a sign:\\n\\n >>> \\'{:+f}; {:+f}\\'.format(3.14, -3.14) # show it always\\n \\'+3.140000; -3.140000\\'\\n >>> \\'{: f}; {: f}\\'.format(3.14, -3.14) # show a space for positive numbers\\n \\' 3.140000; -3.140000\\'\\n >>> \\'{:-f}; {:-f}\\'.format(3.14, -3.14) # show only the minus -- same as \\'{:f}; {:f}\\'\\n \\'3.140000; -3.140000\\'\\n\\nReplacing ``%x`` and ``%o`` and converting the value to different\\nbases:\\n\\n >>> # format also supports binary numbers\\n >>> \"int: {0:d}; hex: {0:x}; oct: {0:o}; bin: {0:b}\".format(42)\\n \\'int: 42; hex: 2a; oct: 52; bin: 101010\\'\\n >>> # with 0x, 0o, or 0b as prefix:\\n >>> \"int: {0:d}; hex: {0:#x}; oct: {0:#o}; bin: {0:#b}\".format(42)\\n \\'int: 42; hex: 0x2a; oct: 0o52; bin: 0b101010\\'\\n\\nUsing the comma as a thousands separator:\\n\\n >>> \\'{:,}\\'.format(1234567890)\\n \\'1,234,567,890\\'\\n\\nExpressing a percentage:\\n\\n >>> points = 19\\n >>> total = 22\\n >>> \\'Correct answers: {:.2%}\\'.format(points/total)\\n \\'Correct answers: 86.36%\\'\\n\\nUsing type-specific formatting:\\n\\n >>> import datetime\\n >>> d = datetime.datetime(2010, 7, 4, 12, 15, 58)\\n >>> \\'{:%Y-%m-%d %H:%M:%S}\\'.format(d)\\n \\'2010-07-04 12:15:58\\'\\n\\nNesting arguments and more complex 
examples:\\n\\n >>> for align, text in zip(\\'<^>\\', [\\'left\\', \\'center\\', \\'right\\']):\\n ... \\'{0:{fill}{align}16}\\'.format(text, fill=align, align=align)\\n ...\\n \\'left<<<<<<<<<<<<\\'\\n \\'^^^^^center^^^^^\\'\\n \\'>>>>>>>>>>>right\\'\\n >>>\\n >>> octets = [192, 168, 0, 1]\\n >>> \\'{:02X}{:02X}{:02X}{:02X}\\'.format(*octets)\\n \\'C0A80001\\'\\n >>> int(_, 16)\\n 3232235521\\n >>>\\n >>> width = 5\\n >>> for num in range(5,12): #doctest: +NORMALIZE_WHITESPACE\\n ... for base in \\'dXob\\':\\n ... print(\\'{0:{width}{base}}\\'.format(num, base=base, width=width), end=\\' \\')\\n ... print()\\n ...\\n 5 5 5 101\\n 6 6 6 110\\n 7 7 7 111\\n 8 8 10 1000\\n 9 9 11 1001\\n 10 A 12 1010\\n 11 B 13 1011\\n',\n'function':'\\nFunction definitions\\n********************\\n\\nA function definition defines a user-defined function object (see\\nsection *The standard type hierarchy*):\\n\\n funcdef ::= [decorators] \"def\" funcname \"(\" [parameter_list] \")\" [\"->\" expression] \":\" suite\\n decorators ::= decorator+\\n decorator ::= \"@\" dotted_name [\"(\" [parameter_list [\",\"]] \")\"] NEWLINE\\n dotted_name ::= identifier (\".\" identifier)*\\n parameter_list ::= (defparameter \",\")*\\n ( \"*\" [parameter] (\",\" defparameter)* [\",\" \"**\" parameter]\\n | \"**\" parameter\\n | defparameter [\",\"] )\\n parameter ::= identifier [\":\" expression]\\n defparameter ::= parameter [\"=\" expression]\\n funcname ::= identifier\\n\\nA function definition is an executable statement. Its execution binds\\nthe function name in the current local namespace to a function object\\n(a wrapper around the executable code for the function). This\\nfunction object contains a reference to the current global namespace\\nas the global namespace to be used when the function is called.\\n\\nThe function definition does not execute the function body; this gets\\nexecuted only when the function is called. [3]\\n\\nA function definition may be wrapped by one or more *decorator*\\nexpressions. Decorator expressions are evaluated when the function is\\ndefined, in the scope that contains the function definition. The\\nresult must be a callable, which is invoked with the function object\\nas the only argument. The returned value is bound to the function name\\ninstead of the function object. Multiple decorators are applied in\\nnested fashion. For example, the following code\\n\\n @f1(arg)\\n @f2\\n def func(): pass\\n\\nis equivalent to\\n\\n def func(): pass\\n func = f1(arg)(f2(func))\\n\\nWhen one or more *parameters* have the form *parameter* ``=``\\n*expression*, the function is said to have \"default parameter values.\"\\nFor a parameter with a default value, the corresponding *argument* may\\nbe omitted from a call, in which case the parameter\\'s default value is\\nsubstituted. If a parameter has a default value, all following\\nparameters up until the \"``*``\" must also have a default value ---\\nthis is a syntactic restriction that is not expressed by the grammar.\\n\\n**Default parameter values are evaluated when the function definition\\nis executed.** This means that the expression is evaluated once, when\\nthe function is defined, and that the same \"pre-computed\" value is\\nused for each call. This is especially important to understand when a\\ndefault parameter is a mutable object, such as a list or a dictionary:\\nif the function modifies the object (e.g. by appending an item to a\\nlist), the default value is in effect modified. This is generally not\\nwhat was intended. 
A way around this is to use ``None`` as the\\ndefault, and explicitly test for it in the body of the function, e.g.:\\n\\n def whats_on_the_telly(penguin=None):\\n if penguin is None:\\n penguin = []\\n penguin.append(\"property of the zoo\")\\n return penguin\\n\\nFunction call semantics are described in more detail in section\\n*Calls*. A function call always assigns values to all parameters\\nmentioned in the parameter list, either from position arguments, from\\nkeyword arguments, or from default values. If the form\\n\"``*identifier``\" is present, it is initialized to a tuple receiving\\nany excess positional parameters, defaulting to the empty tuple. If\\nthe form \"``**identifier``\" is present, it is initialized to a new\\ndictionary receiving any excess keyword arguments, defaulting to a new\\nempty dictionary. Parameters after \"``*``\" or \"``*identifier``\" are\\nkeyword-only parameters and may only be passed used keyword arguments.\\n\\nParameters may have annotations of the form \"``: expression``\"\\nfollowing the parameter name. Any parameter may have an annotation\\neven those of the form ``*identifier`` or ``**identifier``. Functions\\nmay have \"return\" annotation of the form \"``-> expression``\" after the\\nparameter list. These annotations can be any valid Python expression\\nand are evaluated when the function definition is executed.\\nAnnotations may be evaluated in a different order than they appear in\\nthe source code. The presence of annotations does not change the\\nsemantics of a function. The annotation values are available as\\nvalues of a dictionary keyed by the parameters\\' names in the\\n``__annotations__`` attribute of the function object.\\n\\nIt is also possible to create anonymous functions (functions not bound\\nto a name), for immediate use in expressions. This uses lambda forms,\\ndescribed in section *Lambdas*. Note that the lambda form is merely a\\nshorthand for a simplified function definition; a function defined in\\na \"``def``\" statement can be passed around or assigned to another name\\njust like a function defined by a lambda form. The \"``def``\" form is\\nactually more powerful since it allows the execution of multiple\\nstatements and annotations.\\n\\n**Programmer\\'s note:** Functions are first-class objects. A \"``def``\"\\nform executed inside a function definition defines a local function\\nthat can be returned or passed around. Free variables used in the\\nnested function can access the local variables of the function\\ncontaining the def. See section *Naming and binding* for details.\\n\\nSee also:\\n\\n **PEP 3107** - Function Annotations\\n The original specification for function annotations.\\n',\n'global':'\\nThe ``global`` statement\\n************************\\n\\n global_stmt ::= \"global\" identifier (\",\" identifier)*\\n\\nThe ``global`` statement is a declaration which holds for the entire\\ncurrent code block. It means that the listed identifiers are to be\\ninterpreted as globals. 
It would be impossible to assign to a global\\nvariable without ``global``, although free variables may refer to\\nglobals without being declared global.\\n\\nNames listed in a ``global`` statement must not be used in the same\\ncode block textually preceding that ``global`` statement.\\n\\nNames listed in a ``global`` statement must not be defined as formal\\nparameters or in a ``for`` loop control target, ``class`` definition,\\nfunction definition, or ``import`` statement.\\n\\n**CPython implementation detail:** The current implementation does not\\nenforce the latter two restrictions, but programs should not abuse\\nthis freedom, as future implementations may enforce them or silently\\nchange the meaning of the program.\\n\\n**Programmer\\'s note:** the ``global`` is a directive to the parser.\\nIt applies only to code parsed at the same time as the ``global``\\nstatement. In particular, a ``global`` statement contained in a string\\nor code object supplied to the built-in ``exec()`` function does not\\naffect the code block *containing* the function call, and code\\ncontained in such a string is unaffected by ``global`` statements in\\nthe code containing the function call. The same applies to the\\n``eval()`` and ``compile()`` functions.\\n',\n'id-classes':'\\nReserved classes of identifiers\\n*******************************\\n\\nCertain classes of identifiers (besides keywords) have special\\nmeanings. These classes are identified by the patterns of leading and\\ntrailing underscore characters:\\n\\n``_*``\\n Not imported by ``from module import *``. The special identifier\\n ``_`` is used in the interactive interpreter to store the result of\\n the last evaluation; it is stored in the ``builtins`` module. When\\n not in interactive mode, ``_`` has no special meaning and is not\\n defined. See section *The import statement*.\\n\\n Note: The name ``_`` is often used in conjunction with\\n internationalization; refer to the documentation for the\\n ``gettext`` module for more information on this convention.\\n\\n``__*__``\\n System-defined names. These names are defined by the interpreter\\n and its implementation (including the standard library). Current\\n system names are discussed in the *Special method names* section\\n and elsewhere. More will likely be defined in future versions of\\n Python. *Any* use of ``__*__`` names, in any context, that does\\n not follow explicitly documented use, is subject to breakage\\n without warning.\\n\\n``__*``\\n Class-private names. Names in this category, when used within the\\n context of a class definition, are re-written to use a mangled form\\n to help avoid name clashes between \"private\" attributes of base and\\n derived classes. See section *Identifiers (Names)*.\\n',\n'identifiers':'\\nIdentifiers and keywords\\n************************\\n\\nIdentifiers (also referred to as *names*) are described by the\\nfollowing lexical definitions.\\n\\nThe syntax of identifiers in Python is based on the Unicode standard\\nannex UAX-31, with elaboration and changes as defined below; see also\\n**PEP 3131** for further details.\\n\\nWithin the ASCII range (U+0001..U+007F), the valid characters for\\nidentifiers are the same as in Python 2.x: the uppercase and lowercase\\nletters ``A`` through ``Z``, the underscore ``_`` and, except for the\\nfirst character, the digits ``0`` through ``9``.\\n\\nPython 3.0 introduces additional characters from outside the ASCII\\nrange (see **PEP 3131**). 
For these characters, the classification\\nuses the version of the Unicode Character Database as included in the\\n``unicodedata`` module.\\n\\nIdentifiers are unlimited in length. Case is significant.\\n\\n identifier ::= xid_start xid_continue*\\n id_start ::= \\n id_continue ::= \\n xid_start ::= \\n xid_continue ::= \\n\\nThe Unicode category codes mentioned above stand for:\\n\\n* *Lu* - uppercase letters\\n\\n* *Ll* - lowercase letters\\n\\n* *Lt* - titlecase letters\\n\\n* *Lm* - modifier letters\\n\\n* *Lo* - other letters\\n\\n* *Nl* - letter numbers\\n\\n* *Mn* - nonspacing marks\\n\\n* *Mc* - spacing combining marks\\n\\n* *Nd* - decimal numbers\\n\\n* *Pc* - connector punctuations\\n\\n* *Other_ID_Start* - explicit list of characters in PropList.txt to\\n support backwards compatibility\\n\\n* *Other_ID_Continue* - likewise\\n\\nAll identifiers are converted into the normal form NFKC while parsing;\\ncomparison of identifiers is based on NFKC.\\n\\nA non-normative HTML file listing all valid identifier characters for\\nUnicode 4.1 can be found at http://www.dcl.hpi.uni-\\npotsdam.de/home/loewis/table-3131.html.\\n\\n\\nKeywords\\n========\\n\\nThe following identifiers are used as reserved words, or *keywords* of\\nthe language, and cannot be used as ordinary identifiers. They must\\nbe spelled exactly as written here:\\n\\n False class finally is return\\n None continue for lambda try\\n True def from nonlocal while\\n and del global not with\\n as elif if or yield\\n assert else import pass\\n break except in raise\\n\\n\\nReserved classes of identifiers\\n===============================\\n\\nCertain classes of identifiers (besides keywords) have special\\nmeanings. These classes are identified by the patterns of leading and\\ntrailing underscore characters:\\n\\n``_*``\\n Not imported by ``from module import *``. The special identifier\\n ``_`` is used in the interactive interpreter to store the result of\\n the last evaluation; it is stored in the ``builtins`` module. When\\n not in interactive mode, ``_`` has no special meaning and is not\\n defined. See section *The import statement*.\\n\\n Note: The name ``_`` is often used in conjunction with\\n internationalization; refer to the documentation for the\\n ``gettext`` module for more information on this convention.\\n\\n``__*__``\\n System-defined names. These names are defined by the interpreter\\n and its implementation (including the standard library). Current\\n system names are discussed in the *Special method names* section\\n and elsewhere. More will likely be defined in future versions of\\n Python. *Any* use of ``__*__`` names, in any context, that does\\n not follow explicitly documented use, is subject to breakage\\n without warning.\\n\\n``__*``\\n Class-private names. Names in this category, when used within the\\n context of a class definition, are re-written to use a mangled form\\n to help avoid name clashes between \"private\" attributes of base and\\n derived classes. 
See section *Identifiers (Names)*.\\n',\n'if':'\\nThe ``if`` statement\\n********************\\n\\nThe ``if`` statement is used for conditional execution:\\n\\n if_stmt ::= \"if\" expression \":\" suite\\n ( \"elif\" expression \":\" suite )*\\n [\"else\" \":\" suite]\\n\\nIt selects exactly one of the suites by evaluating the expressions one\\nby one until one is found to be true (see section *Boolean operations*\\nfor the definition of true and false); then that suite is executed\\n(and no other part of the ``if`` statement is executed or evaluated).\\nIf all expressions are false, the suite of the ``else`` clause, if\\npresent, is executed.\\n',\n'imaginary':'\\nImaginary literals\\n******************\\n\\nImaginary literals are described by the following lexical definitions:\\n\\n imagnumber ::= (floatnumber | intpart) (\"j\" | \"J\")\\n\\nAn imaginary literal yields a complex number with a real part of 0.0.\\nComplex numbers are represented as a pair of floating point numbers\\nand have the same restrictions on their range. To create a complex\\nnumber with a nonzero real part, add a floating point number to it,\\ne.g., ``(3+4j)``. Some examples of imaginary literals:\\n\\n 3.14j 10.j 10j .001j 1e100j 3.14e-10j\\n',\n'import':'\\nThe ``import`` statement\\n************************\\n\\n import_stmt ::= \"import\" module [\"as\" name] ( \",\" module [\"as\" name] )*\\n | \"from\" relative_module \"import\" identifier [\"as\" name]\\n ( \",\" identifier [\"as\" name] )*\\n | \"from\" relative_module \"import\" \"(\" identifier [\"as\" name]\\n ( \",\" identifier [\"as\" name] )* [\",\"] \")\"\\n | \"from\" module \"import\" \"*\"\\n module ::= (identifier \".\")* identifier\\n relative_module ::= \".\"* module | \".\"+\\n name ::= identifier\\n\\nThe basic import statement (no ``from`` clause) is executed in two\\nsteps:\\n\\n1. find a module, loading and initializing it if necessary\\n\\n2. define a name or names in the local namespace for the scope where\\n the ``import`` statement occurs.\\n\\nWhen the statement contains multiple clauses (separated by commas) the\\ntwo steps are carried out separately for each clause, just as though\\nthe clauses had been separated out into individiual import statements.\\n\\nThe details of the first step, finding and loading modules is\\ndescribed in greater detail in the section on the *import system*,\\nwhich also describes the various types of packages and modules that\\ncan be imported, as well as all the hooks that can be used to\\ncustomize the import system. Note that failures in this step may\\nindicate either that the module could not be located, *or* that an\\nerror occurred while initializing the module, which includes execution\\nof the module\\'s code.\\n\\nIf the requested module is retrieved successfully, it will be made\\navailable in the local namespace in one of three ways:\\n\\n* If the module name is followed by ``as``, then the name following\\n ``as`` is bound directly to the imported module.\\n\\n* If no other name is specified, and the module being imported is a\\n top level module, the module\\'s name is bound in the local namespace\\n as a reference to the imported module\\n\\n* If the module being imported is *not* a top level module, then the\\n name of the top level package that contains the module is bound in\\n the local namespace as a reference to the top level package. 
The\\n imported module must be accessed using its full qualified name\\n rather than directly.\\n\\nThe ``from`` form uses a slightly more complex process:\\n\\n1. find the module specified in the ``from`` clause, loading and\\n initializing it if necessary;\\n\\n2. for each of the identifiers specified in the ``import`` clauses:\\n\\n 1. check if the imported module has an attribute by that name\\n\\n 2. if not, attempt to import a submodule with that name and then\\n check the imported module again for that attribute\\n\\n 3. if the attribute is not found, ``ImportError`` is raised.\\n\\n 4. otherwise, a reference to that value is bound in the local\\n namespace, using the name in the ``as`` clause if it is present,\\n otherwise using the attribute name\\n\\nExamples:\\n\\n import foo # foo imported and bound locally\\n import foo.bar.baz # foo.bar.baz imported, foo bound locally\\n import foo.bar.baz as fbb # foo.bar.baz imported and bound as fbb\\n from foo.bar import baz # foo.bar.baz imported and bound as baz\\n from foo import attr # foo imported and foo.attr bound as attr\\n\\nIf the list of identifiers is replaced by a star (``\\'*\\'``), all public\\nnames defined in the module are bound in the local namespace for the\\nscope where the ``import`` statement occurs.\\n\\nThe *public names* defined by a module are determined by checking the\\nmodule\\'s namespace for a variable named ``__all__``; if defined, it\\nmust be a sequence of strings which are names defined or imported by\\nthat module. The names given in ``__all__`` are all considered public\\nand are required to exist. If ``__all__`` is not defined, the set of\\npublic names includes all names found in the module\\'s namespace which\\ndo not begin with an underscore character (``\\'_\\'``). ``__all__``\\nshould contain the entire public API. It is intended to avoid\\naccidentally exporting items that are not part of the API (such as\\nlibrary modules which were imported and used within the module).\\n\\nThe ``from`` form with ``*`` may only occur in a module scope.\\nAttempting to use it in class or function definitions will raise a\\n``SyntaxError``.\\n\\nWhen specifying what module to import you do not have to specify the\\nabsolute name of the module. When a module or package is contained\\nwithin another package it is possible to make a relative import within\\nthe same top package without having to mention the package name. 
By\\nusing leading dots in the specified module or package after ``from``\\nyou can specify how high to traverse up the current package hierarchy\\nwithout specifying exact names. One leading dot means the current\\npackage where the module making the import exists. Two dots means up\\none package level. Three dots is up two levels, etc. So if you execute\\n``from . import mod`` from a module in the ``pkg`` package then you\\nwill end up importing ``pkg.mod``. If you execute ``from ..subpkg2\\nimport mod`` from within ``pkg.subpkg1`` you will import\\n``pkg.subpkg2.mod``. The specification for relative imports is\\ncontained within **PEP 328**.\\n\\n``importlib.import_module()`` is provided to support applications that\\ndetermine which modules need to be loaded dynamically.\\n\\n\\nFuture statements\\n=================\\n\\nA *future statement* is a directive to the compiler that a particular\\nmodule should be compiled using syntax or semantics that will be\\navailable in a specified future release of Python. The future\\nstatement is intended to ease migration to future versions of Python\\nthat introduce incompatible changes to the language. It allows use of\\nthe new features on a per-module basis before the release in which the\\nfeature becomes standard.\\n\\n future_statement ::= \"from\" \"__future__\" \"import\" feature [\"as\" name]\\n (\",\" feature [\"as\" name])*\\n | \"from\" \"__future__\" \"import\" \"(\" feature [\"as\" name]\\n (\",\" feature [\"as\" name])* [\",\"] \")\"\\n feature ::= identifier\\n name ::= identifier\\n\\nA future statement must appear near the top of the module. The only\\nlines that can appear before a future statement are:\\n\\n* the module docstring (if any),\\n\\n* comments,\\n\\n* blank lines, and\\n\\n* other future statements.\\n\\nThe features recognized by Python 3.0 are ``absolute_import``,\\n``division``, ``generators``, ``unicode_literals``,\\n``print_function``, ``nested_scopes`` and ``with_statement``. They\\nare all redundant because they are always enabled, and only kept for\\nbackwards compatibility.\\n\\nA future statement is recognized and treated specially at compile\\ntime: Changes to the semantics of core constructs are often\\nimplemented by generating different code. It may even be the case\\nthat a new feature introduces new incompatible syntax (such as a new\\nreserved word), in which case the compiler may need to parse the\\nmodule differently. Such decisions cannot be pushed off until\\nruntime.\\n\\nFor any given release, the compiler knows which feature names have\\nbeen defined, and raises a compile-time error if a future statement\\ncontains a feature not known to it.\\n\\nThe direct runtime semantics are the same as for any import statement:\\nthere is a standard module ``__future__``, described later, and it\\nwill be imported in the usual way at the time the future statement is\\nexecuted.\\n\\nThe interesting runtime semantics depend on the specific feature\\nenabled by the future statement.\\n\\nNote that there is nothing special about the statement:\\n\\n import __future__ [as name]\\n\\nThat is not a future statement; it\\'s an ordinary import statement with\\nno special semantics or syntax restrictions.\\n\\nCode compiled by calls to the built-in functions ``exec()`` and\\n``compile()`` that occur in a module ``M`` containing a future\\nstatement will, by default, use the new syntax or semantics associated\\nwith the future statement. 
This can be controlled by optional\\narguments to ``compile()`` --- see the documentation of that function\\nfor details.\\n\\nA future statement typed at an interactive interpreter prompt will\\ntake effect for the rest of the interpreter session. If an\\ninterpreter is started with the *-i* option, is passed a script name\\nto execute, and the script includes a future statement, it will be in\\neffect in the interactive session started after the script is\\nexecuted.\\n\\nSee also:\\n\\n **PEP 236** - Back to the __future__\\n The original proposal for the __future__ mechanism.\\n',\n'in':'\\nComparisons\\n***********\\n\\nUnlike C, all comparison operations in Python have the same priority,\\nwhich is lower than that of any arithmetic, shifting or bitwise\\noperation. Also unlike C, expressions like ``a < b < c`` have the\\ninterpretation that is conventional in mathematics:\\n\\n comparison ::= or_expr ( comp_operator or_expr )*\\n comp_operator ::= \"<\" | \">\" | \"==\" | \">=\" | \"<=\" | \"!=\"\\n | \"is\" [\"not\"] | [\"not\"] \"in\"\\n\\nComparisons yield boolean values: ``True`` or ``False``.\\n\\nComparisons can be chained arbitrarily, e.g., ``x < y <= z`` is\\nequivalent to ``x < y and y <= z``, except that ``y`` is evaluated\\nonly once (but in both cases ``z`` is not evaluated at all when ``x <\\ny`` is found to be false).\\n\\nFormally, if *a*, *b*, *c*, ..., *y*, *z* are expressions and *op1*,\\n*op2*, ..., *opN* are comparison operators, then ``a op1 b op2 c ... y\\nopN z`` is equivalent to ``a op1 b and b op2 c and ... y opN z``,\\nexcept that each expression is evaluated at most once.\\n\\nNote that ``a op1 b op2 c`` doesn\\'t imply any kind of comparison\\nbetween *a* and *c*, so that, e.g., ``x < y > z`` is perfectly legal\\n(though perhaps not pretty).\\n\\nThe operators ``<``, ``>``, ``==``, ``>=``, ``<=``, and ``!=`` compare\\nthe values of two objects. The objects need not have the same type.\\nIf both are numbers, they are converted to a common type. Otherwise,\\nthe ``==`` and ``!=`` operators *always* consider objects of different\\ntypes to be unequal, while the ``<``, ``>``, ``>=`` and ``<=``\\noperators raise a ``TypeError`` when comparing objects of different\\ntypes that do not implement these operators for the given pair of\\ntypes. You can control comparison behavior of objects of non-built-in\\ntypes by defining rich comparison methods like ``__gt__()``, described\\nin section *Basic customization*.\\n\\nComparison of objects of the same type depends on the type:\\n\\n* Numbers are compared arithmetically.\\n\\n* The values ``float(\\'NaN\\')`` and ``Decimal(\\'NaN\\')`` are special. They\\n are identical to themselves, ``x is x`` but are not equal to\\n themselves, ``x != x``. Additionally, comparing any value to a\\n not-a-number value will return ``False``. For example, both ``3 <\\n float(\\'NaN\\')`` and ``float(\\'NaN\\') < 3`` will return ``False``.\\n\\n* Bytes objects are compared lexicographically using the numeric\\n values of their elements.\\n\\n* Strings are compared lexicographically using the numeric equivalents\\n (the result of the built-in function ``ord()``) of their characters.\\n [3] String and bytes objects can\\'t be compared!\\n\\n* Tuples and lists are compared lexicographically using comparison of\\n corresponding elements. 
This means that to compare equal, each\\n element must compare equal and the two sequences must be of the same\\n type and have the same length.\\n\\n If not equal, the sequences are ordered the same as their first\\n differing elements. For example, ``[1,2,x] <= [1,2,y]`` has the\\n same value as ``x <= y``. If the corresponding element does not\\n exist, the shorter sequence is ordered first (for example, ``[1,2] <\\n [1,2,3]``).\\n\\n* Mappings (dictionaries) compare equal if and only if they have the\\n same ``(key, value)`` pairs. Order comparisons ``(\\'<\\', \\'<=\\', \\'>=\\',\\n \\'>\\')`` raise ``TypeError``.\\n\\n* Sets and frozensets define comparison operators to mean subset and\\n superset tests. Those relations do not define total orderings (the\\n two sets ``{1,2}`` and ``{2,3}`` are not equal, nor subsets of one\\n another, nor supersets of one another). Accordingly, sets are not\\n appropriate arguments for functions which depend on total ordering.\\n For example, ``min()``, ``max()``, and ``sorted()`` produce\\n undefined results given a list of sets as inputs.\\n\\n* Most other objects of built-in types compare unequal unless they are\\n the same object; the choice whether one object is considered smaller\\n or larger than another one is made arbitrarily but consistently\\n within one execution of a program.\\n\\nComparison of objects of differing types depends on whether either\\nof the types provide explicit support for the comparison. Most\\nnumeric types can be compared with one another. When cross-type\\ncomparison is not supported, the comparison method returns\\n``NotImplemented``.\\n\\nThe operators ``in`` and ``not in`` test for membership. ``x in s``\\nevaluates to true if *x* is a member of *s*, and false otherwise. ``x\\nnot in s`` returns the negation of ``x in s``. All built-in sequences\\nand set types support this as well as dictionary, for which ``in``\\ntests whether the dictionary has a given key. For container types\\nsuch as list, tuple, set, frozenset, dict, or collections.deque, the\\nexpression ``x in y`` is equivalent to ``any(x is e or x == e for e in\\ny)``.\\n\\nFor the string and bytes types, ``x in y`` is true if and only if *x*\\nis a substring of *y*. An equivalent test is ``y.find(x) != -1``.\\nEmpty strings are always considered to be a substring of any other\\nstring, so ``\"\" in \"abc\"`` will return ``True``.\\n\\nFor user-defined classes which define the ``__contains__()`` method,\\n``x in y`` is true if and only if ``y.__contains__(x)`` is true.\\n\\nFor user-defined classes which do not define ``__contains__()`` but do\\ndefine ``__iter__()``, ``x in y`` is true if some value ``z`` with ``x\\n== z`` is produced while iterating over ``y``. If an exception is\\nraised during the iteration, it is as if ``in`` raised that exception.\\n\\nLastly, the old-style iteration protocol is tried: if a class defines\\n``__getitem__()``, ``x in y`` is true if and only if there is a non-\\nnegative integer index *i* such that ``x == y[i]``, and all lower\\ninteger indices do not raise ``IndexError`` exception. (If any other\\nexception is raised, it is as if ``in`` raised that exception).\\n\\nThe operator ``not in`` is defined to have the inverse true value of\\n``in``.\\n\\nThe operators ``is`` and ``is not`` test for object identity: ``x is\\ny`` is true if and only if *x* and *y* are the same object. ``x is\\nnot y`` yields the inverse truth value. 
[4]\\n',\n'integers':'\\nInteger literals\\n****************\\n\\nInteger literals are described by the following lexical definitions:\\n\\n integer ::= decimalinteger | octinteger | hexinteger | bininteger\\n decimalinteger ::= nonzerodigit digit* | \"0\"+\\n nonzerodigit ::= \"1\"...\"9\"\\n digit ::= \"0\"...\"9\"\\n octinteger ::= \"0\" (\"o\" | \"O\") octdigit+\\n hexinteger ::= \"0\" (\"x\" | \"X\") hexdigit+\\n bininteger ::= \"0\" (\"b\" | \"B\") bindigit+\\n octdigit ::= \"0\"...\"7\"\\n hexdigit ::= digit | \"a\"...\"f\" | \"A\"...\"F\"\\n bindigit ::= \"0\" | \"1\"\\n\\nThere is no limit for the length of integer literals apart from what\\ncan be stored in available memory.\\n\\nNote that leading zeros in a non-zero decimal number are not allowed.\\nThis is for disambiguation with C-style octal literals, which Python\\nused before version 3.0.\\n\\nSome examples of integer literals:\\n\\n 7 2147483647 0o177 0b100110111\\n 3 79228162514264337593543950336 0o377 0x100000000\\n 79228162514264337593543950336 0xdeadbeef\\n',\n'lambda':'\\nLambdas\\n*******\\n\\n lambda_form ::= \"lambda\" [parameter_list]: expression\\n lambda_form_nocond ::= \"lambda\" [parameter_list]: expression_nocond\\n\\nLambda forms (lambda expressions) have the same syntactic position as\\nexpressions. They are a shorthand to create anonymous functions; the\\nexpression ``lambda arguments: expression`` yields a function object.\\nThe unnamed object behaves like a function object defined with\\n\\n def (arguments):\\n return expression\\n\\nSee section *Function definitions* for the syntax of parameter lists.\\nNote that functions created with lambda forms cannot contain\\nstatements or annotations.\\n',\n'lists':'\\nList displays\\n*************\\n\\nA list display is a possibly empty series of expressions enclosed in\\nsquare brackets:\\n\\n list_display ::= \"[\" [expression_list | comprehension] \"]\"\\n\\nA list display yields a new list object, the contents being specified\\nby either a list of expressions or a comprehension. When a comma-\\nseparated list of expressions is supplied, its elements are evaluated\\nfrom left to right and placed into the list object in that order.\\nWhen a comprehension is supplied, the list is constructed from the\\nelements resulting from the comprehension.\\n',\n'naming':\"\\nNaming and binding\\n******************\\n\\n*Names* refer to objects. Names are introduced by name binding\\noperations. Each occurrence of a name in the program text refers to\\nthe *binding* of that name established in the innermost function block\\ncontaining the use.\\n\\nA *block* is a piece of Python program text that is executed as a\\nunit. The following are blocks: a module, a function body, and a class\\ndefinition. Each command typed interactively is a block. A script\\nfile (a file given as standard input to the interpreter or specified\\non the interpreter command line the first argument) is a code block.\\nA script command (a command specified on the interpreter command line\\nwith the '**-c**' option) is a code block. The string argument passed\\nto the built-in functions ``eval()`` and ``exec()`` is a code block.\\n\\nA code block is executed in an *execution frame*. A frame contains\\nsome administrative information (used for debugging) and determines\\nwhere and how execution continues after the code block's execution has\\ncompleted.\\n\\nA *scope* defines the visibility of a name within a block. If a local\\nvariable is defined in a block, its scope includes that block. 
If the\\ndefinition occurs in a function block, the scope extends to any blocks\\ncontained within the defining one, unless a contained block introduces\\na different binding for the name. The scope of names defined in a\\nclass block is limited to the class block; it does not extend to the\\ncode blocks of methods -- this includes comprehensions and generator\\nexpressions since they are implemented using a function scope. This\\nmeans that the following will fail:\\n\\n class A:\\n a = 42\\n b = list(a + i for i in range(10))\\n\\nWhen a name is used in a code block, it is resolved using the nearest\\nenclosing scope. The set of all such scopes visible to a code block\\nis called the block's *environment*.\\n\\nIf a name is bound in a block, it is a local variable of that block,\\nunless declared as ``nonlocal``. If a name is bound at the module\\nlevel, it is a global variable. (The variables of the module code\\nblock are local and global.) If a variable is used in a code block\\nbut not defined there, it is a *free variable*.\\n\\nWhen a name is not found at all, a ``NameError`` exception is raised.\\nIf the name refers to a local variable that has not been bound, a\\n``UnboundLocalError`` exception is raised. ``UnboundLocalError`` is a\\nsubclass of ``NameError``.\\n\\nThe following constructs bind names: formal parameters to functions,\\n``import`` statements, class and function definitions (these bind the\\nclass or function name in the defining block), and targets that are\\nidentifiers if occurring in an assignment, ``for`` loop header, or\\nafter ``as`` in a ``with`` statement or ``except`` clause. The\\n``import`` statement of the form ``from ... import *`` binds all names\\ndefined in the imported module, except those beginning with an\\nunderscore. This form may only be used at the module level.\\n\\nA target occurring in a ``del`` statement is also considered bound for\\nthis purpose (though the actual semantics are to unbind the name).\\n\\nEach assignment or import statement occurs within a block defined by a\\nclass or function definition or at the module level (the top-level\\ncode block).\\n\\nIf a name binding operation occurs anywhere within a code block, all\\nuses of the name within the block are treated as references to the\\ncurrent block. This can lead to errors when a name is used within a\\nblock before it is bound. This rule is subtle. Python lacks\\ndeclarations and allows name binding operations to occur anywhere\\nwithin a code block. The local variables of a code block can be\\ndetermined by scanning the entire text of the block for name binding\\noperations.\\n\\nIf the ``global`` statement occurs within a block, all uses of the\\nname specified in the statement refer to the binding of that name in\\nthe top-level namespace. Names are resolved in the top-level\\nnamespace by searching the global namespace, i.e. the namespace of the\\nmodule containing the code block, and the builtins namespace, the\\nnamespace of the module ``builtins``. The global namespace is\\nsearched first. If the name is not found there, the builtins\\nnamespace is searched. The global statement must precede all uses of\\nthe name.\\n\\nThe builtins namespace associated with the execution of a code block\\nis actually found by looking up the name ``__builtins__`` in its\\nglobal namespace; this should be a dictionary or a module (in the\\nlatter case the module's dictionary is used). 
By default, when in the\\n``__main__`` module, ``__builtins__`` is the built-in module\\n``builtins``; when in any other module, ``__builtins__`` is an alias\\nfor the dictionary of the ``builtins`` module itself.\\n``__builtins__`` can be set to a user-created dictionary to create a\\nweak form of restricted execution.\\n\\n**CPython implementation detail:** Users should not touch\\n``__builtins__``; it is strictly an implementation detail. Users\\nwanting to override values in the builtins namespace should ``import``\\nthe ``builtins`` module and modify its attributes appropriately.\\n\\nThe namespace for a module is automatically created the first time a\\nmodule is imported. The main module for a script is always called\\n``__main__``.\\n\\nThe ``global`` statement has the same scope as a name binding\\noperation in the same block. If the nearest enclosing scope for a\\nfree variable contains a global statement, the free variable is\\ntreated as a global.\\n\\nA class definition is an executable statement that may use and define\\nnames. These references follow the normal rules for name resolution.\\nThe namespace of the class definition becomes the attribute dictionary\\nof the class. Names defined at the class scope are not visible in\\nmethods.\\n\\n\\nInteraction with dynamic features\\n=================================\\n\\nThere are several cases where Python statements are illegal when used\\nin conjunction with nested scopes that contain free variables.\\n\\nIf a variable is referenced in an enclosing scope, it is illegal to\\ndelete the name. An error will be reported at compile time.\\n\\nIf the wild card form of import --- ``import *`` --- is used in a\\nfunction and the function contains or is a nested block with free\\nvariables, the compiler will raise a ``SyntaxError``.\\n\\nThe ``eval()`` and ``exec()`` functions do not have access to the full\\nenvironment for resolving names. Names may be resolved in the local\\nand global namespaces of the caller. Free variables are not resolved\\nin the nearest enclosing namespace, but in the global namespace. [1]\\nThe ``exec()`` and ``eval()`` functions have optional arguments to\\noverride the global and local namespace. If only one namespace is\\nspecified, it is used for both.\\n\",\n'nonlocal':'\\nThe ``nonlocal`` statement\\n**************************\\n\\n nonlocal_stmt ::= \"nonlocal\" identifier (\",\" identifier)*\\n\\nThe ``nonlocal`` statement causes the listed identifiers to refer to\\npreviously bound variables in the nearest enclosing scope. This is\\nimportant because the default behavior for binding is to search the\\nlocal namespace first. The statement allows encapsulated code to\\nrebind variables outside of the local scope besides the global\\n(module) scope.\\n\\nNames listed in a ``nonlocal`` statement, unlike to those listed in a\\n``global`` statement, must refer to pre-existing bindings in an\\nenclosing scope (the scope in which a new binding should be created\\ncannot be determined unambiguously).\\n\\nNames listed in a ``nonlocal`` statement must not collide with pre-\\nexisting bindings in the local scope.\\n\\nSee also:\\n\\n **PEP 3104** - Access to Names in Outer Scopes\\n The specification for the ``nonlocal`` statement.\\n',\n'numbers':\"\\nNumeric literals\\n****************\\n\\nThere are three types of numeric literals: integers, floating point\\nnumbers, and imaginary numbers. 
There are no complex literals\\n(complex numbers can be formed by adding a real number and an\\nimaginary number).\\n\\nNote that numeric literals do not include a sign; a phrase like ``-1``\\nis actually an expression composed of the unary operator '``-``' and\\nthe literal ``1``.\\n\",\n'numeric-types':\"\\nEmulating numeric types\\n***********************\\n\\nThe following methods can be defined to emulate numeric objects.\\nMethods corresponding to operations that are not supported by the\\nparticular kind of number implemented (e.g., bitwise operations for\\nnon-integral numbers) should be left undefined.\\n\\nobject.__add__(self, other)\\nobject.__sub__(self, other)\\nobject.__mul__(self, other)\\nobject.__truediv__(self, other)\\nobject.__floordiv__(self, other)\\nobject.__mod__(self, other)\\nobject.__divmod__(self, other)\\nobject.__pow__(self, other[, modulo])\\nobject.__lshift__(self, other)\\nobject.__rshift__(self, other)\\nobject.__and__(self, other)\\nobject.__xor__(self, other)\\nobject.__or__(self, other)\\n\\n These methods are called to implement the binary arithmetic\\n operations (``+``, ``-``, ``*``, ``/``, ``//``, ``%``,\\n ``divmod()``, ``pow()``, ``**``, ``<<``, ``>>``, ``&``, ``^``,\\n ``|``). For instance, to evaluate the expression ``x + y``, where\\n *x* is an instance of a class that has an ``__add__()`` method,\\n ``x.__add__(y)`` is called. The ``__divmod__()`` method should be\\n the equivalent to using ``__floordiv__()`` and ``__mod__()``; it\\n should not be related to ``__truediv__()``. Note that\\n ``__pow__()`` should be defined to accept an optional third\\n argument if the ternary version of the built-in ``pow()`` function\\n is to be supported.\\n\\n If one of those methods does not support the operation with the\\n supplied arguments, it should return ``NotImplemented``.\\n\\nobject.__radd__(self, other)\\nobject.__rsub__(self, other)\\nobject.__rmul__(self, other)\\nobject.__rtruediv__(self, other)\\nobject.__rfloordiv__(self, other)\\nobject.__rmod__(self, other)\\nobject.__rdivmod__(self, other)\\nobject.__rpow__(self, other)\\nobject.__rlshift__(self, other)\\nobject.__rrshift__(self, other)\\nobject.__rand__(self, other)\\nobject.__rxor__(self, other)\\nobject.__ror__(self, other)\\n\\n These methods are called to implement the binary arithmetic\\n operations (``+``, ``-``, ``*``, ``/``, ``//``, ``%``,\\n ``divmod()``, ``pow()``, ``**``, ``<<``, ``>>``, ``&``, ``^``,\\n ``|``) with reflected (swapped) operands. These functions are only\\n called if the left operand does not support the corresponding\\n operation and the operands are of different types. [2] For\\n instance, to evaluate the expression ``x - y``, where *y* is an\\n instance of a class that has an ``__rsub__()`` method,\\n ``y.__rsub__(x)`` is called if ``x.__sub__(y)`` returns\\n *NotImplemented*.\\n\\n Note that ternary ``pow()`` will not try calling ``__rpow__()``\\n (the coercion rules would become too complicated).\\n\\n Note: If the right operand's type is a subclass of the left operand's\\n type and that subclass provides the reflected method for the\\n operation, this method will be called before the left operand's\\n non-reflected method. 
This behavior allows subclasses to\\n override their ancestors' operations.\\n\\nobject.__iadd__(self, other)\\nobject.__isub__(self, other)\\nobject.__imul__(self, other)\\nobject.__itruediv__(self, other)\\nobject.__ifloordiv__(self, other)\\nobject.__imod__(self, other)\\nobject.__ipow__(self, other[, modulo])\\nobject.__ilshift__(self, other)\\nobject.__irshift__(self, other)\\nobject.__iand__(self, other)\\nobject.__ixor__(self, other)\\nobject.__ior__(self, other)\\n\\n These methods are called to implement the augmented arithmetic\\n assignments (``+=``, ``-=``, ``*=``, ``/=``, ``//=``, ``%=``,\\n ``**=``, ``<<=``, ``>>=``, ``&=``, ``^=``, ``|=``). These methods\\n should attempt to do the operation in-place (modifying *self*) and\\n return the result (which could be, but does not have to be,\\n *self*). If a specific method is not defined, the augmented\\n assignment falls back to the normal methods. For instance, to\\n execute the statement ``x += y``, where *x* is an instance of a\\n class that has an ``__iadd__()`` method, ``x.__iadd__(y)`` is\\n called. If *x* is an instance of a class that does not define a\\n ``__iadd__()`` method, ``x.__add__(y)`` and ``y.__radd__(x)`` are\\n considered, as with the evaluation of ``x + y``.\\n\\nobject.__neg__(self)\\nobject.__pos__(self)\\nobject.__abs__(self)\\nobject.__invert__(self)\\n\\n Called to implement the unary arithmetic operations (``-``, ``+``,\\n ``abs()`` and ``~``).\\n\\nobject.__complex__(self)\\nobject.__int__(self)\\nobject.__float__(self)\\nobject.__round__(self[, n])\\n\\n Called to implement the built-in functions ``complex()``,\\n ``int()``, ``float()`` and ``round()``. Should return a value of\\n the appropriate type.\\n\\nobject.__index__(self)\\n\\n Called to implement ``operator.index()``. Also called whenever\\n Python needs an integer object (such as in slicing, or in the\\n built-in ``bin()``, ``hex()`` and ``oct()`` functions). Must return\\n an integer.\\n\",\n'objects':'\\nObjects, values and types\\n*************************\\n\\n*Objects* are Python\\'s abstraction for data. All data in a Python\\nprogram is represented by objects or by relations between objects. (In\\na sense, and in conformance to Von Neumann\\'s model of a \"stored\\nprogram computer,\" code is also represented by objects.)\\n\\nEvery object has an identity, a type and a value. An object\\'s\\n*identity* never changes once it has been created; you may think of it\\nas the object\\'s address in memory. The \\'``is``\\' operator compares the\\nidentity of two objects; the ``id()`` function returns an integer\\nrepresenting its identity.\\n\\n**CPython implementation detail:** For CPython, ``id(x)`` is the\\nmemory address where ``x`` is stored.\\n\\nAn object\\'s type determines the operations that the object supports\\n(e.g., \"does it have a length?\") and also defines the possible values\\nfor objects of that type. The ``type()`` function returns an object\\'s\\ntype (which is an object itself). Like its identity, an object\\'s\\n*type* is also unchangeable. [1]\\n\\nThe *value* of some objects can change. Objects whose value can\\nchange are said to be *mutable*; objects whose value is unchangeable\\nonce they are created are called *immutable*. (The value of an\\nimmutable container object that contains a reference to a mutable\\nobject can change when the latter\\'s value is changed; however the\\ncontainer is still considered immutable, because the collection of\\nobjects it contains cannot be changed. 
So, immutability is not\\nstrictly the same as having an unchangeable value, it is more subtle.)\\nAn object\\'s mutability is determined by its type; for instance,\\nnumbers, strings and tuples are immutable, while dictionaries and\\nlists are mutable.\\n\\nObjects are never explicitly destroyed; however, when they become\\nunreachable they may be garbage-collected. An implementation is\\nallowed to postpone garbage collection or omit it altogether --- it is\\na matter of implementation quality how garbage collection is\\nimplemented, as long as no objects are collected that are still\\nreachable.\\n\\n**CPython implementation detail:** CPython currently uses a reference-\\ncounting scheme with (optional) delayed detection of cyclically linked\\ngarbage, which collects most objects as soon as they become\\nunreachable, but is not guaranteed to collect garbage containing\\ncircular references. See the documentation of the ``gc`` module for\\ninformation on controlling the collection of cyclic garbage. Other\\nimplementations act differently and CPython may change. Do not depend\\non immediate finalization of objects when they become unreachable (ex:\\nalways close files).\\n\\nNote that the use of the implementation\\'s tracing or debugging\\nfacilities may keep objects alive that would normally be collectable.\\nAlso note that catching an exception with a \\'``try``...``except``\\'\\nstatement may keep objects alive.\\n\\nSome objects contain references to \"external\" resources such as open\\nfiles or windows. It is understood that these resources are freed\\nwhen the object is garbage-collected, but since garbage collection is\\nnot guaranteed to happen, such objects also provide an explicit way to\\nrelease the external resource, usually a ``close()`` method. Programs\\nare strongly recommended to explicitly close such objects. The\\n\\'``try``...``finally``\\' statement and the \\'``with``\\' statement provide\\nconvenient ways to do this.\\n\\nSome objects contain references to other objects; these are called\\n*containers*. Examples of containers are tuples, lists and\\ndictionaries. The references are part of a container\\'s value. In\\nmost cases, when we talk about the value of a container, we imply the\\nvalues, not the identities of the contained objects; however, when we\\ntalk about the mutability of a container, only the identities of the\\nimmediately contained objects are implied. So, if an immutable\\ncontainer (like a tuple) contains a reference to a mutable object, its\\nvalue changes if that mutable object is changed.\\n\\nTypes affect almost all aspects of object behavior. Even the\\nimportance of object identity is affected in some sense: for immutable\\ntypes, operations that compute new values may actually return a\\nreference to any existing object with the same type and value, while\\nfor mutable objects this is not allowed. E.g., after ``a = 1; b =\\n1``, ``a`` and ``b`` may or may not refer to the same object with the\\nvalue one, depending on the implementation, but after ``c = []; d =\\n[]``, ``c`` and ``d`` are guaranteed to refer to two different,\\nunique, newly created empty lists. (Note that ``c = d = []`` assigns\\nthe same object to both ``c`` and ``d``.)\\n',\n'operator-summary':'\\nOperator precedence\\n*******************\\n\\nThe following table summarizes the operator precedences in Python,\\nfrom lowest precedence (least binding) to highest precedence (most\\nbinding). Operators in the same box have the same precedence. 
Unless\\nthe syntax is explicitly given, operators are binary. Operators in\\nthe same box group left to right (except for comparisons, including\\ntests, which all have the same precedence and chain from left to right\\n--- see section *Comparisons* --- and exponentiation, which groups\\nfrom right to left).\\n\\n+-------------------------------------------------+---------------------------------------+\\n| Operator | Description |\\n+=================================================+=======================================+\\n| ``lambda`` | Lambda expression |\\n+-------------------------------------------------+---------------------------------------+\\n| ``if`` -- ``else`` | Conditional expression |\\n+-------------------------------------------------+---------------------------------------+\\n| ``or`` | Boolean OR |\\n+-------------------------------------------------+---------------------------------------+\\n| ``and`` | Boolean AND |\\n+-------------------------------------------------+---------------------------------------+\\n| ``not`` ``x`` | Boolean NOT |\\n+-------------------------------------------------+---------------------------------------+\\n| ``in``, ``not in``, ``is``, ``is not``, ``<``, | Comparisons, including membership |\\n| ``<=``, ``>``, ``>=``, ``!=``, ``==`` | tests and identity tests, |\\n+-------------------------------------------------+---------------------------------------+\\n| ``|`` | Bitwise OR |\\n+-------------------------------------------------+---------------------------------------+\\n| ``^`` | Bitwise XOR |\\n+-------------------------------------------------+---------------------------------------+\\n| ``&`` | Bitwise AND |\\n+-------------------------------------------------+---------------------------------------+\\n| ``<<``, ``>>`` | Shifts |\\n+-------------------------------------------------+---------------------------------------+\\n| ``+``, ``-`` | Addition and subtraction |\\n+-------------------------------------------------+---------------------------------------+\\n| ``*``, ``/``, ``//``, ``%`` | Multiplication, division, remainder |\\n| | [5] |\\n+-------------------------------------------------+---------------------------------------+\\n| ``+x``, ``-x``, ``~x`` | Positive, negative, bitwise NOT |\\n+-------------------------------------------------+---------------------------------------+\\n| ``**`` | Exponentiation [6] |\\n+-------------------------------------------------+---------------------------------------+\\n| ``x[index]``, ``x[index:index]``, | Subscription, slicing, call, |\\n| ``x(arguments...)``, ``x.attribute`` | attribute reference |\\n+-------------------------------------------------+---------------------------------------+\\n| ``(expressions...)``, ``[expressions...]``, | Binding or tuple display, list |\\n| ``{key: value...}``, ``{expressions...}`` | display, dictionary display, set |\\n| | display |\\n+-------------------------------------------------+---------------------------------------+\\n\\n-[ Footnotes ]-\\n\\n[1] While ``abs(x%y) < abs(y)`` is true mathematically, for floats it\\n may not be true numerically due to roundoff. For example, and\\n assuming a platform on which a Python float is an IEEE 754 double-\\n precision number, in order that ``-1e-100 % 1e100`` have the same\\n sign as ``1e100``, the computed result is ``-1e-100 + 1e100``,\\n which is numerically exactly equal to ``1e100``. 
The function\\n ``math.fmod()`` returns a result whose sign matches the sign of\\n the first argument instead, and so returns ``-1e-100`` in this\\n case. Which approach is more appropriate depends on the\\n application.\\n\\n[2] If x is very close to an exact integer multiple of y, it\\'s\\n possible for ``x//y`` to be one larger than ``(x-x%y)//y`` due to\\n rounding. In such cases, Python returns the latter result, in\\n order to preserve that ``divmod(x,y)[0] * y + x % y`` be very\\n close to ``x``.\\n\\n[3] While comparisons between strings make sense at the byte level,\\n they may be counter-intuitive to users. For example, the strings\\n ``\"\\\\u00C7\"`` and ``\"\\\\u0327\\\\u0043\"`` compare differently, even\\n though they both represent the same unicode character (LATIN\\n CAPITAL LETTER C WITH CEDILLA). To compare strings in a human\\n recognizable way, compare using ``unicodedata.normalize()``.\\n\\n[4] Due to automatic garbage-collection, free lists, and the dynamic\\n nature of descriptors, you may notice seemingly unusual behaviour\\n in certain uses of the ``is`` operator, like those involving\\n comparisons between instance methods, or constants. Check their\\n documentation for more info.\\n\\n[5] The ``%`` operator is also used for string formatting; the same\\n precedence applies.\\n\\n[6] The power operator ``**`` binds less tightly than an arithmetic or\\n bitwise unary operator on its right, that is, ``2**-1`` is\\n ``0.5``.\\n',\n'pass':'\\nThe ``pass`` statement\\n**********************\\n\\n pass_stmt ::= \"pass\"\\n\\n``pass`` is a null operation --- when it is executed, nothing happens.\\nIt is useful as a placeholder when a statement is required\\nsyntactically, but no code needs to be executed, for example:\\n\\n def f(arg): pass # a function that does nothing (yet)\\n\\n class C: pass # a class with no methods (yet)\\n',\n'power':'\\nThe power operator\\n******************\\n\\nThe power operator binds more tightly than unary operators on its\\nleft; it binds less tightly than unary operators on its right. The\\nsyntax is:\\n\\n power ::= primary [\"**\" u_expr]\\n\\nThus, in an unparenthesized sequence of power and unary operators, the\\noperators are evaluated from right to left (this does not constrain\\nthe evaluation order for the operands): ``-1**2`` results in ``-1``.\\n\\nThe power operator has the same semantics as the built-in ``pow()``\\nfunction, when called with two arguments: it yields its left argument\\nraised to the power of its right argument. The numeric arguments are\\nfirst converted to a common type, and the result is of that type.\\n\\nFor int operands, the result has the same type as the operands unless\\nthe second argument is negative; in that case, all arguments are\\nconverted to float and a float result is delivered. For example,\\n``10**2`` returns ``100``, but ``10**-2`` returns ``0.01``.\\n\\nRaising ``0.0`` to a negative power results in a\\n``ZeroDivisionError``. Raising a negative number to a fractional power\\nresults in a ``complex`` number. (In earlier versions it raised a\\n``ValueError``.)\\n',\n'raise':'\\nThe ``raise`` statement\\n***********************\\n\\n raise_stmt ::= \"raise\" [expression [\"from\" expression]]\\n\\nIf no expressions are present, ``raise`` re-raises the last exception\\nthat was active in the current scope. 
If no exception is active in\\nthe current scope, a ``RuntimeError`` exception is raised indicating\\nthat this is an error.\\n\\nOtherwise, ``raise`` evaluates the first expression as the exception\\nobject. It must be either a subclass or an instance of\\n``BaseException``. If it is a class, the exception instance will be\\nobtained when needed by instantiating the class with no arguments.\\n\\nThe *type* of the exception is the exception instance\\'s class, the\\n*value* is the instance itself.\\n\\nA traceback object is normally created automatically when an exception\\nis raised and attached to it as the ``__traceback__`` attribute, which\\nis writable. You can create an exception and set your own traceback in\\none step using the ``with_traceback()`` exception method (which\\nreturns the same exception instance, with its traceback set to its\\nargument), like so:\\n\\n raise Exception(\"foo occurred\").with_traceback(tracebackobj)\\n\\nThe ``from`` clause is used for exception chaining: if given, the\\nsecond *expression* must be another exception class or instance, which\\nwill then be attached to the raised exception as the ``__cause__``\\nattribute (which is writable). If the raised exception is not\\nhandled, both exceptions will be printed:\\n\\n >>> try:\\n ... print(1 / 0)\\n ... except Exception as exc:\\n ... raise RuntimeError(\"Something bad happened\") from exc\\n ...\\n Traceback (most recent call last):\\n File \"<stdin>\", line 2, in <module>\\n ZeroDivisionError: int division or modulo by zero\\n\\n The above exception was the direct cause of the following exception:\\n\\n Traceback (most recent call last):\\n File \"<stdin>\", line 4, in <module>\\n RuntimeError: Something bad happened\\n\\nA similar mechanism works implicitly if an exception is raised inside\\nan exception handler: the previous exception is then attached as the\\nnew exception\\'s ``__context__`` attribute:\\n\\n >>> try:\\n ... print(1 / 0)\\n ... except:\\n ... raise RuntimeError(\"Something bad happened\")\\n ...\\n Traceback (most recent call last):\\n File \"<stdin>\", line 2, in <module>\\n ZeroDivisionError: int division or modulo by zero\\n\\n During handling of the above exception, another exception occurred:\\n\\n Traceback (most recent call last):\\n File \"<stdin>\", line 4, in <module>\\n RuntimeError: Something bad happened\\n\\nAdditional information on exceptions can be found in section\\n*Exceptions*, and information about handling exceptions is in section\\n*The try statement*.\\n',\n'return':'\\nThe ``return`` statement\\n************************\\n\\n return_stmt ::= \"return\" [expression_list]\\n\\n``return`` may only occur syntactically nested in a function\\ndefinition, not within a nested class definition.\\n\\nIf an expression list is present, it is evaluated, else ``None`` is\\nsubstituted.\\n\\n``return`` leaves the current function call with the expression list\\n(or ``None``) as return value.\\n\\nWhen ``return`` passes control out of a ``try`` statement with a\\n``finally`` clause, that ``finally`` clause is executed before really\\nleaving the function.\\n\\nIn a generator function, the ``return`` statement indicates that the\\ngenerator is done and will cause ``StopIteration`` to be raised. 
The\\nreturned value (if any) is used as an argument to construct\\n``StopIteration`` and becomes the ``StopIteration.value`` attribute.\\n',\n'sequence-types':\"\\nEmulating container types\\n*************************\\n\\nThe following methods can be defined to implement container objects.\\nContainers usually are sequences (such as lists or tuples) or mappings\\n(like dictionaries), but can represent other containers as well. The\\nfirst set of methods is used either to emulate a sequence or to\\nemulate a mapping; the difference is that for a sequence, the\\nallowable keys should be the integers *k* for which ``0 <= k < N``\\nwhere *N* is the length of the sequence, or slice objects, which\\ndefine a range of items. It is also recommended that mappings provide\\nthe methods ``keys()``, ``values()``, ``items()``, ``get()``,\\n``clear()``, ``setdefault()``, ``pop()``, ``popitem()``, ``copy()``,\\nand ``update()`` behaving similar to those for Python's standard\\ndictionary objects. The ``collections`` module provides a\\n``MutableMapping`` abstract base class to help create those methods\\nfrom a base set of ``__getitem__()``, ``__setitem__()``,\\n``__delitem__()``, and ``keys()``. Mutable sequences should provide\\nmethods ``append()``, ``count()``, ``index()``, ``extend()``,\\n``insert()``, ``pop()``, ``remove()``, ``reverse()`` and ``sort()``,\\nlike Python standard list objects. Finally, sequence types should\\nimplement addition (meaning concatenation) and multiplication (meaning\\nrepetition) by defining the methods ``__add__()``, ``__radd__()``,\\n``__iadd__()``, ``__mul__()``, ``__rmul__()`` and ``__imul__()``\\ndescribed below; they should not define other numerical operators. It\\nis recommended that both mappings and sequences implement the\\n``__contains__()`` method to allow efficient use of the ``in``\\noperator; for mappings, ``in`` should search the mapping's keys; for\\nsequences, it should search through the values. It is further\\nrecommended that both mappings and sequences implement the\\n``__iter__()`` method to allow efficient iteration through the\\ncontainer; for mappings, ``__iter__()`` should be the same as\\n``keys()``; for sequences, it should iterate through the values.\\n\\nobject.__len__(self)\\n\\n Called to implement the built-in function ``len()``. Should return\\n the length of the object, an integer ``>=`` 0. Also, an object\\n that doesn't define a ``__bool__()`` method and whose ``__len__()``\\n method returns zero is considered to be false in a Boolean context.\\n\\nNote: Slicing is done exclusively with the following three methods. A\\n call like\\n\\n a[1:2] = b\\n\\n is translated to\\n\\n a[slice(1, 2, None)] = b\\n\\n and so forth. Missing slice items are always filled in with\\n ``None``.\\n\\nobject.__getitem__(self, key)\\n\\n Called to implement evaluation of ``self[key]``. For sequence\\n types, the accepted keys should be integers and slice objects.\\n Note that the special interpretation of negative indexes (if the\\n class wishes to emulate a sequence type) is up to the\\n ``__getitem__()`` method. If *key* is of an inappropriate type,\\n ``TypeError`` may be raised; if of a value outside the set of\\n indexes for the sequence (after any special interpretation of\\n negative values), ``IndexError`` should be raised. 
For mapping\\n types, if *key* is missing (not in the container), ``KeyError``\\n should be raised.\\n\\n Note: ``for`` loops expect that an ``IndexError`` will be raised for\\n illegal indexes to allow proper detection of the end of the\\n sequence.\\n\\nobject.__setitem__(self, key, value)\\n\\n Called to implement assignment to ``self[key]``. Same note as for\\n ``__getitem__()``. This should only be implemented for mappings if\\n the objects support changes to the values for keys, or if new keys\\n can be added, or for sequences if elements can be replaced. The\\n same exceptions should be raised for improper *key* values as for\\n the ``__getitem__()`` method.\\n\\nobject.__delitem__(self, key)\\n\\n Called to implement deletion of ``self[key]``. Same note as for\\n ``__getitem__()``. This should only be implemented for mappings if\\n the objects support removal of keys, or for sequences if elements\\n can be removed from the sequence. The same exceptions should be\\n raised for improper *key* values as for the ``__getitem__()``\\n method.\\n\\nobject.__iter__(self)\\n\\n This method is called when an iterator is required for a container.\\n This method should return a new iterator object that can iterate\\n over all the objects in the container. For mappings, it should\\n iterate over the keys of the container, and should also be made\\n available as the method ``keys()``.\\n\\n Iterator objects also need to implement this method; they are\\n required to return themselves. For more information on iterator\\n objects, see *Iterator Types*.\\n\\nobject.__reversed__(self)\\n\\n Called (if present) by the ``reversed()`` built-in to implement\\n reverse iteration. It should return a new iterator object that\\n iterates over all the objects in the container in reverse order.\\n\\n If the ``__reversed__()`` method is not provided, the\\n ``reversed()`` built-in will fall back to using the sequence\\n protocol (``__len__()`` and ``__getitem__()``). Objects that\\n support the sequence protocol should only provide\\n ``__reversed__()`` if they can provide an implementation that is\\n more efficient than the one provided by ``reversed()``.\\n\\nThe membership test operators (``in`` and ``not in``) are normally\\nimplemented as an iteration through a sequence. However, container\\nobjects can supply the following special method with a more efficient\\nimplementation, which also does not require the object be a sequence.\\n\\nobject.__contains__(self, item)\\n\\n Called to implement membership test operators. Should return true\\n if *item* is in *self*, false otherwise. For mapping objects, this\\n should consider the keys of the mapping rather than the values or\\n the key-item pairs.\\n\\n For objects that don't define ``__contains__()``, the membership\\n test first tries iteration via ``__iter__()``, then the old\\n sequence iteration protocol via ``__getitem__()``, see *this\\n section in the language reference*.\\n\",\n'shifting':'\\nShifting operations\\n*******************\\n\\nThe shifting operations have lower priority than the arithmetic\\noperations:\\n\\n shift_expr ::= a_expr | shift_expr ( \"<<\" | \">>\" ) a_expr\\n\\nThese operators accept integers as arguments. They shift the first\\nargument to the left or right by the number of bits given by the\\nsecond argument.\\n\\nA right shift by *n* bits is defined as division by ``pow(2,n)``. 
A\\nleft shift by *n* bits is defined as multiplication with ``pow(2,n)``.\\n\\nNote: In the current implementation, the right-hand operand is required to\\n be at most ``sys.maxsize``. If the right-hand operand is larger\\n than ``sys.maxsize`` an ``OverflowError`` exception is raised.\\n',\n'slicings':'\\nSlicings\\n********\\n\\nA slicing selects a range of items in a sequence object (e.g., a\\nstring, tuple or list). Slicings may be used as expressions or as\\ntargets in assignment or ``del`` statements. The syntax for a\\nslicing:\\n\\n slicing ::= primary \"[\" slice_list \"]\"\\n slice_list ::= slice_item (\",\" slice_item)* [\",\"]\\n slice_item ::= expression | proper_slice\\n proper_slice ::= [lower_bound] \":\" [upper_bound] [ \":\" [stride] ]\\n lower_bound ::= expression\\n upper_bound ::= expression\\n stride ::= expression\\n\\nThere is ambiguity in the formal syntax here: anything that looks like\\nan expression list also looks like a slice list, so any subscription\\ncan be interpreted as a slicing. Rather than further complicating the\\nsyntax, this is disambiguated by defining that in this case the\\ninterpretation as a subscription takes priority over the\\ninterpretation as a slicing (this is the case if the slice list\\ncontains no proper slice).\\n\\nThe semantics for a slicing are as follows. The primary must evaluate\\nto a mapping object, and it is indexed (using the same\\n``__getitem__()`` method as normal subscription) with a key that is\\nconstructed from the slice list, as follows. If the slice list\\ncontains at least one comma, the key is a tuple containing the\\nconversion of the slice items; otherwise, the conversion of the lone\\nslice item is the key. The conversion of a slice item that is an\\nexpression is that expression. The conversion of a proper slice is a\\nslice object (see section *The standard type hierarchy*) whose\\n``start``, ``stop`` and ``step`` attributes are the values of the\\nexpressions given as lower bound, upper bound and stride,\\nrespectively, substituting ``None`` for missing expressions.\\n',\n'specialattrs':'\\nSpecial Attributes\\n******************\\n\\nThe implementation adds a few special read-only attributes to several\\nobject types, where they are relevant. Some of these are not reported\\nby the ``dir()`` built-in function.\\n\\nobject.__dict__\\n\\n A dictionary or other mapping object used to store an object\\'s\\n (writable) attributes.\\n\\ninstance.__class__\\n\\n The class to which a class instance belongs.\\n\\nclass.__bases__\\n\\n The tuple of base classes of a class object.\\n\\nclass.__name__\\n\\n The name of the class or type.\\n\\nclass.__qualname__\\n\\n The *qualified name* of the class or type.\\n\\n New in version 3.3.\\n\\nclass.__mro__\\n\\n This attribute is a tuple of classes that are considered when\\n looking for base classes during method resolution.\\n\\nclass.mro()\\n\\n This method can be overridden by a metaclass to customize the\\n method resolution order for its instances. It is called at class\\n instantiation, and its result is stored in ``__mro__``.\\n\\nclass.__subclasses__()\\n\\n Each class keeps a list of weak references to its immediate\\n subclasses. This method returns a list of all those references\\n still alive. 
Example:\\n\\n >>> int.__subclasses__()\\n []\\n\\n-[ Footnotes ]-\\n\\n[1] Additional information on these special methods may be found in\\n the Python Reference Manual (*Basic customization*).\\n\\n[2] As a consequence, the list ``[1, 2]`` is considered equal to\\n ``[1.0, 2.0]``, and similarly for tuples.\\n\\n[3] They must have since the parser can\\'t tell the type of the\\n operands.\\n\\n[4] Cased characters are those with general category property being\\n one of \"Lu\" (Letter, uppercase), \"Ll\" (Letter, lowercase), or \"Lt\"\\n (Letter, titlecase).\\n\\n[5] To format only a tuple you should therefore provide a singleton\\n tuple whose only element is the tuple to be formatted.\\n',\n'specialnames':'\\nSpecial method names\\n********************\\n\\nA class can implement certain operations that are invoked by special\\nsyntax (such as arithmetic operations or subscripting and slicing) by\\ndefining methods with special names. This is Python\\'s approach to\\n*operator overloading*, allowing classes to define their own behavior\\nwith respect to language operators. For instance, if a class defines\\na method named ``__getitem__()``, and ``x`` is an instance of this\\nclass, then ``x[i]`` is roughly equivalent to ``type(x).__getitem__(x,\\ni)``. Except where mentioned, attempts to execute an operation raise\\nan exception when no appropriate method is defined (typically\\n``AttributeError`` or ``TypeError``).\\n\\nWhen implementing a class that emulates any built-in type, it is\\nimportant that the emulation only be implemented to the degree that it\\nmakes sense for the object being modelled. For example, some\\nsequences may work well with retrieval of individual elements, but\\nextracting a slice may not make sense. (One example of this is the\\n``NodeList`` interface in the W3C\\'s Document Object Model.)\\n\\n\\nBasic customization\\n===================\\n\\nobject.__new__(cls[, ...])\\n\\n Called to create a new instance of class *cls*. ``__new__()`` is a\\n static method (special-cased so you need not declare it as such)\\n that takes the class of which an instance was requested as its\\n first argument. The remaining arguments are those passed to the\\n object constructor expression (the call to the class). The return\\n value of ``__new__()`` should be the new object instance (usually\\n an instance of *cls*).\\n\\n Typical implementations create a new instance of the class by\\n invoking the superclass\\'s ``__new__()`` method using\\n ``super(currentclass, cls).__new__(cls[, ...])`` with appropriate\\n arguments and then modifying the newly-created instance as\\n necessary before returning it.\\n\\n If ``__new__()`` returns an instance of *cls*, then the new\\n instance\\'s ``__init__()`` method will be invoked like\\n ``__init__(self[, ...])``, where *self* is the new instance and the\\n remaining arguments are the same as were passed to ``__new__()``.\\n\\n If ``__new__()`` does not return an instance of *cls*, then the new\\n instance\\'s ``__init__()`` method will not be invoked.\\n\\n ``__new__()`` is intended mainly to allow subclasses of immutable\\n types (like int, str, or tuple) to customize instance creation. It\\n is also commonly overridden in custom metaclasses in order to\\n customize class creation.\\n\\nobject.__init__(self[, ...])\\n\\n Called when the instance is created. The arguments are those\\n passed to the class constructor expression. 
If a base class has an\\n ``__init__()`` method, the derived class\\'s ``__init__()`` method,\\n if any, must explicitly call it to ensure proper initialization of\\n the base class part of the instance; for example:\\n ``BaseClass.__init__(self, [args...])``. As a special constraint\\n on constructors, no value may be returned; doing so will cause a\\n ``TypeError`` to be raised at runtime.\\n\\nobject.__del__(self)\\n\\n Called when the instance is about to be destroyed. This is also\\n called a destructor. If a base class has a ``__del__()`` method,\\n the derived class\\'s ``__del__()`` method, if any, must explicitly\\n call it to ensure proper deletion of the base class part of the\\n instance. Note that it is possible (though not recommended!) for\\n the ``__del__()`` method to postpone destruction of the instance by\\n creating a new reference to it. It may then be called at a later\\n time when this new reference is deleted. It is not guaranteed that\\n ``__del__()`` methods are called for objects that still exist when\\n the interpreter exits.\\n\\n Note: ``del x`` doesn\\'t directly call ``x.__del__()`` --- the former\\n decrements the reference count for ``x`` by one, and the latter\\n is only called when ``x``\\'s reference count reaches zero. Some\\n common situations that may prevent the reference count of an\\n object from going to zero include: circular references between\\n objects (e.g., a doubly-linked list or a tree data structure with\\n parent and child pointers); a reference to the object on the\\n stack frame of a function that caught an exception (the traceback\\n stored in ``sys.exc_info()[2]`` keeps the stack frame alive); or\\n a reference to the object on the stack frame that raised an\\n unhandled exception in interactive mode (the traceback stored in\\n ``sys.last_traceback`` keeps the stack frame alive). The first\\n situation can only be remedied by explicitly breaking the cycles;\\n the latter two situations can be resolved by storing ``None`` in\\n ``sys.last_traceback``. Circular references which are garbage are\\n detected when the option cycle detector is enabled (it\\'s on by\\n default), but can only be cleaned up if there are no Python-\\n level ``__del__()`` methods involved. Refer to the documentation\\n for the ``gc`` module for more information about how\\n ``__del__()`` methods are handled by the cycle detector,\\n particularly the description of the ``garbage`` value.\\n\\n Warning: Due to the precarious circumstances under which ``__del__()``\\n methods are invoked, exceptions that occur during their execution\\n are ignored, and a warning is printed to ``sys.stderr`` instead.\\n Also, when ``__del__()`` is invoked in response to a module being\\n deleted (e.g., when execution of the program is done), other\\n globals referenced by the ``__del__()`` method may already have\\n been deleted or in the process of being torn down (e.g. the\\n import machinery shutting down). For this reason, ``__del__()``\\n methods should do the absolute minimum needed to maintain\\n external invariants. 
Starting with version 1.5, Python\\n guarantees that globals whose name begins with a single\\n underscore are deleted from their module before other globals are\\n deleted; if no other references to such globals exist, this may\\n help in assuring that imported modules are still available at the\\n time when the ``__del__()`` method is called.\\n\\nobject.__repr__(self)\\n\\n Called by the ``repr()`` built-in function to compute the\\n \"official\" string representation of an object. If at all possible,\\n this should look like a valid Python expression that could be used\\n to recreate an object with the same value (given an appropriate\\n environment). If this is not possible, a string of the form\\n ``<...some useful description...>`` should be returned. The return\\n value must be a string object. If a class defines ``__repr__()``\\n but not ``__str__()``, then ``__repr__()`` is also used when an\\n \"informal\" string representation of instances of that class is\\n required.\\n\\n This is typically used for debugging, so it is important that the\\n representation is information-rich and unambiguous.\\n\\nobject.__str__(self)\\n\\n Called by ``str(object)`` and the built-in functions ``format()``\\n and ``print()`` to compute the \"informal\" or nicely printable\\n string representation of an object. The return value must be a\\n *string* object.\\n\\n This method differs from ``object.__repr__()`` in that there is no\\n expectation that ``__str__()`` return a valid Python expression: a\\n more convenient or concise representation can be used.\\n\\n The default implementation defined by the built-in type ``object``\\n calls ``object.__repr__()``.\\n\\nobject.__bytes__(self)\\n\\n Called by ``bytes()`` to compute a byte-string representation of an\\n object. This should return a ``bytes`` object.\\n\\nobject.__format__(self, format_spec)\\n\\n Called by the ``format()`` built-in function (and by extension, the\\n ``str.format()`` method of class ``str``) to produce a \"formatted\"\\n string representation of an object. The ``format_spec`` argument is\\n a string that contains a description of the formatting options\\n desired. The interpretation of the ``format_spec`` argument is up\\n to the type implementing ``__format__()``, however most classes\\n will either delegate formatting to one of the built-in types, or\\n use a similar formatting option syntax.\\n\\n See *Format Specification Mini-Language* for a description of the\\n standard formatting syntax.\\n\\n The return value must be a string object.\\n\\nobject.__lt__(self, other)\\nobject.__le__(self, other)\\nobject.__eq__(self, other)\\nobject.__ne__(self, other)\\nobject.__gt__(self, other)\\nobject.__ge__(self, other)\\n\\n These are the so-called \"rich comparison\" methods. The\\n correspondence between operator symbols and method names is as\\n follows: ``x<y`` calls ``x.__lt__(y)``, ``x<=y`` calls\\n ``x.__le__(y)``, ``x==y`` calls ``x.__eq__(y)``, ``x!=y`` calls\\n ``x.__ne__(y)``, ``x>y`` calls ``x.__gt__(y)``, and ``x>=y`` calls\\n ``x.__ge__(y)``.\\n\\n A rich comparison method may return the singleton\\n ``NotImplemented`` if it does not implement the operation for a\\n given pair of arguments. By convention, ``False`` and ``True`` are\\n returned for a successful comparison. 
However, these methods can\\n return any value, so if the comparison operator is used in a\\n Boolean context (e.g., in the condition of an ``if`` statement),\\n Python will call ``bool()`` on the value to determine if the result\\n is true or false.\\n\\n There are no implied relationships among the comparison operators.\\n The truth of ``x==y`` does not imply that ``x!=y`` is false.\\n Accordingly, when defining ``__eq__()``, one should also define\\n ``__ne__()`` so that the operators will behave as expected. See\\n the paragraph on ``__hash__()`` for some important notes on\\n creating *hashable* objects which support custom comparison\\n operations and are usable as dictionary keys.\\n\\n There are no swapped-argument versions of these methods (to be used\\n when the left argument does not support the operation but the right\\n argument does); rather, ``__lt__()`` and ``__gt__()`` are each\\n other\\'s reflection, ``__le__()`` and ``__ge__()`` are each other\\'s\\n reflection, and ``__eq__()`` and ``__ne__()`` are their own\\n reflection.\\n\\n Arguments to rich comparison methods are never coerced.\\n\\n To automatically generate ordering operations from a single root\\n operation, see ``functools.total_ordering()``.\\n\\nobject.__hash__(self)\\n\\n Called by built-in function ``hash()`` and for operations on\\n members of hashed collections including ``set``, ``frozenset``, and\\n ``dict``. ``__hash__()`` should return an integer. The only\\n required property is that objects which compare equal have the same\\n hash value; it is advised to somehow mix together (e.g. using\\n exclusive or) the hash values for the components of the object that\\n also play a part in comparison of objects.\\n\\n If a class does not define an ``__eq__()`` method it should not\\n define a ``__hash__()`` operation either; if it defines\\n ``__eq__()`` but not ``__hash__()``, its instances will not be\\n usable as items in hashable collections. If a class defines\\n mutable objects and implements an ``__eq__()`` method, it should\\n not implement ``__hash__()``, since the implementation of hashable\\n collections requires that a key\\'s hash value is immutable (if the\\n object\\'s hash value changes, it will be in the wrong hash bucket).\\n\\n User-defined classes have ``__eq__()`` and ``__hash__()`` methods\\n by default; with them, all objects compare unequal (except with\\n themselves) and ``x.__hash__()`` returns an appropriate value such\\n that ``x == y`` implies both that ``x is y`` and ``hash(x) ==\\n hash(y)``.\\n\\n A class that overrides ``__eq__()`` and does not define\\n ``__hash__()`` will have its ``__hash__()`` implicitly set to\\n ``None``. When the ``__hash__()`` method of a class is ``None``,\\n instances of the class will raise an appropriate ``TypeError`` when\\n a program attempts to retrieve their hash value, and will also be\\n correctly identified as unhashable when checking ``isinstance(obj,\\n collections.Hashable)``.\\n\\n If a class that overrides ``__eq__()`` needs to retain the\\n implementation of ``__hash__()`` from a parent class, the\\n interpreter must be told this explicitly by setting ``__hash__ =\\n <ParentClass>.__hash__``.\\n\\n If a class that does not override ``__eq__()`` wishes to suppress\\n hash support, it should include ``__hash__ = None`` in the class\\n definition. 
A class which defines its own ``__hash__()`` that\\n explicitly raises a ``TypeError`` would be incorrectly identified\\n as hashable by an ``isinstance(obj, collections.Hashable)`` call.\\n\\n Note: By default, the ``__hash__()`` values of str, bytes and datetime\\n objects are \"salted\" with an unpredictable random value.\\n Although they remain constant within an individual Python\\n process, they are not predictable between repeated invocations of\\n Python.This is intended to provide protection against a denial-\\n of-service caused by carefully-chosen inputs that exploit the\\n worst case performance of a dict insertion, O(n^2) complexity.\\n See http://www.ocert.org/advisories/ocert-2011-003.html for\\n details.Changing hash values affects the iteration order of\\n dicts, sets and other mappings. Python has never made guarantees\\n about this ordering (and it typically varies between 32-bit and\\n 64-bit builds).See also ``PYTHONHASHSEED``.\\n\\n Changed in version 3.3: Hash randomization is enabled by default.\\n\\nobject.__bool__(self)\\n\\n Called to implement truth value testing and the built-in operation\\n ``bool()``; should return ``False`` or ``True``. When this method\\n is not defined, ``__len__()`` is called, if it is defined, and the\\n object is considered true if its result is nonzero. If a class\\n defines neither ``__len__()`` nor ``__bool__()``, all its instances\\n are considered true.\\n\\n\\nCustomizing attribute access\\n============================\\n\\nThe following methods can be defined to customize the meaning of\\nattribute access (use of, assignment to, or deletion of ``x.name``)\\nfor class instances.\\n\\nobject.__getattr__(self, name)\\n\\n Called when an attribute lookup has not found the attribute in the\\n usual places (i.e. it is not an instance attribute nor is it found\\n in the class tree for ``self``). ``name`` is the attribute name.\\n This method should return the (computed) attribute value or raise\\n an ``AttributeError`` exception.\\n\\n Note that if the attribute is found through the normal mechanism,\\n ``__getattr__()`` is not called. (This is an intentional asymmetry\\n between ``__getattr__()`` and ``__setattr__()``.) This is done both\\n for efficiency reasons and because otherwise ``__getattr__()``\\n would have no way to access other attributes of the instance. Note\\n that at least for instance variables, you can fake total control by\\n not inserting any values in the instance attribute dictionary (but\\n instead inserting them in another object). See the\\n ``__getattribute__()`` method below for a way to actually get total\\n control over attribute access.\\n\\nobject.__getattribute__(self, name)\\n\\n Called unconditionally to implement attribute accesses for\\n instances of the class. If the class also defines\\n ``__getattr__()``, the latter will not be called unless\\n ``__getattribute__()`` either calls it explicitly or raises an\\n ``AttributeError``. This method should return the (computed)\\n attribute value or raise an ``AttributeError`` exception. In order\\n to avoid infinite recursion in this method, its implementation\\n should always call the base class method with the same name to\\n access any attributes it needs, for example,\\n ``object.__getattribute__(self, name)``.\\n\\n Note: This method may still be bypassed when looking up special methods\\n as the result of implicit invocation via language syntax or\\n built-in functions. 
See *Special method lookup*.\\n\\nobject.__setattr__(self, name, value)\\n\\n Called when an attribute assignment is attempted. This is called\\n instead of the normal mechanism (i.e. store the value in the\\n instance dictionary). *name* is the attribute name, *value* is the\\n value to be assigned to it.\\n\\n If ``__setattr__()`` wants to assign to an instance attribute, it\\n should call the base class method with the same name, for example,\\n ``object.__setattr__(self, name, value)``.\\n\\nobject.__delattr__(self, name)\\n\\n Like ``__setattr__()`` but for attribute deletion instead of\\n assignment. This should only be implemented if ``del obj.name`` is\\n meaningful for the object.\\n\\nobject.__dir__(self)\\n\\n Called when ``dir()`` is called on the object. A sequence must be\\n returned. ``dir()`` converts the returned sequence to a list and\\n sorts it.\\n\\n\\nImplementing Descriptors\\n------------------------\\n\\nThe following methods only apply when an instance of the class\\ncontaining the method (a so-called *descriptor* class) appears in an\\n*owner* class (the descriptor must be in either the owner\\'s class\\ndictionary or in the class dictionary for one of its parents). In the\\nexamples below, \"the attribute\" refers to the attribute whose name is\\nthe key of the property in the owner class\\' ``__dict__``.\\n\\nobject.__get__(self, instance, owner)\\n\\n Called to get the attribute of the owner class (class attribute\\n access) or of an instance of that class (instance attribute\\n access). *owner* is always the owner class, while *instance* is the\\n instance that the attribute was accessed through, or ``None`` when\\n the attribute is accessed through the *owner*. This method should\\n return the (computed) attribute value or raise an\\n ``AttributeError`` exception.\\n\\nobject.__set__(self, instance, value)\\n\\n Called to set the attribute on an instance *instance* of the owner\\n class to a new value, *value*.\\n\\nobject.__delete__(self, instance)\\n\\n Called to delete the attribute on an instance *instance* of the\\n owner class.\\n\\n\\nInvoking Descriptors\\n--------------------\\n\\nIn general, a descriptor is an object attribute with \"binding\\nbehavior\", one whose attribute access has been overridden by methods\\nin the descriptor protocol: ``__get__()``, ``__set__()``, and\\n``__delete__()``. If any of those methods are defined for an object,\\nit is said to be a descriptor.\\n\\nThe default behavior for attribute access is to get, set, or delete\\nthe attribute from an object\\'s dictionary. For instance, ``a.x`` has a\\nlookup chain starting with ``a.__dict__[\\'x\\']``, then\\n``type(a).__dict__[\\'x\\']``, and continuing through the base classes of\\n``type(a)`` excluding metaclasses.\\n\\nHowever, if the looked-up value is an object defining one of the\\ndescriptor methods, then Python may override the default behavior and\\ninvoke the descriptor method instead. 
Where this occurs in the\\nprecedence chain depends on which descriptor methods were defined and\\nhow they were called.\\n\\nThe starting point for descriptor invocation is a binding, ``a.x``.\\nHow the arguments are assembled depends on ``a``:\\n\\nDirect Call\\n The simplest and least common call is when user code directly\\n invokes a descriptor method: ``x.__get__(a)``.\\n\\nInstance Binding\\n If binding to an object instance, ``a.x`` is transformed into the\\n call: ``type(a).__dict__[\\'x\\'].__get__(a, type(a))``.\\n\\nClass Binding\\n If binding to a class, ``A.x`` is transformed into the call:\\n ``A.__dict__[\\'x\\'].__get__(None, A)``.\\n\\nSuper Binding\\n If ``a`` is an instance of ``super``, then the binding ``super(B,\\n obj).m()`` searches ``obj.__class__.__mro__`` for the base class\\n ``A`` immediately preceding ``B`` and then invokes the descriptor\\n with the call: ``A.__dict__[\\'m\\'].__get__(obj, obj.__class__)``.\\n\\nFor instance bindings, the precedence of descriptor invocation depends\\non the which descriptor methods are defined. A descriptor can define\\nany combination of ``__get__()``, ``__set__()`` and ``__delete__()``.\\nIf it does not define ``__get__()``, then accessing the attribute will\\nreturn the descriptor object itself unless there is a value in the\\nobject\\'s instance dictionary. If the descriptor defines ``__set__()``\\nand/or ``__delete__()``, it is a data descriptor; if it defines\\nneither, it is a non-data descriptor. Normally, data descriptors\\ndefine both ``__get__()`` and ``__set__()``, while non-data\\ndescriptors have just the ``__get__()`` method. Data descriptors with\\n``__set__()`` and ``__get__()`` defined always override a redefinition\\nin an instance dictionary. In contrast, non-data descriptors can be\\noverridden by instances.\\n\\nPython methods (including ``staticmethod()`` and ``classmethod()``)\\nare implemented as non-data descriptors. Accordingly, instances can\\nredefine and override methods. This allows individual instances to\\nacquire behaviors that differ from other instances of the same class.\\n\\nThe ``property()`` function is implemented as a data descriptor.\\nAccordingly, instances cannot override the behavior of a property.\\n\\n\\n__slots__\\n---------\\n\\nBy default, instances of classes have a dictionary for attribute\\nstorage. This wastes space for objects having very few instance\\nvariables. The space consumption can become acute when creating large\\nnumbers of instances.\\n\\nThe default can be overridden by defining *__slots__* in a class\\ndefinition. The *__slots__* declaration takes a sequence of instance\\nvariables and reserves just enough space in each instance to hold a\\nvalue for each variable. Space is saved because *__dict__* is not\\ncreated for each instance.\\n\\nobject.__slots__\\n\\n This class variable can be assigned a string, iterable, or sequence\\n of strings with variable names used by instances. If defined in a\\n class, *__slots__* reserves space for the declared variables and\\n prevents the automatic creation of *__dict__* and *__weakref__* for\\n each instance.\\n\\n\\nNotes on using *__slots__*\\n~~~~~~~~~~~~~~~~~~~~~~~~~~\\n\\n* When inheriting from a class without *__slots__*, the *__dict__*\\n attribute of that class will always be accessible, so a *__slots__*\\n definition in the subclass is meaningless.\\n\\n* Without a *__dict__* variable, instances cannot be assigned new\\n variables not listed in the *__slots__* definition. 
Attempts to\\n assign to an unlisted variable name raises ``AttributeError``. If\\n dynamic assignment of new variables is desired, then add\\n ``\\'__dict__\\'`` to the sequence of strings in the *__slots__*\\n declaration.\\n\\n* Without a *__weakref__* variable for each instance, classes defining\\n *__slots__* do not support weak references to its instances. If weak\\n reference support is needed, then add ``\\'__weakref__\\'`` to the\\n sequence of strings in the *__slots__* declaration.\\n\\n* *__slots__* are implemented at the class level by creating\\n descriptors (*Implementing Descriptors*) for each variable name. As\\n a result, class attributes cannot be used to set default values for\\n instance variables defined by *__slots__*; otherwise, the class\\n attribute would overwrite the descriptor assignment.\\n\\n* The action of a *__slots__* declaration is limited to the class\\n where it is defined. As a result, subclasses will have a *__dict__*\\n unless they also define *__slots__* (which must only contain names\\n of any *additional* slots).\\n\\n* If a class defines a slot also defined in a base class, the instance\\n variable defined by the base class slot is inaccessible (except by\\n retrieving its descriptor directly from the base class). This\\n renders the meaning of the program undefined. In the future, a\\n check may be added to prevent this.\\n\\n* Nonempty *__slots__* does not work for classes derived from\\n \"variable-length\" built-in types such as ``int``, ``str`` and\\n ``tuple``.\\n\\n* Any non-string iterable may be assigned to *__slots__*. Mappings may\\n also be used; however, in the future, special meaning may be\\n assigned to the values corresponding to each key.\\n\\n* *__class__* assignment works only if both classes have the same\\n *__slots__*.\\n\\n\\nCustomizing class creation\\n==========================\\n\\nBy default, classes are constructed using ``type()``. The class body\\nis executed in a new namespace and the class name is bound locally to\\nthe result of ``type(name, bases, namespace)``.\\n\\nThe class creation process can be customised by passing the\\n``metaclass`` keyword argument in the class definition line, or by\\ninheriting from an existing class that included such an argument. In\\nthe following example, both ``MyClass`` and ``MySubclass`` are\\ninstances of ``Meta``:\\n\\n class Meta(type):\\n pass\\n\\n class MyClass(metaclass=Meta):\\n pass\\n\\n class MySubclass(MyClass):\\n pass\\n\\nAny other keyword arguments that are specified in the class definition\\nare passed through to all metaclass operations described below.\\n\\nWhen a class definition is executed, the following steps occur:\\n\\n* the appropriate metaclass is determined\\n\\n* the class namespace is prepared\\n\\n* the class body is executed\\n\\n* the class object is created\\n\\n\\nDetermining the appropriate metaclass\\n-------------------------------------\\n\\nThe appropriate metaclass for a class definition is determined as\\nfollows:\\n\\n* if no bases and no explicit metaclass are given, then ``type()`` is\\n used\\n\\n* if an explicit metaclass is given and it is *not* an instance of\\n ``type()``, then it is used directly as the metaclass\\n\\n* if an instance of ``type()`` is given as the explicit metaclass, or\\n bases are defined, then the most derived metaclass is used\\n\\nThe most derived metaclass is selected from the explicitly specified\\nmetaclass (if any) and the metaclasses (i.e. ``type(cls)``) of all\\nspecified base classes. 
The most derived metaclass is one which is a\\nsubtype of *all* of these candidate metaclasses. If none of the\\ncandidate metaclasses meets that criterion, then the class definition\\nwill fail with ``TypeError``.\\n\\n\\nPreparing the class namespace\\n-----------------------------\\n\\nOnce the appropriate metaclass has been identified, then the class\\nnamespace is prepared. If the metaclass has a ``__prepare__``\\nattribute, it is called as ``namespace = metaclass.__prepare__(name,\\nbases, **kwds)`` (where the additional keyword arguments, if any, come\\nfrom the class definition).\\n\\nIf the metaclass has no ``__prepare__`` attribute, then the class\\nnamespace is initialised as an empty ``dict()`` instance.\\n\\nSee also:\\n\\n **PEP 3115** - Metaclasses in Python 3000\\n Introduced the ``__prepare__`` namespace hook\\n\\n\\nExecuting the class body\\n------------------------\\n\\nThe class body is executed (approximately) as ``exec(body, globals(),\\nnamespace)``. The key difference from a normal call to ``exec()`` is\\nthat lexical scoping allows the class body (including any methods) to\\nreference names from the current and outer scopes when the class\\ndefinition occurs inside a function.\\n\\nHowever, even when the class definition occurs inside the function,\\nmethods defined inside the class still cannot see names defined at the\\nclass scope. Class variables must be accessed through the first\\nparameter of instance or class methods, and cannot be accessed at all\\nfrom static methods.\\n\\n\\nCreating the class object\\n-------------------------\\n\\nOnce the class namespace has been populated by executing the class\\nbody, the class object is created by calling ``metaclass(name, bases,\\nnamespace, **kwds)`` (the additional keywords passed here are the same\\nas those passed to ``__prepare__``).\\n\\nThis class object is the one that will be referenced by the zero-\\nargument form of ``super()``. ``__class__`` is an implicit closure\\nreference created by the compiler if any methods in a class body refer\\nto either ``__class__`` or ``super``. This allows the zero argument\\nform of ``super()`` to correctly identify the class being defined\\nbased on lexical scoping, while the class or instance that was used to\\nmake the current call is identified based on the first argument passed\\nto the method.\\n\\nAfter the class object is created, it is passed to the class\\ndecorators included in the class definition (if any) and the resulting\\nobject is bound in the local namespace as the defined class.\\n\\nSee also:\\n\\n **PEP 3135** - New super\\n Describes the implicit ``__class__`` closure reference\\n\\n\\nMetaclass example\\n-----------------\\n\\nThe potential uses for metaclasses are boundless. 
Some ideas that have\\nbeen explored include logging, interface checking, automatic\\ndelegation, automatic property creation, proxies, frameworks, and\\nautomatic resource locking/synchronization.\\n\\nHere is an example of a metaclass that uses an\\n``collections.OrderedDict`` to remember the order that class members\\nwere defined:\\n\\n class OrderedClass(type):\\n\\n @classmethod\\n def __prepare__(metacls, name, bases, **kwds):\\n return collections.OrderedDict()\\n\\n def __new__(cls, name, bases, namespace, **kwds):\\n result = type.__new__(cls, name, bases, dict(namespace))\\n result.members = tuple(namespace)\\n return result\\n\\n class A(metaclass=OrderedClass):\\n def one(self): pass\\n def two(self): pass\\n def three(self): pass\\n def four(self): pass\\n\\n >>> A.members\\n (\\'__module__\\', \\'one\\', \\'two\\', \\'three\\', \\'four\\')\\n\\nWhen the class definition for *A* gets executed, the process begins\\nwith calling the metaclass\\'s ``__prepare__()`` method which returns an\\nempty ``collections.OrderedDict``. That mapping records the methods\\nand attributes of *A* as they are defined within the body of the class\\nstatement. Once those definitions are executed, the ordered dictionary\\nis fully populated and the metaclass\\'s ``__new__()`` method gets\\ninvoked. That method builds the new type and it saves the ordered\\ndictionary keys in an attribute called ``members``.\\n\\n\\nCustomizing instance and subclass checks\\n========================================\\n\\nThe following methods are used to override the default behavior of the\\n``isinstance()`` and ``issubclass()`` built-in functions.\\n\\nIn particular, the metaclass ``abc.ABCMeta`` implements these methods\\nin order to allow the addition of Abstract Base Classes (ABCs) as\\n\"virtual base classes\" to any class or type (including built-in\\ntypes), including other ABCs.\\n\\nclass.__instancecheck__(self, instance)\\n\\n Return true if *instance* should be considered a (direct or\\n indirect) instance of *class*. If defined, called to implement\\n ``isinstance(instance, class)``.\\n\\nclass.__subclasscheck__(self, subclass)\\n\\n Return true if *subclass* should be considered a (direct or\\n indirect) subclass of *class*. If defined, called to implement\\n ``issubclass(subclass, class)``.\\n\\nNote that these methods are looked up on the type (metaclass) of a\\nclass. They cannot be defined as class methods in the actual class.\\nThis is consistent with the lookup of special methods that are called\\non instances, only in this case the instance is itself a class.\\n\\nSee also:\\n\\n **PEP 3119** - Introducing Abstract Base Classes\\n Includes the specification for customizing ``isinstance()`` and\\n ``issubclass()`` behavior through ``__instancecheck__()`` and\\n ``__subclasscheck__()``, with motivation for this functionality\\n in the context of adding Abstract Base Classes (see the ``abc``\\n module) to the language.\\n\\n\\nEmulating callable objects\\n==========================\\n\\nobject.__call__(self[, args...])\\n\\n Called when the instance is \"called\" as a function; if this method\\n is defined, ``x(arg1, arg2, ...)`` is a shorthand for\\n ``x.__call__(arg1, arg2, ...)``.\\n\\n\\nEmulating container types\\n=========================\\n\\nThe following methods can be defined to implement container objects.\\nContainers usually are sequences (such as lists or tuples) or mappings\\n(like dictionaries), but can represent other containers as well. 
The\\nfirst set of methods is used either to emulate a sequence or to\\nemulate a mapping; the difference is that for a sequence, the\\nallowable keys should be the integers *k* for which ``0 <= k < N``\\nwhere *N* is the length of the sequence, or slice objects, which\\ndefine a range of items. It is also recommended that mappings provide\\nthe methods ``keys()``, ``values()``, ``items()``, ``get()``,\\n``clear()``, ``setdefault()``, ``pop()``, ``popitem()``, ``copy()``,\\nand ``update()`` behaving similar to those for Python\\'s standard\\ndictionary objects. The ``collections`` module provides a\\n``MutableMapping`` abstract base class to help create those methods\\nfrom a base set of ``__getitem__()``, ``__setitem__()``,\\n``__delitem__()``, and ``keys()``. Mutable sequences should provide\\nmethods ``append()``, ``count()``, ``index()``, ``extend()``,\\n``insert()``, ``pop()``, ``remove()``, ``reverse()`` and ``sort()``,\\nlike Python standard list objects. Finally, sequence types should\\nimplement addition (meaning concatenation) and multiplication (meaning\\nrepetition) by defining the methods ``__add__()``, ``__radd__()``,\\n``__iadd__()``, ``__mul__()``, ``__rmul__()`` and ``__imul__()``\\ndescribed below; they should not define other numerical operators. It\\nis recommended that both mappings and sequences implement the\\n``__contains__()`` method to allow efficient use of the ``in``\\noperator; for mappings, ``in`` should search the mapping\\'s keys; for\\nsequences, it should search through the values. It is further\\nrecommended that both mappings and sequences implement the\\n``__iter__()`` method to allow efficient iteration through the\\ncontainer; for mappings, ``__iter__()`` should be the same as\\n``keys()``; for sequences, it should iterate through the values.\\n\\nobject.__len__(self)\\n\\n Called to implement the built-in function ``len()``. Should return\\n the length of the object, an integer ``>=`` 0. Also, an object\\n that doesn\\'t define a ``__bool__()`` method and whose ``__len__()``\\n method returns zero is considered to be false in a Boolean context.\\n\\nNote: Slicing is done exclusively with the following three methods. A\\n call like\\n\\n a[1:2] = b\\n\\n is translated to\\n\\n a[slice(1, 2, None)] = b\\n\\n and so forth. Missing slice items are always filled in with\\n ``None``.\\n\\nobject.__getitem__(self, key)\\n\\n Called to implement evaluation of ``self[key]``. For sequence\\n types, the accepted keys should be integers and slice objects.\\n Note that the special interpretation of negative indexes (if the\\n class wishes to emulate a sequence type) is up to the\\n ``__getitem__()`` method. If *key* is of an inappropriate type,\\n ``TypeError`` may be raised; if of a value outside the set of\\n indexes for the sequence (after any special interpretation of\\n negative values), ``IndexError`` should be raised. For mapping\\n types, if *key* is missing (not in the container), ``KeyError``\\n should be raised.\\n\\n Note: ``for`` loops expect that an ``IndexError`` will be raised for\\n illegal indexes to allow proper detection of the end of the\\n sequence.\\n\\nobject.__setitem__(self, key, value)\\n\\n Called to implement assignment to ``self[key]``. Same note as for\\n ``__getitem__()``. This should only be implemented for mappings if\\n the objects support changes to the values for keys, or if new keys\\n can be added, or for sequences if elements can be replaced. 
The\\n same exceptions should be raised for improper *key* values as for\\n the ``__getitem__()`` method.\\n\\nobject.__delitem__(self, key)\\n\\n Called to implement deletion of ``self[key]``. Same note as for\\n ``__getitem__()``. This should only be implemented for mappings if\\n the objects support removal of keys, or for sequences if elements\\n can be removed from the sequence. The same exceptions should be\\n raised for improper *key* values as for the ``__getitem__()``\\n method.\\n\\nobject.__iter__(self)\\n\\n This method is called when an iterator is required for a container.\\n This method should return a new iterator object that can iterate\\n over all the objects in the container. For mappings, it should\\n iterate over the keys of the container, and should also be made\\n available as the method ``keys()``.\\n\\n Iterator objects also need to implement this method; they are\\n required to return themselves. For more information on iterator\\n objects, see *Iterator Types*.\\n\\nobject.__reversed__(self)\\n\\n Called (if present) by the ``reversed()`` built-in to implement\\n reverse iteration. It should return a new iterator object that\\n iterates over all the objects in the container in reverse order.\\n\\n If the ``__reversed__()`` method is not provided, the\\n ``reversed()`` built-in will fall back to using the sequence\\n protocol (``__len__()`` and ``__getitem__()``). Objects that\\n support the sequence protocol should only provide\\n ``__reversed__()`` if they can provide an implementation that is\\n more efficient than the one provided by ``reversed()``.\\n\\nThe membership test operators (``in`` and ``not in``) are normally\\nimplemented as an iteration through a sequence. However, container\\nobjects can supply the following special method with a more efficient\\nimplementation, which also does not require the object be a sequence.\\n\\nobject.__contains__(self, item)\\n\\n Called to implement membership test operators. Should return true\\n if *item* is in *self*, false otherwise. For mapping objects, this\\n should consider the keys of the mapping rather than the values or\\n the key-item pairs.\\n\\n For objects that don\\'t define ``__contains__()``, the membership\\n test first tries iteration via ``__iter__()``, then the old\\n sequence iteration protocol via ``__getitem__()``, see *this\\n section in the language reference*.\\n\\n\\nEmulating numeric types\\n=======================\\n\\nThe following methods can be defined to emulate numeric objects.\\nMethods corresponding to operations that are not supported by the\\nparticular kind of number implemented (e.g., bitwise operations for\\nnon-integral numbers) should be left undefined.\\n\\nobject.__add__(self, other)\\nobject.__sub__(self, other)\\nobject.__mul__(self, other)\\nobject.__truediv__(self, other)\\nobject.__floordiv__(self, other)\\nobject.__mod__(self, other)\\nobject.__divmod__(self, other)\\nobject.__pow__(self, other[, modulo])\\nobject.__lshift__(self, other)\\nobject.__rshift__(self, other)\\nobject.__and__(self, other)\\nobject.__xor__(self, other)\\nobject.__or__(self, other)\\n\\n These methods are called to implement the binary arithmetic\\n operations (``+``, ``-``, ``*``, ``/``, ``//``, ``%``,\\n ``divmod()``, ``pow()``, ``**``, ``<<``, ``>>``, ``&``, ``^``,\\n ``|``). For instance, to evaluate the expression ``x + y``, where\\n *x* is an instance of a class that has an ``__add__()`` method,\\n ``x.__add__(y)`` is called. 
The ``__divmod__()`` method should be\\n the equivalent to using ``__floordiv__()`` and ``__mod__()``; it\\n should not be related to ``__truediv__()``. Note that\\n ``__pow__()`` should be defined to accept an optional third\\n argument if the ternary version of the built-in ``pow()`` function\\n is to be supported.\\n\\n If one of those methods does not support the operation with the\\n supplied arguments, it should return ``NotImplemented``.\\n\\nobject.__radd__(self, other)\\nobject.__rsub__(self, other)\\nobject.__rmul__(self, other)\\nobject.__rtruediv__(self, other)\\nobject.__rfloordiv__(self, other)\\nobject.__rmod__(self, other)\\nobject.__rdivmod__(self, other)\\nobject.__rpow__(self, other)\\nobject.__rlshift__(self, other)\\nobject.__rrshift__(self, other)\\nobject.__rand__(self, other)\\nobject.__rxor__(self, other)\\nobject.__ror__(self, other)\\n\\n These methods are called to implement the binary arithmetic\\n operations (``+``, ``-``, ``*``, ``/``, ``//``, ``%``,\\n ``divmod()``, ``pow()``, ``**``, ``<<``, ``>>``, ``&``, ``^``,\\n ``|``) with reflected (swapped) operands. These functions are only\\n called if the left operand does not support the corresponding\\n operation and the operands are of different types. [2] For\\n instance, to evaluate the expression ``x - y``, where *y* is an\\n instance of a class that has an ``__rsub__()`` method,\\n ``y.__rsub__(x)`` is called if ``x.__sub__(y)`` returns\\n *NotImplemented*.\\n\\n Note that ternary ``pow()`` will not try calling ``__rpow__()``\\n (the coercion rules would become too complicated).\\n\\n Note: If the right operand\\'s type is a subclass of the left operand\\'s\\n type and that subclass provides the reflected method for the\\n operation, this method will be called before the left operand\\'s\\n non-reflected method. This behavior allows subclasses to\\n override their ancestors\\' operations.\\n\\nobject.__iadd__(self, other)\\nobject.__isub__(self, other)\\nobject.__imul__(self, other)\\nobject.__itruediv__(self, other)\\nobject.__ifloordiv__(self, other)\\nobject.__imod__(self, other)\\nobject.__ipow__(self, other[, modulo])\\nobject.__ilshift__(self, other)\\nobject.__irshift__(self, other)\\nobject.__iand__(self, other)\\nobject.__ixor__(self, other)\\nobject.__ior__(self, other)\\n\\n These methods are called to implement the augmented arithmetic\\n assignments (``+=``, ``-=``, ``*=``, ``/=``, ``//=``, ``%=``,\\n ``**=``, ``<<=``, ``>>=``, ``&=``, ``^=``, ``|=``). These methods\\n should attempt to do the operation in-place (modifying *self*) and\\n return the result (which could be, but does not have to be,\\n *self*). If a specific method is not defined, the augmented\\n assignment falls back to the normal methods. For instance, to\\n execute the statement ``x += y``, where *x* is an instance of a\\n class that has an ``__iadd__()`` method, ``x.__iadd__(y)`` is\\n called. If *x* is an instance of a class that does not define a\\n ``__iadd__()`` method, ``x.__add__(y)`` and ``y.__radd__(x)`` are\\n considered, as with the evaluation of ``x + y``.\\n\\nobject.__neg__(self)\\nobject.__pos__(self)\\nobject.__abs__(self)\\nobject.__invert__(self)\\n\\n Called to implement the unary arithmetic operations (``-``, ``+``,\\n ``abs()`` and ``~``).\\n\\nobject.__complex__(self)\\nobject.__int__(self)\\nobject.__float__(self)\\nobject.__round__(self[, n])\\n\\n Called to implement the built-in functions ``complex()``,\\n ``int()``, ``float()`` and ``round()``. 
Should return a value of\\n the appropriate type.\\n\\nobject.__index__(self)\\n\\n Called to implement ``operator.index()``. Also called whenever\\n Python needs an integer object (such as in slicing, or in the\\n built-in ``bin()``, ``hex()`` and ``oct()`` functions). Must return\\n an integer.\\n\\n\\nWith Statement Context Managers\\n===============================\\n\\nA *context manager* is an object that defines the runtime context to\\nbe established when executing a ``with`` statement. The context\\nmanager handles the entry into, and the exit from, the desired runtime\\ncontext for the execution of the block of code. Context managers are\\nnormally invoked using the ``with`` statement (described in section\\n*The with statement*), but can also be used by directly invoking their\\nmethods.\\n\\nTypical uses of context managers include saving and restoring various\\nkinds of global state, locking and unlocking resources, closing opened\\nfiles, etc.\\n\\nFor more information on context managers, see *Context Manager Types*.\\n\\nobject.__enter__(self)\\n\\n Enter the runtime context related to this object. The ``with``\\n statement will bind this method\\'s return value to the target(s)\\n specified in the ``as`` clause of the statement, if any.\\n\\nobject.__exit__(self, exc_type, exc_value, traceback)\\n\\n Exit the runtime context related to this object. The parameters\\n describe the exception that caused the context to be exited. If the\\n context was exited without an exception, all three arguments will\\n be ``None``.\\n\\n If an exception is supplied, and the method wishes to suppress the\\n exception (i.e., prevent it from being propagated), it should\\n return a true value. Otherwise, the exception will be processed\\n normally upon exit from this method.\\n\\n Note that ``__exit__()`` methods should not reraise the passed-in\\n exception; this is the caller\\'s responsibility.\\n\\nSee also:\\n\\n **PEP 0343** - The \"with\" statement\\n The specification, background, and examples for the Python\\n ``with`` statement.\\n\\n\\nSpecial method lookup\\n=====================\\n\\nFor custom classes, implicit invocations of special methods are only\\nguaranteed to work correctly if defined on an object\\'s type, not in\\nthe object\\'s instance dictionary. That behaviour is the reason why\\nthe following code raises an exception:\\n\\n >>> class C:\\n ... pass\\n ...\\n >>> c = C()\\n >>> c.__len__ = lambda: 5\\n >>> len(c)\\n Traceback (most recent call last):\\n File \"<stdin>\", line 1, in <module>\\n TypeError: object of type \\'C\\' has no len()\\n\\nThe rationale behind this behaviour lies with a number of special\\nmethods such as ``__hash__()`` and ``__repr__()`` that are implemented\\nby all objects, including type objects. 
If the implicit lookup of\\nthese methods used the conventional lookup process, they would fail\\nwhen invoked on the type object itself:\\n\\n >>> 1 .__hash__() == hash(1)\\n True\\n >>> int.__hash__() == hash(int)\\n Traceback (most recent call last):\\n File \"<stdin>\", line 1, in <module>\\n TypeError: descriptor \\'__hash__\\' of \\'int\\' object needs an argument\\n\\nIncorrectly attempting to invoke an unbound method of a class in this\\nway is sometimes referred to as \\'metaclass confusion\\', and is avoided\\nby bypassing the instance when looking up special methods:\\n\\n >>> type(1).__hash__(1) == hash(1)\\n True\\n >>> type(int).__hash__(int) == hash(int)\\n True\\n\\nIn addition to bypassing any instance attributes in the interest of\\ncorrectness, implicit special method lookup generally also bypasses\\nthe ``__getattribute__()`` method even of the object\\'s metaclass:\\n\\n >>> class Meta(type):\\n ... def __getattribute__(*args):\\n ... print(\"Metaclass getattribute invoked\")\\n ... return type.__getattribute__(*args)\\n ...\\n >>> class C(object, metaclass=Meta):\\n ... def __len__(self):\\n ... return 10\\n ... def __getattribute__(*args):\\n ... print(\"Class getattribute invoked\")\\n ... return object.__getattribute__(*args)\\n ...\\n >>> c = C()\\n >>> c.__len__() # Explicit lookup via instance\\n Class getattribute invoked\\n 10\\n >>> type(c).__len__(c) # Explicit lookup via type\\n Metaclass getattribute invoked\\n 10\\n >>> len(c) # Implicit lookup\\n 10\\n\\nBypassing the ``__getattribute__()`` machinery in this fashion\\nprovides significant scope for speed optimisations within the\\ninterpreter, at the cost of some flexibility in the handling of\\nspecial methods (the special method *must* be set on the class object\\nitself in order to be consistently invoked by the interpreter).\\n\\n-[ Footnotes ]-\\n\\n[1] It *is* possible in some cases to change an object\\'s type, under\\n certain controlled conditions. It generally isn\\'t a good idea\\n though, since it can lead to some very strange behaviour if it is\\n handled incorrectly.\\n\\n[2] For operands of the same type, it is assumed that if the non-\\n reflected method (such as ``__add__()``) fails the operation is\\n not supported, which is why the reflected method is not called.\\n',\n'string-methods':'\\nString Methods\\n**************\\n\\nStrings implement all of the *common* sequence operations, along with\\nthe additional methods described below.\\n\\nStrings also support two styles of string formatting, one providing a\\nlarge degree of flexibility and customization (see ``str.format()``,\\n*Format String Syntax* and *String Formatting*) and the other based on\\nC ``printf`` style formatting that handles a narrower range of types\\nand is slightly harder to use correctly, but is often faster for the\\ncases it can handle (*printf-style String Formatting*).\\n\\nThe *Text Processing Services* section of the standard library covers\\na number of other modules that provide various text related utilities\\n(including regular expression support in the ``re`` module).\\n\\nstr.capitalize()\\n\\n Return a copy of the string with its first character capitalized\\n and the rest lowercased.\\n\\nstr.casefold()\\n\\n Return a casefolded copy of the string. Casefolded strings may be\\n used for caseless matching.\\n\\n Casefolding is similar to lowercasing but more aggressive because\\n it is intended to remove all case distinctions in a string. 
For\\n example, the German lowercase letter ``\\'\\xc3\\x9f\\'`` is equivalent to\\n ``\"ss\"``. Since it is already lowercase, ``lower()`` would do\\n nothing to ``\\'\\xc3\\x9f\\'``; ``casefold()`` converts it to ``\"ss\"``.\\n\\n The casefolding algorithm is described in section 3.13 of the\\n Unicode Standard.\\n\\n New in version 3.3.\\n\\nstr.center(width[, fillchar])\\n\\n Return centered in a string of length *width*. Padding is done\\n using the specified *fillchar* (default is a space).\\n\\nstr.count(sub[, start[, end]])\\n\\n Return the number of non-overlapping occurrences of substring *sub*\\n in the range [*start*, *end*]. Optional arguments *start* and\\n *end* are interpreted as in slice notation.\\n\\nstr.encode(encoding=\"utf-8\", errors=\"strict\")\\n\\n Return an encoded version of the string as a bytes object. Default\\n encoding is ``\\'utf-8\\'``. *errors* may be given to set a different\\n error handling scheme. The default for *errors* is ``\\'strict\\'``,\\n meaning that encoding errors raise a ``UnicodeError``. Other\\n possible values are ``\\'ignore\\'``, ``\\'replace\\'``,\\n ``\\'xmlcharrefreplace\\'``, ``\\'backslashreplace\\'`` and any other name\\n registered via ``codecs.register_error()``, see section *Codec Base\\n Classes*. For a list of possible encodings, see section *Standard\\n Encodings*.\\n\\n Changed in version 3.1: Support for keyword arguments added.\\n\\nstr.endswith(suffix[, start[, end]])\\n\\n Return ``True`` if the string ends with the specified *suffix*,\\n otherwise return ``False``. *suffix* can also be a tuple of\\n suffixes to look for. With optional *start*, test beginning at\\n that position. With optional *end*, stop comparing at that\\n position.\\n\\nstr.expandtabs([tabsize])\\n\\n Return a copy of the string where all tab characters are replaced\\n by zero or more spaces, depending on the current column and the\\n given tab size. The column number is reset to zero after each\\n newline occurring in the string. If *tabsize* is not given, a tab\\n size of ``8`` characters is assumed. This doesn\\'t understand other\\n non-printing characters or escape sequences.\\n\\nstr.find(sub[, start[, end]])\\n\\n Return the lowest index in the string where substring *sub* is\\n found, such that *sub* is contained in the slice ``s[start:end]``.\\n Optional arguments *start* and *end* are interpreted as in slice\\n notation. Return ``-1`` if *sub* is not found.\\n\\n Note: The ``find()`` method should be used only if you need to know the\\n position of *sub*. To check if *sub* is a substring or not, use\\n the ``in`` operator:\\n\\n >>> \\'Py\\' in \\'Python\\'\\n True\\n\\nstr.format(*args, **kwargs)\\n\\n Perform a string formatting operation. The string on which this\\n method is called can contain literal text or replacement fields\\n delimited by braces ``{}``. Each replacement field contains either\\n the numeric index of a positional argument, or the name of a\\n keyword argument. Returns a copy of the string where each\\n replacement field is replaced with the string value of the\\n corresponding argument.\\n\\n >>> \"The sum of 1 + 2 is {0}\".format(1+2)\\n \\'The sum of 1 + 2 is 3\\'\\n\\n See *Format String Syntax* for a description of the various\\n formatting options that can be specified in format strings.\\n\\nstr.format_map(mapping)\\n\\n Similar to ``str.format(**mapping)``, except that ``mapping`` is\\n used directly and not copied to a ``dict`` . 
This is useful if for\\n example ``mapping`` is a dict subclass:\\n\\n >>> class Default(dict):\\n ... def __missing__(self, key):\\n ... return key\\n ...\\n >>> \\'{name} was born in {country}\\'.format_map(Default(name=\\'Guido\\'))\\n \\'Guido was born in country\\'\\n\\n New in version 3.2.\\n\\nstr.index(sub[, start[, end]])\\n\\n Like ``find()``, but raise ``ValueError`` when the substring is not\\n found.\\n\\nstr.isalnum()\\n\\n Return true if all characters in the string are alphanumeric and\\n there is at least one character, false otherwise. A character\\n ``c`` is alphanumeric if one of the following returns ``True``:\\n ``c.isalpha()``, ``c.isdecimal()``, ``c.isdigit()``, or\\n ``c.isnumeric()``.\\n\\nstr.isalpha()\\n\\n Return true if all characters in the string are alphabetic and\\n there is at least one character, false otherwise. Alphabetic\\n characters are those characters defined in the Unicode character\\n database as \"Letter\", i.e., those with general category property\\n being one of \"Lm\", \"Lt\", \"Lu\", \"Ll\", or \"Lo\". Note that this is\\n different from the \"Alphabetic\" property defined in the Unicode\\n Standard.\\n\\nstr.isdecimal()\\n\\n Return true if all characters in the string are decimal characters\\n and there is at least one character, false otherwise. Decimal\\n characters are those from general category \"Nd\". This category\\n includes digit characters, and all characters that can be used to\\n form decimal-radix numbers, e.g. U+0660, ARABIC-INDIC DIGIT ZERO.\\n\\nstr.isdigit()\\n\\n Return true if all characters in the string are digits and there is\\n at least one character, false otherwise. Digits include decimal\\n characters and digits that need special handling, such as the\\n compatibility superscript digits. Formally, a digit is a character\\n that has the property value Numeric_Type=Digit or\\n Numeric_Type=Decimal.\\n\\nstr.isidentifier()\\n\\n Return true if the string is a valid identifier according to the\\n language definition, section *Identifiers and keywords*.\\n\\nstr.islower()\\n\\n Return true if all cased characters [4] in the string are lowercase\\n and there is at least one cased character, false otherwise.\\n\\nstr.isnumeric()\\n\\n Return true if all characters in the string are numeric characters,\\n and there is at least one character, false otherwise. Numeric\\n characters include digit characters, and all characters that have\\n the Unicode numeric value property, e.g. U+2155, VULGAR FRACTION\\n ONE FIFTH. Formally, numeric characters are those with the\\n property value Numeric_Type=Digit, Numeric_Type=Decimal or\\n Numeric_Type=Numeric.\\n\\nstr.isprintable()\\n\\n Return true if all characters in the string are printable or the\\n string is empty, false otherwise. Nonprintable characters are\\n those characters defined in the Unicode character database as\\n \"Other\" or \"Separator\", excepting the ASCII space (0x20) which is\\n considered printable. (Note that printable characters in this\\n context are those which should not be escaped when ``repr()`` is\\n invoked on a string. It has no bearing on the handling of strings\\n written to ``sys.stdout`` or ``sys.stderr``.)\\n\\nstr.isspace()\\n\\n Return true if there are only whitespace characters in the string\\n and there is at least one character, false otherwise. 
Whitespace\\n characters are those characters defined in the Unicode character\\n database as \"Other\" or \"Separator\" and those with bidirectional\\n property being one of \"WS\", \"B\", or \"S\".\\n\\nstr.istitle()\\n\\n Return true if the string is a titlecased string and there is at\\n least one character, for example uppercase characters may only\\n follow uncased characters and lowercase characters only cased ones.\\n Return false otherwise.\\n\\nstr.isupper()\\n\\n Return true if all cased characters [4] in the string are uppercase\\n and there is at least one cased character, false otherwise.\\n\\nstr.join(iterable)\\n\\n Return a string which is the concatenation of the strings in the\\n *iterable* *iterable*. A ``TypeError`` will be raised if there are\\n any non-string values in *iterable*, including ``bytes`` objects.\\n The separator between elements is the string providing this method.\\n\\nstr.ljust(width[, fillchar])\\n\\n Return the string left justified in a string of length *width*.\\n Padding is done using the specified *fillchar* (default is a\\n space). The original string is returned if *width* is less than or\\n equal to ``len(s)``.\\n\\nstr.lower()\\n\\n Return a copy of the string with all the cased characters [4]\\n converted to lowercase.\\n\\n The lowercasing algorithm used is described in section 3.13 of the\\n Unicode Standard.\\n\\nstr.lstrip([chars])\\n\\n Return a copy of the string with leading characters removed. The\\n *chars* argument is a string specifying the set of characters to be\\n removed. If omitted or ``None``, the *chars* argument defaults to\\n removing whitespace. The *chars* argument is not a prefix; rather,\\n all combinations of its values are stripped:\\n\\n >>> \\' spacious \\'.lstrip()\\n \\'spacious \\'\\n >>> \\'www.example.com\\'.lstrip(\\'cmowz.\\')\\n \\'example.com\\'\\n\\nstatic str.maketrans(x[, y[, z]])\\n\\n This static method returns a translation table usable for\\n ``str.translate()``.\\n\\n If there is only one argument, it must be a dictionary mapping\\n Unicode ordinals (integers) or characters (strings of length 1) to\\n Unicode ordinals, strings (of arbitrary lengths) or None.\\n Character keys will then be converted to ordinals.\\n\\n If there are two arguments, they must be strings of equal length,\\n and in the resulting dictionary, each character in x will be mapped\\n to the character at the same position in y. If there is a third\\n argument, it must be a string, whose characters will be mapped to\\n None in the result.\\n\\nstr.partition(sep)\\n\\n Split the string at the first occurrence of *sep*, and return a\\n 3-tuple containing the part before the separator, the separator\\n itself, and the part after the separator. If the separator is not\\n found, return a 3-tuple containing the string itself, followed by\\n two empty strings.\\n\\nstr.replace(old, new[, count])\\n\\n Return a copy of the string with all occurrences of substring *old*\\n replaced by *new*. If the optional argument *count* is given, only\\n the first *count* occurrences are replaced.\\n\\nstr.rfind(sub[, start[, end]])\\n\\n Return the highest index in the string where substring *sub* is\\n found, such that *sub* is contained within ``s[start:end]``.\\n Optional arguments *start* and *end* are interpreted as in slice\\n notation. 
Return ``-1`` on failure.\\n\\nstr.rindex(sub[, start[, end]])\\n\\n Like ``rfind()`` but raises ``ValueError`` when the substring *sub*\\n is not found.\\n\\nstr.rjust(width[, fillchar])\\n\\n Return the string right justified in a string of length *width*.\\n Padding is done using the specified *fillchar* (default is a\\n space). The original string is returned if *width* is less than or\\n equal to ``len(s)``.\\n\\nstr.rpartition(sep)\\n\\n Split the string at the last occurrence of *sep*, and return a\\n 3-tuple containing the part before the separator, the separator\\n itself, and the part after the separator. If the separator is not\\n found, return a 3-tuple containing two empty strings, followed by\\n the string itself.\\n\\nstr.rsplit(sep=None, maxsplit=-1)\\n\\n Return a list of the words in the string, using *sep* as the\\n delimiter string. If *maxsplit* is given, at most *maxsplit* splits\\n are done, the *rightmost* ones. If *sep* is not specified or\\n ``None``, any whitespace string is a separator. Except for\\n splitting from the right, ``rsplit()`` behaves like ``split()``\\n which is described in detail below.\\n\\nstr.rstrip([chars])\\n\\n Return a copy of the string with trailing characters removed. The\\n *chars* argument is a string specifying the set of characters to be\\n removed. If omitted or ``None``, the *chars* argument defaults to\\n removing whitespace. The *chars* argument is not a suffix; rather,\\n all combinations of its values are stripped:\\n\\n >>> \\' spacious \\'.rstrip()\\n \\' spacious\\'\\n >>> \\'mississippi\\'.rstrip(\\'ipz\\')\\n \\'mississ\\'\\n\\nstr.split(sep=None, maxsplit=-1)\\n\\n Return a list of the words in the string, using *sep* as the\\n delimiter string. If *maxsplit* is given, at most *maxsplit*\\n splits are done (thus, the list will have at most ``maxsplit+1``\\n elements). If *maxsplit* is not specified or ``-1``, then there is\\n no limit on the number of splits (all possible splits are made).\\n\\n If *sep* is given, consecutive delimiters are not grouped together\\n and are deemed to delimit empty strings (for example,\\n ``\\'1,,2\\'.split(\\',\\')`` returns ``[\\'1\\', \\'\\', \\'2\\']``). The *sep*\\n argument may consist of multiple characters (for example,\\n ``\\'1<>2<>3\\'.split(\\'<>\\')`` returns ``[\\'1\\', \\'2\\', \\'3\\']``). Splitting\\n an empty string with a specified separator returns ``[\\'\\']``.\\n\\n If *sep* is not specified or is ``None``, a different splitting\\n algorithm is applied: runs of consecutive whitespace are regarded\\n as a single separator, and the result will contain no empty strings\\n at the start or end if the string has leading or trailing\\n whitespace. Consequently, splitting an empty string or a string\\n consisting of just whitespace with a ``None`` separator returns\\n ``[]``.\\n\\n For example, ``\\' 1 2 3 \\'.split()`` returns ``[\\'1\\', \\'2\\', \\'3\\']``,\\n and ``\\' 1 2 3 \\'.split(None, 1)`` returns ``[\\'1\\', \\'2 3 \\']``.\\n\\nstr.splitlines([keepends])\\n\\n Return a list of the lines in the string, breaking at line\\n boundaries. This method uses the *universal newlines* approach to\\n splitting lines. 
Line breaks are not included in the resulting list\\n unless *keepends* is given and true.\\n\\n For example, ``\\'ab c\\\\n\\\\nde fg\\\\rkl\\\\r\\\\n\\'.splitlines()`` returns\\n ``[\\'ab c\\', \\'\\', \\'de fg\\', \\'kl\\']``, while the same call with\\n ``splitlines(True)`` returns ``[\\'ab c\\\\n\\', \\'\\\\n\\', \\'de fg\\\\r\\',\\n \\'kl\\\\r\\\\n\\']``.\\n\\n Unlike ``split()`` when a delimiter string *sep* is given, this\\n method returns an empty list for the empty string, and a terminal\\n line break does not result in an extra line.\\n\\nstr.startswith(prefix[, start[, end]])\\n\\n Return ``True`` if string starts with the *prefix*, otherwise\\n return ``False``. *prefix* can also be a tuple of prefixes to look\\n for. With optional *start*, test string beginning at that\\n position. With optional *end*, stop comparing string at that\\n position.\\n\\nstr.strip([chars])\\n\\n Return a copy of the string with the leading and trailing\\n characters removed. The *chars* argument is a string specifying the\\n set of characters to be removed. If omitted or ``None``, the\\n *chars* argument defaults to removing whitespace. The *chars*\\n argument is not a prefix or suffix; rather, all combinations of its\\n values are stripped:\\n\\n >>> \\' spacious \\'.strip()\\n \\'spacious\\'\\n >>> \\'www.example.com\\'.strip(\\'cmowz.\\')\\n \\'example\\'\\n\\nstr.swapcase()\\n\\n Return a copy of the string with uppercase characters converted to\\n lowercase and vice versa. Note that it is not necessarily true that\\n ``s.swapcase().swapcase() == s``.\\n\\nstr.title()\\n\\n Return a titlecased version of the string where words start with an\\n uppercase character and the remaining characters are lowercase.\\n\\n The algorithm uses a simple language-independent definition of a\\n word as groups of consecutive letters. The definition works in\\n many contexts but it means that apostrophes in contractions and\\n possessives form word boundaries, which may not be the desired\\n result:\\n\\n >>> \"they\\'re bill\\'s friends from the UK\".title()\\n \"They\\'Re Bill\\'S Friends From The Uk\"\\n\\n A workaround for apostrophes can be constructed using regular\\n expressions:\\n\\n >>> import re\\n >>> def titlecase(s):\\n ... return re.sub(r\"[A-Za-z]+(\\'[A-Za-z]+)?\",\\n ... lambda mo: mo.group(0)[0].upper() +\\n ... mo.group(0)[1:].lower(),\\n ... s)\\n ...\\n >>> titlecase(\"they\\'re bill\\'s friends.\")\\n \"They\\'re Bill\\'s Friends.\"\\n\\nstr.translate(map)\\n\\n Return a copy of the *s* where all characters have been mapped\\n through the *map* which must be a dictionary of Unicode ordinals\\n (integers) to Unicode ordinals, strings or ``None``. Unmapped\\n characters are left untouched. Characters mapped to ``None`` are\\n deleted.\\n\\n You can use ``str.maketrans()`` to create a translation map from\\n character-to-character mappings in different formats.\\n\\n Note: An even more flexible approach is to create a custom character\\n mapping codec using the ``codecs`` module (see\\n ``encodings.cp1251`` for an example).\\n\\nstr.upper()\\n\\n Return a copy of the string with all the cased characters [4]\\n converted to uppercase. Note that ``str.upper().isupper()`` might\\n be ``False`` if ``s`` contains uncased characters or if the Unicode\\n category of the resulting character(s) is not \"Lu\" (Letter,\\n uppercase), but e.g. 
\"Lt\" (Letter, titlecase).\\n\\n The uppercasing algorithm used is described in section 3.13 of the\\n Unicode Standard.\\n\\nstr.zfill(width)\\n\\n Return the numeric string left filled with zeros in a string of\\n length *width*. A sign prefix is handled correctly. The original\\n string is returned if *width* is less than or equal to ``len(s)``.\\n',\n'strings':'\\nString and Bytes literals\\n*************************\\n\\nString literals are described by the following lexical definitions:\\n\\n stringliteral ::= [stringprefix](shortstring | longstring)\\n stringprefix ::= \"r\" | \"u\" | \"R\" | \"U\"\\n shortstring ::= \"\\'\" shortstringitem* \"\\'\" | \\'\"\\' shortstringitem* \\'\"\\'\\n longstring ::= \"\\'\\'\\'\" longstringitem* \"\\'\\'\\'\" | \\'\"\"\"\\' longstringitem* \\'\"\"\"\\'\\n shortstringitem ::= shortstringchar | stringescapeseq\\n longstringitem ::= longstringchar | stringescapeseq\\n shortstringchar ::= \\n longstringchar ::= \\n stringescapeseq ::= \"\\\\\" \\n\\n bytesliteral ::= bytesprefix(shortbytes | longbytes)\\n bytesprefix ::= \"b\" | \"B\" | \"br\" | \"Br\" | \"bR\" | \"BR\" | \"rb\" | \"rB\" | \"Rb\" | \"RB\"\\n shortbytes ::= \"\\'\" shortbytesitem* \"\\'\" | \\'\"\\' shortbytesitem* \\'\"\\'\\n longbytes ::= \"\\'\\'\\'\" longbytesitem* \"\\'\\'\\'\" | \\'\"\"\"\\' longbytesitem* \\'\"\"\"\\'\\n shortbytesitem ::= shortbyteschar | bytesescapeseq\\n longbytesitem ::= longbyteschar | bytesescapeseq\\n shortbyteschar ::= \\n longbyteschar ::= \\n bytesescapeseq ::= \"\\\\\" \\n\\nOne syntactic restriction not indicated by these productions is that\\nwhitespace is not allowed between the ``stringprefix`` or\\n``bytesprefix`` and the rest of the literal. The source character set\\nis defined by the encoding declaration; it is UTF-8 if no encoding\\ndeclaration is given in the source file; see section *Encoding\\ndeclarations*.\\n\\nIn plain English: Both types of literals can be enclosed in matching\\nsingle quotes (``\\'``) or double quotes (``\"``). They can also be\\nenclosed in matching groups of three single or double quotes (these\\nare generally referred to as *triple-quoted strings*). The backslash\\n(``\\\\``) character is used to escape characters that otherwise have a\\nspecial meaning, such as newline, backslash itself, or the quote\\ncharacter.\\n\\nBytes literals are always prefixed with ``\\'b\\'`` or ``\\'B\\'``; they\\nproduce an instance of the ``bytes`` type instead of the ``str`` type.\\nThey may only contain ASCII characters; bytes with a numeric value of\\n128 or greater must be expressed with escapes.\\n\\nAs of Python 3.3 it is possible again to prefix unicode strings with a\\n``u`` prefix to simplify maintenance of dual 2.x and 3.x codebases.\\n\\nBoth string and bytes literals may optionally be prefixed with a\\nletter ``\\'r\\'`` or ``\\'R\\'``; such strings are called *raw strings* and\\ntreat backslashes as literal characters. As a result, in string\\nliterals, ``\\'\\\\U\\'`` and ``\\'\\\\u\\'`` escapes in raw strings are not treated\\nspecially. Given that Python 2.x\\'s raw unicode literals behave\\ndifferently than Python 3.x\\'s the ``\\'ur\\'`` syntax is not supported.\\n\\n New in version 3.3: The ``\\'rb\\'`` prefix of raw bytes literals has\\n been added as a synonym of ``\\'br\\'``.\\n\\n New in version 3.3: Support for the unicode legacy literal\\n (``u\\'value\\'``) was reintroduced to simplify the maintenance of dual\\n Python 2.x and 3.x codebases. 
See **PEP 414** for more information.\\n\\nIn triple-quoted strings, unescaped newlines and quotes are allowed\\n(and are retained), except that three unescaped quotes in a row\\nterminate the string. (A \"quote\" is the character used to open the\\nstring, i.e. either ``\\'`` or ``\"``.)\\n\\nUnless an ``\\'r\\'`` or ``\\'R\\'`` prefix is present, escape sequences in\\nstrings are interpreted according to rules similar to those used by\\nStandard C. The recognized escape sequences are:\\n\\n+-------------------+-----------------------------------+---------+\\n| Escape Sequence | Meaning | Notes |\\n+===================+===================================+=========+\\n| ``\\\\newline`` | Backslash and newline ignored | |\\n+-------------------+-----------------------------------+---------+\\n| ``\\\\\\\\`` | Backslash (``\\\\``) | |\\n+-------------------+-----------------------------------+---------+\\n| ``\\\\\\'`` | Single quote (``\\'``) | |\\n+-------------------+-----------------------------------+---------+\\n| ``\\\\\"`` | Double quote (``\"``) | |\\n+-------------------+-----------------------------------+---------+\\n| ``\\\\a`` | ASCII Bell (BEL) | |\\n+-------------------+-----------------------------------+---------+\\n| ``\\\\b`` | ASCII Backspace (BS) | |\\n+-------------------+-----------------------------------+---------+\\n| ``\\\\f`` | ASCII Formfeed (FF) | |\\n+-------------------+-----------------------------------+---------+\\n| ``\\\\n`` | ASCII Linefeed (LF) | |\\n+-------------------+-----------------------------------+---------+\\n| ``\\\\r`` | ASCII Carriage Return (CR) | |\\n+-------------------+-----------------------------------+---------+\\n| ``\\\\t`` | ASCII Horizontal Tab (TAB) | |\\n+-------------------+-----------------------------------+---------+\\n| ``\\\\v`` | ASCII Vertical Tab (VT) | |\\n+-------------------+-----------------------------------+---------+\\n| ``\\\\ooo`` | Character with octal value *ooo* | (1,3) |\\n+-------------------+-----------------------------------+---------+\\n| ``\\\\xhh`` | Character with hex value *hh* | (2,3) |\\n+-------------------+-----------------------------------+---------+\\n\\nEscape sequences only recognized in string literals are:\\n\\n+-------------------+-----------------------------------+---------+\\n| Escape Sequence | Meaning | Notes |\\n+===================+===================================+=========+\\n| ``\\\\N{name}`` | Character named *name* in the | (4) |\\n| | Unicode database | |\\n+-------------------+-----------------------------------+---------+\\n| ``\\\\uxxxx`` | Character with 16-bit hex value | (5) |\\n| | *xxxx* | |\\n+-------------------+-----------------------------------+---------+\\n| ``\\\\Uxxxxxxxx`` | Character with 32-bit hex value | (6) |\\n| | *xxxxxxxx* | |\\n+-------------------+-----------------------------------+---------+\\n\\nNotes:\\n\\n1. As in Standard C, up to three octal digits are accepted.\\n\\n2. Unlike in Standard C, exactly two hex digits are required.\\n\\n3. In a bytes literal, hexadecimal and octal escapes denote the byte\\n with the given value. In a string literal, these escapes denote a\\n Unicode character with the given value.\\n\\n4. Changed in version 3.3: Support for name aliases [1] has been\\n added.\\n\\n5. Individual code units which form parts of a surrogate pair can be\\n encoded using this escape sequence. Exactly four hex digits are\\n required.\\n\\n6. Any Unicode character can be encoded this way. 
Exactly eight hex\\n digits are required.\\n\\nUnlike Standard C, all unrecognized escape sequences are left in the\\nstring unchanged, i.e., *the backslash is left in the string*. (This\\nbehavior is useful when debugging: if an escape sequence is mistyped,\\nthe resulting output is more easily recognized as broken.) It is also\\nimportant to note that the escape sequences only recognized in string\\nliterals fall into the category of unrecognized escapes for bytes\\nliterals.\\n\\nEven in a raw string, string quotes can be escaped with a backslash,\\nbut the backslash remains in the string; for example, ``r\"\\\\\"\"`` is a\\nvalid string literal consisting of two characters: a backslash and a\\ndouble quote; ``r\"\\\\\"`` is not a valid string literal (even a raw\\nstring cannot end in an odd number of backslashes). Specifically, *a\\nraw string cannot end in a single backslash* (since the backslash\\nwould escape the following quote character). Note also that a single\\nbackslash followed by a newline is interpreted as those two characters\\nas part of the string, *not* as a line continuation.\\n',\n'subscriptions':'\\nSubscriptions\\n*************\\n\\nA subscription selects an item of a sequence (string, tuple or list)\\nor mapping (dictionary) object:\\n\\n subscription ::= primary \"[\" expression_list \"]\"\\n\\nThe primary must evaluate to an object that supports subscription,\\ne.g. a list or dictionary. User-defined objects can support\\nsubscription by defining a ``__getitem__()`` method.\\n\\nFor built-in objects, there are two types of objects that support\\nsubscription:\\n\\nIf the primary is a mapping, the expression list must evaluate to an\\nobject whose value is one of the keys of the mapping, and the\\nsubscription selects the value in the mapping that corresponds to that\\nkey. (The expression list is a tuple except if it has exactly one\\nitem.)\\n\\nIf the primary is a sequence, the expression (list) must evaluate to\\nan integer or a slice (as discussed in the following section).\\n\\nThe formal syntax makes no special provision for negative indices in\\nsequences; however, built-in sequences all provide a ``__getitem__()``\\nmethod that interprets negative indices by adding the length of the\\nsequence to the index (so that ``x[-1]`` selects the last item of\\n``x``). The resulting value must be a nonnegative integer less than\\nthe number of items in the sequence, and the subscription selects the\\nitem whose index is that value (counting from zero). Since the support\\nfor negative indices and slicing occurs in the object\\'s\\n``__getitem__()`` method, subclasses overriding this method will need\\nto explicitly add that support.\\n\\nA string\\'s items are characters. A character is not a separate data\\ntype but a string of exactly one character.\\n',\n'truth':\"\\nTruth Value Testing\\n*******************\\n\\nAny object can be tested for truth value, for use in an ``if`` or\\n``while`` condition or as operand of the Boolean operations below. The\\nfollowing values are considered false:\\n\\n* ``None``\\n\\n* ``False``\\n\\n* zero of any numeric type, for example, ``0``, ``0.0``, ``0j``.\\n\\n* any empty sequence, for example, ``''``, ``()``, ``[]``.\\n\\n* any empty mapping, for example, ``{}``.\\n\\n* instances of user-defined classes, if the class defines a\\n ``__bool__()`` or ``__len__()`` method, when that method returns the\\n integer zero or ``bool`` value ``False``. 
[1]\\n\\nAll other values are considered true --- so objects of many types are\\nalways true.\\n\\nOperations and built-in functions that have a Boolean result always\\nreturn ``0`` or ``False`` for false and ``1`` or ``True`` for true,\\nunless otherwise stated. (Important exception: the Boolean operations\\n``or`` and ``and`` always return one of their operands.)\\n\",\n'try':'\\nThe ``try`` statement\\n*********************\\n\\nThe ``try`` statement specifies exception handlers and/or cleanup code\\nfor a group of statements:\\n\\n try_stmt ::= try1_stmt | try2_stmt\\n try1_stmt ::= \"try\" \":\" suite\\n (\"except\" [expression [\"as\" target]] \":\" suite)+\\n [\"else\" \":\" suite]\\n [\"finally\" \":\" suite]\\n try2_stmt ::= \"try\" \":\" suite\\n \"finally\" \":\" suite\\n\\nThe ``except`` clause(s) specify one or more exception handlers. When\\nno exception occurs in the ``try`` clause, no exception handler is\\nexecuted. When an exception occurs in the ``try`` suite, a search for\\nan exception handler is started. This search inspects the except\\nclauses in turn until one is found that matches the exception. An\\nexpression-less except clause, if present, must be last; it matches\\nany exception. For an except clause with an expression, that\\nexpression is evaluated, and the clause matches the exception if the\\nresulting object is \"compatible\" with the exception. An object is\\ncompatible with an exception if it is the class or a base class of the\\nexception object or a tuple containing an item compatible with the\\nexception.\\n\\nIf no except clause matches the exception, the search for an exception\\nhandler continues in the surrounding code and on the invocation stack.\\n[1]\\n\\nIf the evaluation of an expression in the header of an except clause\\nraises an exception, the original search for a handler is canceled and\\na search starts for the new exception in the surrounding code and on\\nthe call stack (it is treated as if the entire ``try`` statement\\nraised the exception).\\n\\nWhen a matching except clause is found, the exception is assigned to\\nthe target specified after the ``as`` keyword in that except clause,\\nif present, and the except clause\\'s suite is executed. All except\\nclauses must have an executable block. When the end of this block is\\nreached, execution continues normally after the entire try statement.\\n(This means that if two nested handlers exist for the same exception,\\nand the exception occurs in the try clause of the inner handler, the\\nouter handler will not handle the exception.)\\n\\nWhen an exception has been assigned using ``as target``, it is cleared\\nat the end of the except clause. This is as if\\n\\n except E as N:\\n foo\\n\\nwas translated to\\n\\n except E as N:\\n try:\\n foo\\n finally:\\n del N\\n\\nThis means the exception must be assigned to a different name to be\\nable to refer to it after the except clause. Exceptions are cleared\\nbecause with the traceback attached to them, they form a reference\\ncycle with the stack frame, keeping all locals in that frame alive\\nuntil the next garbage collection occurs.\\n\\nBefore an except clause\\'s suite is executed, details about the\\nexception are stored in the ``sys`` module and can be access via\\n``sys.exc_info()``. ``sys.exc_info()`` returns a 3-tuple consisting of\\nthe exception class, the exception instance and a traceback object\\n(see section *The standard type hierarchy*) identifying the point in\\nthe program where the exception occurred. 
``sys.exc_info()`` values\\nare restored to their previous values (before the call) when returning\\nfrom a function that handled an exception.\\n\\nThe optional ``else`` clause is executed if and when control flows off\\nthe end of the ``try`` clause. [2] Exceptions in the ``else`` clause\\nare not handled by the preceding ``except`` clauses.\\n\\nIf ``finally`` is present, it specifies a \\'cleanup\\' handler. The\\n``try`` clause is executed, including any ``except`` and ``else``\\nclauses. If an exception occurs in any of the clauses and is not\\nhandled, the exception is temporarily saved. The ``finally`` clause is\\nexecuted. If there is a saved exception it is re-raised at the end of\\nthe ``finally`` clause. If the ``finally`` clause raises another\\nexception, the saved exception is set as the context of the new\\nexception. If the ``finally`` clause executes a ``return`` or\\n``break`` statement, the saved exception is discarded:\\n\\n def f():\\n try:\\n 1/0\\n finally:\\n return 42\\n\\n >>> f()\\n 42\\n\\nThe exception information is not available to the program during\\nexecution of the ``finally`` clause.\\n\\nWhen a ``return``, ``break`` or ``continue`` statement is executed in\\nthe ``try`` suite of a ``try``...``finally`` statement, the\\n``finally`` clause is also executed \\'on the way out.\\' A ``continue``\\nstatement is illegal in the ``finally`` clause. (The reason is a\\nproblem with the current implementation --- this restriction may be\\nlifted in the future).\\n\\nAdditional information on exceptions can be found in section\\n*Exceptions*, and information on using the ``raise`` statement to\\ngenerate exceptions may be found in section *The raise statement*.\\n',\n'types':'\\nThe standard type hierarchy\\n***************************\\n\\nBelow is a list of the types that are built into Python. Extension\\nmodules (written in C, Java, or other languages, depending on the\\nimplementation) can define additional types. Future versions of\\nPython may add types to the type hierarchy (e.g., rational numbers,\\nefficiently stored arrays of integers, etc.), although such additions\\nwill often be provided via the standard library instead.\\n\\nSome of the type descriptions below contain a paragraph listing\\n\\'special attributes.\\' These are attributes that provide access to the\\nimplementation and are not intended for general use. Their definition\\nmay change in the future.\\n\\nNone\\n This type has a single value. There is a single object with this\\n value. This object is accessed through the built-in name ``None``.\\n It is used to signify the absence of a value in many situations,\\n e.g., it is returned from functions that don\\'t explicitly return\\n anything. Its truth value is false.\\n\\nNotImplemented\\n This type has a single value. There is a single object with this\\n value. This object is accessed through the built-in name\\n ``NotImplemented``. Numeric methods and rich comparison methods may\\n return this value if they do not implement the operation for the\\n operands provided. (The interpreter will then try the reflected\\n operation, or some other fallback, depending on the operator.) Its\\n truth value is true.\\n\\nEllipsis\\n This type has a single value. There is a single object with this\\n value. This object is accessed through the literal ``...`` or the\\n built-in name ``Ellipsis``. 
Its truth value is true.\\n\\n``numbers.Number``\\n These are created by numeric literals and returned as results by\\n arithmetic operators and arithmetic built-in functions. Numeric\\n objects are immutable; once created their value never changes.\\n Python numbers are of course strongly related to mathematical\\n numbers, but subject to the limitations of numerical representation\\n in computers.\\n\\n Python distinguishes between integers, floating point numbers, and\\n complex numbers:\\n\\n ``numbers.Integral``\\n These represent elements from the mathematical set of integers\\n (positive and negative).\\n\\n There are two types of integers:\\n\\n Integers (``int``)\\n\\n These represent numbers in an unlimited range, subject to\\n available (virtual) memory only. For the purpose of shift\\n and mask operations, a binary representation is assumed, and\\n negative numbers are represented in a variant of 2\\'s\\n complement which gives the illusion of an infinite string of\\n sign bits extending to the left.\\n\\n Booleans (``bool``)\\n These represent the truth values False and True. The two\\n objects representing the values False and True are the only\\n Boolean objects. The Boolean type is a subtype of the integer\\n type, and Boolean values behave like the values 0 and 1,\\n respectively, in almost all contexts, the exception being\\n that when converted to a string, the strings ``\"False\"`` or\\n ``\"True\"`` are returned, respectively.\\n\\n The rules for integer representation are intended to give the\\n most meaningful interpretation of shift and mask operations\\n involving negative integers.\\n\\n ``numbers.Real`` (``float``)\\n These represent machine-level double precision floating point\\n numbers. You are at the mercy of the underlying machine\\n architecture (and C or Java implementation) for the accepted\\n range and handling of overflow. Python does not support single-\\n precision floating point numbers; the savings in processor and\\n memory usage that are usually the reason for using these is\\n dwarfed by the overhead of using objects in Python, so there is\\n no reason to complicate the language with two kinds of floating\\n point numbers.\\n\\n ``numbers.Complex`` (``complex``)\\n These represent complex numbers as a pair of machine-level\\n double precision floating point numbers. The same caveats apply\\n as for floating point numbers. The real and imaginary parts of a\\n complex number ``z`` can be retrieved through the read-only\\n attributes ``z.real`` and ``z.imag``.\\n\\nSequences\\n These represent finite ordered sets indexed by non-negative\\n numbers. The built-in function ``len()`` returns the number of\\n items of a sequence. When the length of a sequence is *n*, the\\n index set contains the numbers 0, 1, ..., *n*-1. Item *i* of\\n sequence *a* is selected by ``a[i]``.\\n\\n Sequences also support slicing: ``a[i:j]`` selects all items with\\n index *k* such that *i* ``<=`` *k* ``<`` *j*. When used as an\\n expression, a slice is a sequence of the same type. This implies\\n that the index set is renumbered so that it starts at 0.\\n\\n Some sequences also support \"extended slicing\" with a third \"step\"\\n parameter: ``a[i:j:k]`` selects all items of *a* with index *x*\\n where ``x = i + n*k``, *n* ``>=`` ``0`` and *i* ``<=`` *x* ``<``\\n *j*.\\n\\n Sequences are distinguished according to their mutability:\\n\\n Immutable sequences\\n An object of an immutable sequence type cannot change once it is\\n created. 
(If the object contains references to other objects,\\n these other objects may be mutable and may be changed; however,\\n the collection of objects directly referenced by an immutable\\n object cannot change.)\\n\\n The following types are immutable sequences:\\n\\n Strings\\n A string is a sequence of values that represent Unicode\\n codepoints. All the codepoints in range ``U+0000 - U+10FFFF``\\n can be represented in a string. Python doesn\\'t have a\\n ``chr`` type, and every character in the string is\\n represented as a string object with length ``1``. The built-\\n in function ``ord()`` converts a character to its codepoint\\n (as an integer); ``chr()`` converts an integer in range ``0 -\\n 10FFFF`` to the corresponding character. ``str.encode()`` can\\n be used to convert a ``str`` to ``bytes`` using the given\\n encoding, and ``bytes.decode()`` can be used to achieve the\\n opposite.\\n\\n Tuples\\n The items of a tuple are arbitrary Python objects. Tuples of\\n two or more items are formed by comma-separated lists of\\n expressions. A tuple of one item (a \\'singleton\\') can be\\n formed by affixing a comma to an expression (an expression by\\n itself does not create a tuple, since parentheses must be\\n usable for grouping of expressions). An empty tuple can be\\n formed by an empty pair of parentheses.\\n\\n Bytes\\n A bytes object is an immutable array. The items are 8-bit\\n bytes, represented by integers in the range 0 <= x < 256.\\n Bytes literals (like ``b\\'abc\\'``) and the built-in function\\n ``bytes()`` can be used to construct bytes objects. Also,\\n bytes objects can be decoded to strings via the ``decode()``\\n method.\\n\\n Mutable sequences\\n Mutable sequences can be changed after they are created. The\\n subscription and slicing notations can be used as the target of\\n assignment and ``del`` (delete) statements.\\n\\n There are currently two intrinsic mutable sequence types:\\n\\n Lists\\n The items of a list are arbitrary Python objects. Lists are\\n formed by placing a comma-separated list of expressions in\\n square brackets. (Note that there are no special cases needed\\n to form lists of length 0 or 1.)\\n\\n Byte Arrays\\n A bytearray object is a mutable array. They are created by\\n the built-in ``bytearray()`` constructor. Aside from being\\n mutable (and hence unhashable), byte arrays otherwise provide\\n the same interface and functionality as immutable bytes\\n objects.\\n\\n The extension module ``array`` provides an additional example of\\n a mutable sequence type, as does the ``collections`` module.\\n\\nSet types\\n These represent unordered, finite sets of unique, immutable\\n objects. As such, they cannot be indexed by any subscript. However,\\n they can be iterated over, and the built-in function ``len()``\\n returns the number of items in a set. Common uses for sets are fast\\n membership testing, removing duplicates from a sequence, and\\n computing mathematical operations such as intersection, union,\\n difference, and symmetric difference.\\n\\n For set elements, the same immutability rules apply as for\\n dictionary keys. Note that numeric types obey the normal rules for\\n numeric comparison: if two numbers compare equal (e.g., ``1`` and\\n ``1.0``), only one of them can be contained in a set.\\n\\n There are currently two intrinsic set types:\\n\\n Sets\\n These represent a mutable set. 
They are created by the built-in\\n ``set()`` constructor and can be modified afterwards by several\\n methods, such as ``add()``.\\n\\n Frozen sets\\n These represent an immutable set. They are created by the\\n built-in ``frozenset()`` constructor. As a frozenset is\\n immutable and *hashable*, it can be used again as an element of\\n another set, or as a dictionary key.\\n\\nMappings\\n These represent finite sets of objects indexed by arbitrary index\\n sets. The subscript notation ``a[k]`` selects the item indexed by\\n ``k`` from the mapping ``a``; this can be used in expressions and\\n as the target of assignments or ``del`` statements. The built-in\\n function ``len()`` returns the number of items in a mapping.\\n\\n There is currently a single intrinsic mapping type:\\n\\n Dictionaries\\n These represent finite sets of objects indexed by nearly\\n arbitrary values. The only types of values not acceptable as\\n keys are values containing lists or dictionaries or other\\n mutable types that are compared by value rather than by object\\n identity, the reason being that the efficient implementation of\\n dictionaries requires a key\\'s hash value to remain constant.\\n Numeric types used for keys obey the normal rules for numeric\\n comparison: if two numbers compare equal (e.g., ``1`` and\\n ``1.0``) then they can be used interchangeably to index the same\\n dictionary entry.\\n\\n Dictionaries are mutable; they can be created by the ``{...}``\\n notation (see section *Dictionary displays*).\\n\\n The extension modules ``dbm.ndbm`` and ``dbm.gnu`` provide\\n additional examples of mapping types, as does the\\n ``collections`` module.\\n\\nCallable types\\n These are the types to which the function call operation (see\\n section *Calls*) can be applied:\\n\\n User-defined functions\\n A user-defined function object is created by a function\\n definition (see section *Function definitions*). It should be\\n called with an argument list containing the same number of items\\n as the function\\'s formal parameter list.\\n\\n Special attributes:\\n\\n +---------------------------+---------------------------------+-------------+\\n | Attribute | Meaning | |\\n +===========================+=================================+=============+\\n | ``__doc__`` | The function\\'s documentation | Writable |\\n | | string, or ``None`` if | |\\n | | unavailable | |\\n +---------------------------+---------------------------------+-------------+\\n | ``__name__`` | The function\\'s name | Writable |\\n +---------------------------+---------------------------------+-------------+\\n | ``__qualname__`` | The function\\'s *qualified name* | Writable |\\n | | New in version 3.3. | |\\n +---------------------------+---------------------------------+-------------+\\n | ``__module__`` | The name of the module the | Writable |\\n | | function was defined in, or | |\\n | | ``None`` if unavailable. | |\\n +---------------------------+---------------------------------+-------------+\\n | ``__defaults__`` | A tuple containing default | Writable |\\n | | argument values for those | |\\n | | arguments that have defaults, | |\\n | | or ``None`` if no arguments | |\\n | | have a default value | |\\n +---------------------------+---------------------------------+-------------+\\n | ``__code__`` | The code object representing | Writable |\\n | | the compiled function body. 
| |\\n +---------------------------+---------------------------------+-------------+\\n | ``__globals__`` | A reference to the dictionary | Read-only |\\n | | that holds the function\\'s | |\\n | | global variables --- the global | |\\n | | namespace of the module in | |\\n | | which the function was defined. | |\\n +---------------------------+---------------------------------+-------------+\\n | ``__dict__`` | The namespace supporting | Writable |\\n | | arbitrary function attributes. | |\\n +---------------------------+---------------------------------+-------------+\\n | ``__closure__`` | ``None`` or a tuple of cells | Read-only |\\n | | that contain bindings for the | |\\n | | function\\'s free variables. | |\\n +---------------------------+---------------------------------+-------------+\\n | ``__annotations__`` | A dict containing annotations | Writable |\\n | | of parameters. The keys of the | |\\n | | dict are the parameter names, | |\\n | | or ``\\'return\\'`` for the return | |\\n | | annotation, if provided. | |\\n +---------------------------+---------------------------------+-------------+\\n | ``__kwdefaults__`` | A dict containing defaults for | Writable |\\n | | keyword-only parameters. | |\\n +---------------------------+---------------------------------+-------------+\\n\\n Most of the attributes labelled \"Writable\" check the type of the\\n assigned value.\\n\\n Function objects also support getting and setting arbitrary\\n attributes, which can be used, for example, to attach metadata\\n to functions. Regular attribute dot-notation is used to get and\\n set such attributes. *Note that the current implementation only\\n supports function attributes on user-defined functions. Function\\n attributes on built-in functions may be supported in the\\n future.*\\n\\n Additional information about a function\\'s definition can be\\n retrieved from its code object; see the description of internal\\n types below.\\n\\n Instance methods\\n An instance method object combines a class, a class instance and\\n any callable object (normally a user-defined function).\\n\\n Special read-only attributes: ``__self__`` is the class instance\\n object, ``__func__`` is the function object; ``__doc__`` is the\\n method\\'s documentation (same as ``__func__.__doc__``);\\n ``__name__`` is the method name (same as ``__func__.__name__``);\\n ``__module__`` is the name of the module the method was defined\\n in, or ``None`` if unavailable.\\n\\n Methods also support accessing (but not setting) the arbitrary\\n function attributes on the underlying function object.\\n\\n User-defined method objects may be created when getting an\\n attribute of a class (perhaps via an instance of that class), if\\n that attribute is a user-defined function object or a class\\n method object.\\n\\n When an instance method object is created by retrieving a user-\\n defined function object from a class via one of its instances,\\n its ``__self__`` attribute is the instance, and the method\\n object is said to be bound. 
The new method\\'s ``__func__``\\n attribute is the original function object.\\n\\n When a user-defined method object is created by retrieving\\n another method object from a class or instance, the behaviour is\\n the same as for a function object, except that the ``__func__``\\n attribute of the new instance is not the original method object\\n but its ``__func__`` attribute.\\n\\n When an instance method object is created by retrieving a class\\n method object from a class or instance, its ``__self__``\\n attribute is the class itself, and its ``__func__`` attribute is\\n the function object underlying the class method.\\n\\n When an instance method object is called, the underlying\\n function (``__func__``) is called, inserting the class instance\\n (``__self__``) in front of the argument list. For instance,\\n when ``C`` is a class which contains a definition for a function\\n ``f()``, and ``x`` is an instance of ``C``, calling ``x.f(1)``\\n is equivalent to calling ``C.f(x, 1)``.\\n\\n When an instance method object is derived from a class method\\n object, the \"class instance\" stored in ``__self__`` will\\n actually be the class itself, so that calling either ``x.f(1)``\\n or ``C.f(1)`` is equivalent to calling ``f(C,1)`` where ``f`` is\\n the underlying function.\\n\\n Note that the transformation from function object to instance\\n method object happens each time the attribute is retrieved from\\n the instance. In some cases, a fruitful optimization is to\\n assign the attribute to a local variable and call that local\\n variable. Also notice that this transformation only happens for\\n user-defined functions; other callable objects (and all non-\\n callable objects) are retrieved without transformation. It is\\n also important to note that user-defined functions which are\\n attributes of a class instance are not converted to bound\\n methods; this *only* happens when the function is an attribute\\n of the class.\\n\\n Generator functions\\n A function or method which uses the ``yield`` statement (see\\n section *The yield statement*) is called a *generator function*.\\n Such a function, when called, always returns an iterator object\\n which can be used to execute the body of the function: calling\\n the iterator\\'s ``iterator__next__()`` method will cause the\\n function to execute until it provides a value using the\\n ``yield`` statement. When the function executes a ``return``\\n statement or falls off the end, a ``StopIteration`` exception is\\n raised and the iterator will have reached the end of the set of\\n values to be returned.\\n\\n Built-in functions\\n A built-in function object is a wrapper around a C function.\\n Examples of built-in functions are ``len()`` and ``math.sin()``\\n (``math`` is a standard built-in module). The number and type of\\n the arguments are determined by the C function. Special read-\\n only attributes: ``__doc__`` is the function\\'s documentation\\n string, or ``None`` if unavailable; ``__name__`` is the\\n function\\'s name; ``__self__`` is set to ``None`` (but see the\\n next item); ``__module__`` is the name of the module the\\n function was defined in or ``None`` if unavailable.\\n\\n Built-in methods\\n This is really a different disguise of a built-in function, this\\n time containing an object passed to the C function as an\\n implicit extra argument. An example of a built-in method is\\n ``alist.append()``, assuming *alist* is a list object. 
In this\\n case, the special read-only attribute ``__self__`` is set to the\\n object denoted by *alist*.\\n\\n Classes\\n Classes are callable. These objects normally act as factories\\n for new instances of themselves, but variations are possible for\\n class types that override ``__new__()``. The arguments of the\\n call are passed to ``__new__()`` and, in the typical case, to\\n ``__init__()`` to initialize the new instance.\\n\\n Class Instances\\n Instances of arbitrary classes can be made callable by defining\\n a ``__call__()`` method in their class.\\n\\nModules\\n Modules are a basic organizational unit of Python code, and are\\n created by the *import system* as invoked either by the ``import``\\n statement (see ``import``), or by calling functions such as\\n ``importlib.import_module()`` and built-in ``__import__()``. A\\n module object has a namespace implemented by a dictionary object\\n (this is the dictionary referenced by the ``__globals__`` attribute\\n of functions defined in the module). Attribute references are\\n translated to lookups in this dictionary, e.g., ``m.x`` is\\n equivalent to ``m.__dict__[\"x\"]``. A module object does not contain\\n the code object used to initialize the module (since it isn\\'t\\n needed once the initialization is done).\\n\\n Attribute assignment updates the module\\'s namespace dictionary,\\n e.g., ``m.x = 1`` is equivalent to ``m.__dict__[\"x\"] = 1``.\\n\\n Special read-only attribute: ``__dict__`` is the module\\'s namespace\\n as a dictionary object.\\n\\n **CPython implementation detail:** Because of the way CPython\\n clears module dictionaries, the module dictionary will be cleared\\n when the module falls out of scope even if the dictionary still has\\n live references. To avoid this, copy the dictionary or keep the\\n module around while using its dictionary directly.\\n\\n Predefined (writable) attributes: ``__name__`` is the module\\'s\\n name; ``__doc__`` is the module\\'s documentation string, or ``None``\\n if unavailable; ``__file__`` is the pathname of the file from which\\n the module was loaded, if it was loaded from a file. The\\n ``__file__`` attribute may be missing for certain types of modules,\\n such as C modules that are statically linked into the interpreter;\\n for extension modules loaded dynamically from a shared library, it\\n is the pathname of the shared library file.\\n\\nCustom classes\\n Custom class types are typically created by class definitions (see\\n section *Class definitions*). A class has a namespace implemented\\n by a dictionary object. Class attribute references are translated\\n to lookups in this dictionary, e.g., ``C.x`` is translated to\\n ``C.__dict__[\"x\"]`` (although there are a number of hooks which\\n allow for other means of locating attributes). When the attribute\\n name is not found there, the attribute search continues in the base\\n classes. This search of the base classes uses the C3 method\\n resolution order which behaves correctly even in the presence of\\n \\'diamond\\' inheritance structures where there are multiple\\n inheritance paths leading back to a common ancestor. Additional\\n details on the C3 MRO used by Python can be found in the\\n documentation accompanying the 2.3 release at\\n http://www.python.org/download/releases/2.3/mro/.\\n\\n When a class attribute reference (for class ``C``, say) would yield\\n a class method object, it is transformed into an instance method\\n object whose ``__self__`` attributes is ``C``. 
When it would yield\\n a static method object, it is transformed into the object wrapped\\n by the static method object. See section *Implementing Descriptors*\\n for another way in which attributes retrieved from a class may\\n differ from those actually contained in its ``__dict__``.\\n\\n Class attribute assignments update the class\\'s dictionary, never\\n the dictionary of a base class.\\n\\n A class object can be called (see above) to yield a class instance\\n (see below).\\n\\n Special attributes: ``__name__`` is the class name; ``__module__``\\n is the module name in which the class was defined; ``__dict__`` is\\n the dictionary containing the class\\'s namespace; ``__bases__`` is a\\n tuple (possibly empty or a singleton) containing the base classes,\\n in the order of their occurrence in the base class list;\\n ``__doc__`` is the class\\'s documentation string, or None if\\n undefined.\\n\\nClass instances\\n A class instance is created by calling a class object (see above).\\n A class instance has a namespace implemented as a dictionary which\\n is the first place in which attribute references are searched.\\n When an attribute is not found there, and the instance\\'s class has\\n an attribute by that name, the search continues with the class\\n attributes. If a class attribute is found that is a user-defined\\n function object, it is transformed into an instance method object\\n whose ``__self__`` attribute is the instance. Static method and\\n class method objects are also transformed; see above under\\n \"Classes\". See section *Implementing Descriptors* for another way\\n in which attributes of a class retrieved via its instances may\\n differ from the objects actually stored in the class\\'s\\n ``__dict__``. If no class attribute is found, and the object\\'s\\n class has a ``__getattr__()`` method, that is called to satisfy the\\n lookup.\\n\\n Attribute assignments and deletions update the instance\\'s\\n dictionary, never a class\\'s dictionary. If the class has a\\n ``__setattr__()`` or ``__delattr__()`` method, this is called\\n instead of updating the instance dictionary directly.\\n\\n Class instances can pretend to be numbers, sequences, or mappings\\n if they have methods with certain special names. See section\\n *Special method names*.\\n\\n Special attributes: ``__dict__`` is the attribute dictionary;\\n ``__class__`` is the instance\\'s class.\\n\\nI/O objects (also known as file objects)\\n A *file object* represents an open file. Various shortcuts are\\n available to create file objects: the ``open()`` built-in function,\\n and also ``os.popen()``, ``os.fdopen()``, and the ``makefile()``\\n method of socket objects (and perhaps by other functions or methods\\n provided by extension modules).\\n\\n The objects ``sys.stdin``, ``sys.stdout`` and ``sys.stderr`` are\\n initialized to file objects corresponding to the interpreter\\'s\\n standard input, output and error streams; they are all open in text\\n mode and therefore follow the interface defined by the\\n ``io.TextIOBase`` abstract class.\\n\\nInternal types\\n A few types used internally by the interpreter are exposed to the\\n user. Their definitions may change with future versions of the\\n interpreter, but they are mentioned here for completeness.\\n\\n Code objects\\n Code objects represent *byte-compiled* executable Python code,\\n or *bytecode*. 
The difference between a code object and a\\n function object is that the function object contains an explicit\\n reference to the function\\'s globals (the module in which it was\\n defined), while a code object contains no context; also the\\n default argument values are stored in the function object, not\\n in the code object (because they represent values calculated at\\n run-time). Unlike function objects, code objects are immutable\\n and contain no references (directly or indirectly) to mutable\\n objects.\\n\\n Special read-only attributes: ``co_name`` gives the function\\n name; ``co_argcount`` is the number of positional arguments\\n (including arguments with default values); ``co_nlocals`` is the\\n number of local variables used by the function (including\\n arguments); ``co_varnames`` is a tuple containing the names of\\n the local variables (starting with the argument names);\\n ``co_cellvars`` is a tuple containing the names of local\\n variables that are referenced by nested functions;\\n ``co_freevars`` is a tuple containing the names of free\\n variables; ``co_code`` is a string representing the sequence of\\n bytecode instructions; ``co_consts`` is a tuple containing the\\n literals used by the bytecode; ``co_names`` is a tuple\\n containing the names used by the bytecode; ``co_filename`` is\\n the filename from which the code was compiled;\\n ``co_firstlineno`` is the first line number of the function;\\n ``co_lnotab`` is a string encoding the mapping from bytecode\\n offsets to line numbers (for details see the source code of the\\n interpreter); ``co_stacksize`` is the required stack size\\n (including local variables); ``co_flags`` is an integer encoding\\n a number of flags for the interpreter.\\n\\n The following flag bits are defined for ``co_flags``: bit\\n ``0x04`` is set if the function uses the ``*arguments`` syntax\\n to accept an arbitrary number of positional arguments; bit\\n ``0x08`` is set if the function uses the ``**keywords`` syntax\\n to accept arbitrary keyword arguments; bit ``0x20`` is set if\\n the function is a generator.\\n\\n Future feature declarations (``from __future__ import\\n division``) also use bits in ``co_flags`` to indicate whether a\\n code object was compiled with a particular feature enabled: bit\\n ``0x2000`` is set if the function was compiled with future\\n division enabled; bits ``0x10`` and ``0x1000`` were used in\\n earlier versions of Python.\\n\\n Other bits in ``co_flags`` are reserved for internal use.\\n\\n If a code object represents a function, the first item in\\n ``co_consts`` is the documentation string of the function, or\\n ``None`` if undefined.\\n\\n Frame objects\\n Frame objects represent execution frames. 
They may occur in\\n traceback objects (see below).\\n\\n Special read-only attributes: ``f_back`` is to the previous\\n stack frame (towards the caller), or ``None`` if this is the\\n bottom stack frame; ``f_code`` is the code object being executed\\n in this frame; ``f_locals`` is the dictionary used to look up\\n local variables; ``f_globals`` is used for global variables;\\n ``f_builtins`` is used for built-in (intrinsic) names;\\n ``f_lasti`` gives the precise instruction (this is an index into\\n the bytecode string of the code object).\\n\\n Special writable attributes: ``f_trace``, if not ``None``, is a\\n function called at the start of each source code line (this is\\n used by the debugger); ``f_lineno`` is the current line number\\n of the frame --- writing to this from within a trace function\\n jumps to the given line (only for the bottom-most frame). A\\n debugger can implement a Jump command (aka Set Next Statement)\\n by writing to f_lineno.\\n\\n Traceback objects\\n Traceback objects represent a stack trace of an exception. A\\n traceback object is created when an exception occurs. When the\\n search for an exception handler unwinds the execution stack, at\\n each unwound level a traceback object is inserted in front of\\n the current traceback. When an exception handler is entered,\\n the stack trace is made available to the program. (See section\\n *The try statement*.) It is accessible as the third item of the\\n tuple returned by ``sys.exc_info()``. When the program contains\\n no suitable handler, the stack trace is written (nicely\\n formatted) to the standard error stream; if the interpreter is\\n interactive, it is also made available to the user as\\n ``sys.last_traceback``.\\n\\n Special read-only attributes: ``tb_next`` is the next level in\\n the stack trace (towards the frame where the exception\\n occurred), or ``None`` if there is no next level; ``tb_frame``\\n points to the execution frame of the current level;\\n ``tb_lineno`` gives the line number where the exception\\n occurred; ``tb_lasti`` indicates the precise instruction. The\\n line number and last instruction in the traceback may differ\\n from the line number of its frame object if the exception\\n occurred in a ``try`` statement with no matching except clause\\n or with a finally clause.\\n\\n Slice objects\\n Slice objects are used to represent slices for ``__getitem__()``\\n methods. They are also created by the built-in ``slice()``\\n function.\\n\\n Special read-only attributes: ``start`` is the lower bound;\\n ``stop`` is the upper bound; ``step`` is the step value; each is\\n ``None`` if omitted. These attributes can have any type.\\n\\n Slice objects support one method:\\n\\n slice.indices(self, length)\\n\\n This method takes a single integer argument *length* and\\n computes information about the slice that the slice object\\n would describe if applied to a sequence of *length* items.\\n It returns a tuple of three integers; respectively these are\\n the *start* and *stop* indices and the *step* or stride\\n length of the slice. Missing or out-of-bounds indices are\\n handled in a manner consistent with regular slices.\\n\\n Static method objects\\n Static method objects provide a way of defeating the\\n transformation of function objects to method objects described\\n above. A static method object is a wrapper around any other\\n object, usually a user-defined method object. 
When a static\\n method object is retrieved from a class or a class instance, the\\n object actually returned is the wrapped object, which is not\\n subject to any further transformation. Static method objects are\\n not themselves callable, although the objects they wrap usually\\n are. Static method objects are created by the built-in\\n ``staticmethod()`` constructor.\\n\\n Class method objects\\n A class method object, like a static method object, is a wrapper\\n around another object that alters the way in which that object\\n is retrieved from classes and class instances. The behaviour of\\n class method objects upon such retrieval is described above,\\n under \"User-defined methods\". Class method objects are created\\n by the built-in ``classmethod()`` constructor.\\n',\n'typesfunctions':'\\nFunctions\\n*********\\n\\nFunction objects are created by function definitions. The only\\noperation on a function object is to call it: ``func(argument-list)``.\\n\\nThere are really two flavors of function objects: built-in functions\\nand user-defined functions. Both support the same operation (to call\\nthe function), but the implementation is different, hence the\\ndifferent object types.\\n\\nSee *Function definitions* for more information.\\n',\n'typesmapping':'\\nMapping Types --- ``dict``\\n**************************\\n\\nA *mapping* object maps *hashable* values to arbitrary objects.\\nMappings are mutable objects. There is currently only one standard\\nmapping type, the *dictionary*. (For other containers see the built-\\nin ``list``, ``set``, and ``tuple`` classes, and the ``collections``\\nmodule.)\\n\\nA dictionary\\'s keys are *almost* arbitrary values. Values that are\\nnot *hashable*, that is, values containing lists, dictionaries or\\nother mutable types (that are compared by value rather than by object\\nidentity) may not be used as keys. Numeric types used for keys obey\\nthe normal rules for numeric comparison: if two numbers compare equal\\n(such as ``1`` and ``1.0``) then they can be used interchangeably to\\nindex the same dictionary entry. (Note however, that since computers\\nstore floating-point numbers as approximations it is usually unwise to\\nuse them as dictionary keys.)\\n\\nDictionaries can be created by placing a comma-separated list of\\n``key: value`` pairs within braces, for example: ``{\\'jack\\': 4098,\\n\\'sjoerd\\': 4127}`` or ``{4098: \\'jack\\', 4127: \\'sjoerd\\'}``, or by the\\n``dict`` constructor.\\n\\nclass class dict(**kwarg)\\nclass class dict(mapping, **kwarg)\\nclass class dict(iterable, **kwarg)\\n\\n Return a new dictionary initialized from an optional positional\\n argument and a possibly empty set of keyword arguments.\\n\\n If no positional argument is given, an empty dictionary is created.\\n If a positional argument is given and it is a mapping object, a\\n dictionary is created with the same key-value pairs as the mapping\\n object. Otherwise, the positional argument must be an *iterator*\\n object. Each item in the iterable must itself be an iterator with\\n exactly two objects. The first object of each item becomes a key\\n in the new dictionary, and the second object the corresponding\\n value. If a key occurs more than once, the last value for that key\\n becomes the corresponding value in the new dictionary.\\n\\n If keyword arguments are given, the keyword arguments and their\\n values are added to the dictionary created from the positional\\n argument. 
If a key being added is already present, the value from\\n the keyword argument replaces the value from the positional\\n argument.\\n\\n To illustrate, the following examples all return a dictionary equal\\n to ``{\"one\": 1, \"two\": 2, \"three\": 3}``:\\n\\n >>> a = dict(one=1, two=2, three=3)\\n >>> b = {\\'one\\': 1, \\'two\\': 2, \\'three\\': 3}\\n >>> c = dict(zip([\\'one\\', \\'two\\', \\'three\\'], [1, 2, 3]))\\n >>> d = dict([(\\'two\\', 2), (\\'one\\', 1), (\\'three\\', 3)])\\n >>> e = dict({\\'three\\': 3, \\'one\\': 1, \\'two\\': 2})\\n >>> a == b == c == d == e\\n True\\n\\n Providing keyword arguments as in the first example only works for\\n keys that are valid Python identifiers. Otherwise, any valid keys\\n can be used.\\n\\n These are the operations that dictionaries support (and therefore,\\n custom mapping types should support too):\\n\\n len(d)\\n\\n Return the number of items in the dictionary *d*.\\n\\n d[key]\\n\\n Return the item of *d* with key *key*. Raises a ``KeyError`` if\\n *key* is not in the map.\\n\\n If a subclass of dict defines a method ``__missing__()``, if the\\n key *key* is not present, the ``d[key]`` operation calls that\\n method with the key *key* as argument. The ``d[key]`` operation\\n then returns or raises whatever is returned or raised by the\\n ``__missing__(key)`` call if the key is not present. No other\\n operations or methods invoke ``__missing__()``. If\\n ``__missing__()`` is not defined, ``KeyError`` is raised.\\n ``__missing__()`` must be a method; it cannot be an instance\\n variable:\\n\\n >>> class Counter(dict):\\n ... def __missing__(self, key):\\n ... return 0\\n >>> c = Counter()\\n >>> c[\\'red\\']\\n 0\\n >>> c[\\'red\\'] += 1\\n >>> c[\\'red\\']\\n 1\\n\\n See ``collections.Counter`` for a complete implementation\\n including other methods helpful for accumulating and managing\\n tallies.\\n\\n d[key] = value\\n\\n Set ``d[key]`` to *value*.\\n\\n del d[key]\\n\\n Remove ``d[key]`` from *d*. Raises a ``KeyError`` if *key* is\\n not in the map.\\n\\n key in d\\n\\n Return ``True`` if *d* has a key *key*, else ``False``.\\n\\n key not in d\\n\\n Equivalent to ``not key in d``.\\n\\n iter(d)\\n\\n Return an iterator over the keys of the dictionary. This is a\\n shortcut for ``iter(d.keys())``.\\n\\n clear()\\n\\n Remove all items from the dictionary.\\n\\n copy()\\n\\n Return a shallow copy of the dictionary.\\n\\n classmethod fromkeys(seq[, value])\\n\\n Create a new dictionary with keys from *seq* and values set to\\n *value*.\\n\\n ``fromkeys()`` is a class method that returns a new dictionary.\\n *value* defaults to ``None``.\\n\\n get(key[, default])\\n\\n Return the value for *key* if *key* is in the dictionary, else\\n *default*. If *default* is not given, it defaults to ``None``,\\n so that this method never raises a ``KeyError``.\\n\\n items()\\n\\n Return a new view of the dictionary\\'s items (``(key, value)``\\n pairs). See the *documentation of view objects*.\\n\\n keys()\\n\\n Return a new view of the dictionary\\'s keys. See the\\n *documentation of view objects*.\\n\\n pop(key[, default])\\n\\n If *key* is in the dictionary, remove it and return its value,\\n else return *default*. If *default* is not given and *key* is\\n not in the dictionary, a ``KeyError`` is raised.\\n\\n popitem()\\n\\n Remove and return an arbitrary ``(key, value)`` pair from the\\n dictionary.\\n\\n ``popitem()`` is useful to destructively iterate over a\\n dictionary, as often used in set algorithms. 
If the dictionary\\n is empty, calling ``popitem()`` raises a ``KeyError``.\\n\\n setdefault(key[, default])\\n\\n If *key* is in the dictionary, return its value. If not, insert\\n *key* with a value of *default* and return *default*. *default*\\n defaults to ``None``.\\n\\n update([other])\\n\\n Update the dictionary with the key/value pairs from *other*,\\n overwriting existing keys. Return ``None``.\\n\\n ``update()`` accepts either another dictionary object or an\\n iterable of key/value pairs (as tuples or other iterables of\\n length two). If keyword arguments are specified, the dictionary\\n is then updated with those key/value pairs: ``d.update(red=1,\\n blue=2)``.\\n\\n values()\\n\\n Return a new view of the dictionary\\'s values. See the\\n *documentation of view objects*.\\n\\nSee also:\\n\\n ``types.MappingProxyType`` can be used to create a read-only view\\n of a ``dict``.\\n\\n\\nDictionary view objects\\n=======================\\n\\nThe objects returned by ``dict.keys()``, ``dict.values()`` and\\n``dict.items()`` are *view objects*. They provide a dynamic view on\\nthe dictionary\\'s entries, which means that when the dictionary\\nchanges, the view reflects these changes.\\n\\nDictionary views can be iterated over to yield their respective data,\\nand support membership tests:\\n\\nlen(dictview)\\n\\n Return the number of entries in the dictionary.\\n\\niter(dictview)\\n\\n Return an iterator over the keys, values or items (represented as\\n tuples of ``(key, value)``) in the dictionary.\\n\\n Keys and values are iterated over in an arbitrary order which is\\n non-random, varies across Python implementations, and depends on\\n the dictionary\\'s history of insertions and deletions. If keys,\\n values and items views are iterated over with no intervening\\n modifications to the dictionary, the order of items will directly\\n correspond. This allows the creation of ``(value, key)`` pairs\\n using ``zip()``: ``pairs = zip(d.values(), d.keys())``. Another\\n way to create the same list is ``pairs = [(v, k) for (k, v) in\\n d.items()]``.\\n\\n Iterating views while adding or deleting entries in the dictionary\\n may raise a ``RuntimeError`` or fail to iterate over all entries.\\n\\nx in dictview\\n\\n Return ``True`` if *x* is in the underlying dictionary\\'s keys,\\n values or items (in the latter case, *x* should be a ``(key,\\n value)`` tuple).\\n\\nKeys views are set-like since their entries are unique and hashable.\\nIf all values are hashable, so that ``(key, value)`` pairs are unique\\nand hashable, then the items view is also set-like. (Values views are\\nnot treated as set-like since the entries are generally not unique.)\\nFor set-like views, all of the operations defined for the abstract\\nbase class ``collections.abc.Set`` are available (for example, ``==``,\\n``<``, or ``^``).\\n\\nAn example of dictionary view usage:\\n\\n >>> dishes = {\\'eggs\\': 2, \\'sausage\\': 1, \\'bacon\\': 1, \\'spam\\': 500}\\n >>> keys = dishes.keys()\\n >>> values = dishes.values()\\n\\n >>> # iteration\\n >>> n = 0\\n >>> for val in values:\\n ... 
n += val\\n >>> print(n)\\n 504\\n\\n >>> # keys and values are iterated over in the same order\\n >>> list(keys)\\n [\\'eggs\\', \\'bacon\\', \\'sausage\\', \\'spam\\']\\n >>> list(values)\\n [2, 1, 1, 500]\\n\\n >>> # view objects are dynamic and reflect dict changes\\n >>> del dishes[\\'eggs\\']\\n >>> del dishes[\\'sausage\\']\\n >>> list(keys)\\n [\\'spam\\', \\'bacon\\']\\n\\n >>> # set operations\\n >>> keys & {\\'eggs\\', \\'bacon\\', \\'salad\\'}\\n {\\'bacon\\'}\\n >>> keys ^ {\\'sausage\\', \\'juice\\'}\\n {\\'juice\\', \\'sausage\\', \\'bacon\\', \\'spam\\'}\\n',\n'typesmethods':'\\nMethods\\n*******\\n\\nMethods are functions that are called using the attribute notation.\\nThere are two flavors: built-in methods (such as ``append()`` on\\nlists) and class instance methods. Built-in methods are described\\nwith the types that support them.\\n\\nIf you access a method (a function defined in a class namespace)\\nthrough an instance, you get a special object: a *bound method* (also\\ncalled *instance method*) object. When called, it will add the\\n``self`` argument to the argument list. Bound methods have two\\nspecial read-only attributes: ``m.__self__`` is the object on which\\nthe method operates, and ``m.__func__`` is the function implementing\\nthe method. Calling ``m(arg-1, arg-2, ..., arg-n)`` is completely\\nequivalent to calling ``m.__func__(m.__self__, arg-1, arg-2, ...,\\narg-n)``.\\n\\nLike function objects, bound method objects support getting arbitrary\\nattributes. However, since method attributes are actually stored on\\nthe underlying function object (``meth.__func__``), setting method\\nattributes on bound methods is disallowed. Attempting to set an\\nattribute on a method results in an ``AttributeError`` being raised.\\nIn order to set a method attribute, you need to explicitly set it on\\nthe underlying function object:\\n\\n >>> class C:\\n ... def method(self):\\n ... pass\\n ...\\n >>> c = C()\\n >>> c.method.whoami = \\'my name is method\\' # can\\'t set on the method\\n Traceback (most recent call last):\\n File \"\", line 1, in \\n AttributeError: \\'method\\' object has no attribute \\'whoami\\'\\n >>> c.method.__func__.whoami = \\'my name is method\\'\\n >>> c.method.whoami\\n \\'my name is method\\'\\n\\nSee *The standard type hierarchy* for more information.\\n',\n'typesmodules':\"\\nModules\\n*******\\n\\nThe only special operation on a module is attribute access:\\n``m.name``, where *m* is a module and *name* accesses a name defined\\nin *m*'s symbol table. Module attributes can be assigned to. (Note\\nthat the ``import`` statement is not, strictly speaking, an operation\\non a module object; ``import foo`` does not require a module object\\nnamed *foo* to exist, rather it requires an (external) *definition*\\nfor a module named *foo* somewhere.)\\n\\nA special attribute of every module is ``__dict__``. This is the\\ndictionary containing the module's symbol table. Modifying this\\ndictionary will actually change the module's symbol table, but direct\\nassignment to the ``__dict__`` attribute is not possible (you can\\nwrite ``m.__dict__['a'] = 1``, which defines ``m.a`` to be ``1``, but\\nyou can't write ``m.__dict__ = {}``). Modifying ``__dict__`` directly\\nis not recommended.\\n\\nModules built into the interpreter are written like this: ````. 
If loaded from a file, they are written as\\n````.\\n\",\n'typesseq':'\\nSequence Types --- ``list``, ``tuple``, ``range``\\n*************************************************\\n\\nThere are three basic sequence types: lists, tuples, and range\\nobjects. Additional sequence types tailored for processing of *binary\\ndata* and *text strings* are described in dedicated sections.\\n\\n\\nCommon Sequence Operations\\n==========================\\n\\nThe operations in the following table are supported by most sequence\\ntypes, both mutable and immutable. The ``collections.abc.Sequence``\\nABC is provided to make it easier to correctly implement these\\noperations on custom sequence types.\\n\\nThis table lists the sequence operations sorted in ascending priority\\n(operations in the same box have the same priority). In the table,\\n*s* and *t* are sequences of the same type, *n*, *i*, *j* and *k* are\\nintegers and *x* is an arbitrary object that meets any type and value\\nrestrictions imposed by *s*.\\n\\nThe ``in`` and ``not in`` operations have the same priorities as the\\ncomparison operations. The ``+`` (concatenation) and ``*``\\n(repetition) operations have the same priority as the corresponding\\nnumeric operations.\\n\\n+----------------------------+----------------------------------+------------+\\n| Operation | Result | Notes |\\n+============================+==================================+============+\\n| ``x in s`` | ``True`` if an item of *s* is | (1) |\\n| | equal to *x*, else ``False`` | |\\n+----------------------------+----------------------------------+------------+\\n| ``x not in s`` | ``False`` if an item of *s* is | (1) |\\n| | equal to *x*, else ``True`` | |\\n+----------------------------+----------------------------------+------------+\\n| ``s + t`` | the concatenation of *s* and *t* | (6)(7) |\\n+----------------------------+----------------------------------+------------+\\n| ``s * n`` or ``n * s`` | *n* shallow copies of *s* | (2)(7) |\\n| | concatenated | |\\n+----------------------------+----------------------------------+------------+\\n| ``s[i]`` | *i*th item of *s*, origin 0 | (3) |\\n+----------------------------+----------------------------------+------------+\\n| ``s[i:j]`` | slice of *s* from *i* to *j* | (3)(4) |\\n+----------------------------+----------------------------------+------------+\\n| ``s[i:j:k]`` | slice of *s* from *i* to *j* | (3)(5) |\\n| | with step *k* | |\\n+----------------------------+----------------------------------+------------+\\n| ``len(s)`` | length of *s* | |\\n+----------------------------+----------------------------------+------------+\\n| ``min(s)`` | smallest item of *s* | |\\n+----------------------------+----------------------------------+------------+\\n| ``max(s)`` | largest item of *s* | |\\n+----------------------------+----------------------------------+------------+\\n| ``s.index(x[, i[, j]])`` | index of the first occurence of | (8) |\\n| | *x* in *s* (at or after index | |\\n| | *i* and before index *j*) | |\\n+----------------------------+----------------------------------+------------+\\n| ``s.count(x)`` | total number of occurences of | |\\n| | *x* in *s* | |\\n+----------------------------+----------------------------------+------------+\\n\\nSequences of the same type also support comparisons. In particular,\\ntuples and lists are compared lexicographically by comparing\\ncorresponding elements. 
This means that to compare equal, every\\nelement must compare equal and the two sequences must be of the same\\ntype and have the same length. (For full details see *Comparisons* in\\nthe language reference.)\\n\\nNotes:\\n\\n1. While the ``in`` and ``not in`` operations are used only for simple\\n containment testing in the general case, some specialised sequences\\n (such as ``str``, ``bytes`` and ``bytearray``) also use them for\\n subsequence testing:\\n\\n >>> \"gg\" in \"eggs\"\\n True\\n\\n2. Values of *n* less than ``0`` are treated as ``0`` (which yields an\\n empty sequence of the same type as *s*). Note also that the copies\\n are shallow; nested structures are not copied. This often haunts\\n new Python programmers; consider:\\n\\n >>> lists = [[]] * 3\\n >>> lists\\n [[], [], []]\\n >>> lists[0].append(3)\\n >>> lists\\n [[3], [3], [3]]\\n\\n What has happened is that ``[[]]`` is a one-element list containing\\n an empty list, so all three elements of ``[[]] * 3`` are (pointers\\n to) this single empty list. Modifying any of the elements of\\n ``lists`` modifies this single list. You can create a list of\\n different lists this way:\\n\\n >>> lists = [[] for i in range(3)]\\n >>> lists[0].append(3)\\n >>> lists[1].append(5)\\n >>> lists[2].append(7)\\n >>> lists\\n [[3], [5], [7]]\\n\\n3. If *i* or *j* is negative, the index is relative to the end of the\\n string: ``len(s) + i`` or ``len(s) + j`` is substituted. But note\\n that ``-0`` is still ``0``.\\n\\n4. The slice of *s* from *i* to *j* is defined as the sequence of\\n items with index *k* such that ``i <= k < j``. If *i* or *j* is\\n greater than ``len(s)``, use ``len(s)``. If *i* is omitted or\\n ``None``, use ``0``. If *j* is omitted or ``None``, use\\n ``len(s)``. If *i* is greater than or equal to *j*, the slice is\\n empty.\\n\\n5. The slice of *s* from *i* to *j* with step *k* is defined as the\\n sequence of items with index ``x = i + n*k`` such that ``0 <= n <\\n (j-i)/k``. In other words, the indices are ``i``, ``i+k``,\\n ``i+2*k``, ``i+3*k`` and so on, stopping when *j* is reached (but\\n never including *j*). If *i* or *j* is greater than ``len(s)``,\\n use ``len(s)``. If *i* or *j* are omitted or ``None``, they become\\n \"end\" values (which end depends on the sign of *k*). Note, *k*\\n cannot be zero. If *k* is ``None``, it is treated like ``1``.\\n\\n6. Concatenating immutable sequences always results in a new object.\\n This means that building up a sequence by repeated concatenation\\n will have a quadratic runtime cost in the total sequence length.\\n To get a linear runtime cost, you must switch to one of the\\n alternatives below:\\n\\n * if concatenating ``str`` objects, you can build a list and use\\n ``str.join()`` at the end or else write to a ``io.StringIO``\\n instance and retrieve its value when complete\\n\\n * if concatenating ``bytes`` objects, you can similarly use\\n ``bytes.join()`` or ``io.BytesIO``, or you can do in-place\\n concatenation with a ``bytearray`` object. ``bytearray`` objects\\n are mutable and have an efficient overallocation mechanism\\n\\n * if concatenating ``tuple`` objects, extend a ``list`` instead\\n\\n * for other types, investigate the relevant class documentation\\n\\n7. Some sequence types (such as ``range``) only support item sequences\\n that follow specific patterns, and hence don\\'t support sequence\\n concatenation or repetition.\\n\\n8. ``index`` raises ``ValueError`` when *x* is not found in *s*. 
When\\n supported, the additional arguments to the index method allow\\n efficient searching of subsections of the sequence. Passing the\\n extra arguments is roughly equivalent to using ``s[i:j].index(x)``,\\n only without copying any data and with the returned index being\\n relative to the start of the sequence rather than the start of the\\n slice.\\n\\n\\nImmutable Sequence Types\\n========================\\n\\nThe only operation that immutable sequence types generally implement\\nthat is not also implemented by mutable sequence types is support for\\nthe ``hash()`` built-in.\\n\\nThis support allows immutable sequences, such as ``tuple`` instances,\\nto be used as ``dict`` keys and stored in ``set`` and ``frozenset``\\ninstances.\\n\\nAttempting to hash an immutable sequence that contains unhashable\\nvalues will result in ``TypeError``.\\n\\n\\nMutable Sequence Types\\n======================\\n\\nThe operations in the following table are defined on mutable sequence\\ntypes. The ``collections.abc.MutableSequence`` ABC is provided to make\\nit easier to correctly implement these operations on custom sequence\\ntypes.\\n\\nIn the table *s* is an instance of a mutable sequence type, *t* is any\\niterable object and *x* is an arbitrary object that meets any type and\\nvalue restrictions imposed by *s* (for example, ``bytearray`` only\\naccepts integers that meet the value restriction ``0 <= x <= 255``).\\n\\n+--------------------------------+----------------------------------+-----------------------+\\n| Operation | Result | Notes |\\n+================================+==================================+=======================+\\n| ``s[i] = x`` | item *i* of *s* is replaced by | |\\n| | *x* | |\\n+--------------------------------+----------------------------------+-----------------------+\\n| ``s[i:j] = t`` | slice of *s* from *i* to *j* is | |\\n| | replaced by the contents of the | |\\n| | iterable *t* | |\\n+--------------------------------+----------------------------------+-----------------------+\\n| ``del s[i:j]`` | same as ``s[i:j] = []`` | |\\n+--------------------------------+----------------------------------+-----------------------+\\n| ``s[i:j:k] = t`` | the elements of ``s[i:j:k]`` are | (1) |\\n| | replaced by those of *t* | |\\n+--------------------------------+----------------------------------+-----------------------+\\n| ``del s[i:j:k]`` | removes the elements of | |\\n| | ``s[i:j:k]`` from the list | |\\n+--------------------------------+----------------------------------+-----------------------+\\n| ``s.append(x)`` | appends *x* to the end of the | |\\n| | sequence (same as | |\\n| | ``s[len(s):len(s)] = [x]``) | |\\n+--------------------------------+----------------------------------+-----------------------+\\n| ``s.clear()`` | removes all items from ``s`` | (5) |\\n| | (same as ``del s[:]``) | |\\n+--------------------------------+----------------------------------+-----------------------+\\n| ``s.copy()`` | creates a shallow copy of ``s`` | (5) |\\n| | (same as ``s[:]``) | |\\n+--------------------------------+----------------------------------+-----------------------+\\n| ``s.extend(t)`` | extends *s* with the contents of | |\\n| | *t* (same as ``s[len(s):len(s)] | |\\n| | = t``) | |\\n+--------------------------------+----------------------------------+-----------------------+\\n| ``s.insert(i, x)`` | inserts *x* into *s* at the | |\\n| | index given by *i* (same as | |\\n| | ``s[i:i] = [x]``) | 
|\\n+--------------------------------+----------------------------------+-----------------------+\\n| ``s.pop([i])`` | retrieves the item at *i* and | (2) |\\n| | also removes it from *s* | |\\n+--------------------------------+----------------------------------+-----------------------+\\n| ``s.remove(x)`` | remove the first item from *s* | (3) |\\n| | where ``s[i] == x`` | |\\n+--------------------------------+----------------------------------+-----------------------+\\n| ``s.reverse()`` | reverses the items of *s* in | (4) |\\n| | place | |\\n+--------------------------------+----------------------------------+-----------------------+\\n\\nNotes:\\n\\n1. *t* must have the same length as the slice it is replacing.\\n\\n2. The optional argument *i* defaults to ``-1``, so that by default\\n the last item is removed and returned.\\n\\n3. ``remove`` raises ``ValueError`` when *x* is not found in *s*.\\n\\n4. The ``reverse()`` method modifies the sequence in place for economy\\n of space when reversing a large sequence. To remind users that it\\n operates by side effect, it does not return the reversed sequence.\\n\\n5. ``clear()`` and ``copy()`` are included for consistency with the\\n interfaces of mutable containers that don\\'t support slicing\\n operations (such as ``dict`` and ``set``)\\n\\n New in version 3.3: ``clear()`` and ``copy()`` methods.\\n\\n\\nLists\\n=====\\n\\nLists are mutable sequences, typically used to store collections of\\nhomogeneous items (where the precise degree of similarity will vary by\\napplication).\\n\\nclass class list([iterable])\\n\\n Lists may be constructed in several ways:\\n\\n * Using a pair of square brackets to denote the empty list: ``[]``\\n\\n * Using square brackets, separating items with commas: ``[a]``,\\n ``[a, b, c]``\\n\\n * Using a list comprehension: ``[x for x in iterable]``\\n\\n * Using the type constructor: ``list()`` or ``list(iterable)``\\n\\n The constructor builds a list whose items are the same and in the\\n same order as *iterable*\\'s items. *iterable* may be either a\\n sequence, a container that supports iteration, or an iterator\\n object. If *iterable* is already a list, a copy is made and\\n returned, similar to ``iterable[:]``. For example, ``list(\\'abc\\')``\\n returns ``[\\'a\\', \\'b\\', \\'c\\']`` and ``list( (1, 2, 3) )`` returns ``[1,\\n 2, 3]``. If no argument is given, the constructor creates a new\\n empty list, ``[]``.\\n\\n Many other operations also produce lists, including the\\n ``sorted()`` built-in.\\n\\n Lists implement all of the *common* and *mutable* sequence\\n operations. Lists also provide the following additional method:\\n\\n sort(*, key=None, reverse=None)\\n\\n This method sorts the list in place, using only ``<``\\n comparisons between items. Exceptions are not suppressed - if\\n any comparison operations fail, the entire sort operation will\\n fail (and the list will likely be left in a partially modified\\n state).\\n\\n *key* specifies a function of one argument that is used to\\n extract a comparison key from each list element (for example,\\n ``key=str.lower``). The key corresponding to each item in the\\n list is calculated once and then used for the entire sorting\\n process. The default value of ``None`` means that list items are\\n sorted directly without calculating a separate key value.\\n\\n The ``functools.cmp_to_key()`` utility is available to convert a\\n 2.x style *cmp* function to a *key* function.\\n\\n *reverse* is a boolean value. 
If set to ``True``, then the list\\n elements are sorted as if each comparison were reversed.\\n\\n This method modifies the sequence in place for economy of space\\n when sorting a large sequence. To remind users that it operates\\n by side effect, it does not return the sorted sequence (use\\n ``sorted()`` to explicitly request a new sorted list instance).\\n\\n The ``sort()`` method is guaranteed to be stable. A sort is\\n stable if it guarantees not to change the relative order of\\n elements that compare equal --- this is helpful for sorting in\\n multiple passes (for example, sort by department, then by salary\\n grade).\\n\\n **CPython implementation detail:** While a list is being sorted,\\n the effect of attempting to mutate, or even inspect, the list is\\n undefined. The C implementation of Python makes the list appear\\n empty for the duration, and raises ``ValueError`` if it can\\n detect that the list has been mutated during a sort.\\n\\n\\nTuples\\n======\\n\\nTuples are immutable sequences, typically used to store collections of\\nheterogeneous data (such as the 2-tuples produced by the\\n``enumerate()`` built-in). Tuples are also used for cases where an\\nimmutable sequence of homogeneous data is needed (such as allowing\\nstorage in a ``set`` or ``dict`` instance).\\n\\nclass class tuple([iterable])\\n\\n Tuples may be constructed in a number of ways:\\n\\n * Using a pair of parentheses to denote the empty tuple: ``()``\\n\\n * Using a trailing comma for a singleton tuple: ``a,`` or ``(a,)``\\n\\n * Separating items with commas: ``a, b, c`` or ``(a, b, c)``\\n\\n * Using the ``tuple()`` built-in: ``tuple()`` or\\n ``tuple(iterable)``\\n\\n The constructor builds a tuple whose items are the same and in the\\n same order as *iterable*\\'s items. *iterable* may be either a\\n sequence, a container that supports iteration, or an iterator\\n object. If *iterable* is already a tuple, it is returned\\n unchanged. For example, ``tuple(\\'abc\\')`` returns ``(\\'a\\', \\'b\\',\\n \\'c\\')`` and ``tuple( [1, 2, 3] )`` returns ``(1, 2, 3)``. If no\\n argument is given, the constructor creates a new empty tuple,\\n ``()``.\\n\\n Note that it is actually the comma which makes a tuple, not the\\n parentheses. The parentheses are optional, except in the empty\\n tuple case, or when they are needed to avoid syntactic ambiguity.\\n For example, ``f(a, b, c)`` is a function call with three\\n arguments, while ``f((a, b, c))`` is a function call with a 3-tuple\\n as the sole argument.\\n\\n Tuples implement all of the *common* sequence operations.\\n\\nFor heterogeneous collections of data where access by name is clearer\\nthan access by index, ``collections.namedtuple()`` may be a more\\nappropriate choice than a simple tuple object.\\n\\n\\nRanges\\n======\\n\\nThe ``range`` type represents an immutable sequence of numbers and is\\ncommonly used for looping a specific number of times in ``for`` loops.\\n\\nclass class range(stop)\\nclass class range(start, stop[, step])\\n\\n The arguments to the range constructor must be integers (either\\n built-in ``int`` or any object that implements the ``__index__``\\n special method). If the *step* argument is omitted, it defaults to\\n ``1``. If the *start* argument is omitted, it defaults to ``0``. 
If\\n *step* is zero, ``ValueError`` is raised.\\n\\n For a positive *step*, the contents of a range ``r`` are determined\\n by the formula ``r[i] = start + step*i`` where ``i >= 0`` and\\n ``r[i] < stop``.\\n\\n For a negative *step*, the contents of the range are still\\n determined by the formula ``r[i] = start + step*i``, but the\\n constraints are ``i >= 0`` and ``r[i] > stop``.\\n\\n A range object will be empty if ``r[0]`` does not meet the value\\n constraint. Ranges do support negative indices, but these are\\n interpreted as indexing from the end of the sequence determined by\\n the positive indices.\\n\\n Ranges containing absolute values larger than ``sys.maxsize`` are\\n permitted but some features (such as ``len()``) may raise\\n ``OverflowError``.\\n\\n Range examples:\\n\\n >>> list(range(10))\\n [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]\\n >>> list(range(1, 11))\\n [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]\\n >>> list(range(0, 30, 5))\\n [0, 5, 10, 15, 20, 25]\\n >>> list(range(0, 10, 3))\\n [0, 3, 6, 9]\\n >>> list(range(0, -10, -1))\\n [0, -1, -2, -3, -4, -5, -6, -7, -8, -9]\\n >>> list(range(0))\\n []\\n >>> list(range(1, 0))\\n []\\n\\n Ranges implement all of the *common* sequence operations except\\n concatenation and repetition (due to the fact that range objects\\n can only represent sequences that follow a strict pattern and\\n repetition and concatenation will usually violate that pattern).\\n\\nThe advantage of the ``range`` type over a regular ``list`` or\\n``tuple`` is that a ``range`` object will always take the same (small)\\namount of memory, no matter the size of the range it represents (as it\\nonly stores the ``start``, ``stop`` and ``step`` values, calculating\\nindividual items and subranges as needed).\\n\\nRange objects implement the ``collections.Sequence`` ABC, and provide\\nfeatures such as containment tests, element index lookup, slicing and\\nsupport for negative indices (see *Sequence Types --- list, tuple,\\nrange*):\\n\\n>>> r = range(0, 20, 2)\\n>>> r\\nrange(0, 20, 2)\\n>>> 11 in r\\nFalse\\n>>> 10 in r\\nTrue\\n>>> r.index(10)\\n5\\n>>> r[5]\\n10\\n>>> r[:5]\\nrange(0, 10, 2)\\n>>> r[-1]\\n18\\n\\nTesting range objects for equality with ``==`` and ``!=`` compares\\nthem as sequences. That is, two range objects are considered equal if\\nthey represent the same sequence of values. (Note that two range\\nobjects that compare equal might have different ``start``, ``stop``\\nand ``step`` attributes, for example ``range(0) == range(2, 1, 3)`` or\\n``range(0, 3, 2) == range(0, 4, 2)``.)\\n\\nChanged in version 3.2: Implement the Sequence ABC. Support slicing\\nand negative indices. Test ``int`` objects for membership in constant\\ntime instead of iterating through all items.\\n\\nChanged in version 3.3: Define \\'==\\' and \\'!=\\' to compare range objects\\nbased on the sequence of values they define (instead of comparing\\nbased on object identity).\\n\\nNew in version 3.3: The ``start``, ``stop`` and ``step`` attributes.\\n',\n'typesseq-mutable':\"\\nMutable Sequence Types\\n**********************\\n\\nThe operations in the following table are defined on mutable sequence\\ntypes. 
The ``collections.abc.MutableSequence`` ABC is provided to make\\nit easier to correctly implement these operations on custom sequence\\ntypes.\\n\\nIn the table *s* is an instance of a mutable sequence type, *t* is any\\niterable object and *x* is an arbitrary object that meets any type and\\nvalue restrictions imposed by *s* (for example, ``bytearray`` only\\naccepts integers that meet the value restriction ``0 <= x <= 255``).\\n\\n+--------------------------------+----------------------------------+-----------------------+\\n| Operation | Result | Notes |\\n+================================+==================================+=======================+\\n| ``s[i] = x`` | item *i* of *s* is replaced by | |\\n| | *x* | |\\n+--------------------------------+----------------------------------+-----------------------+\\n| ``s[i:j] = t`` | slice of *s* from *i* to *j* is | |\\n| | replaced by the contents of the | |\\n| | iterable *t* | |\\n+--------------------------------+----------------------------------+-----------------------+\\n| ``del s[i:j]`` | same as ``s[i:j] = []`` | |\\n+--------------------------------+----------------------------------+-----------------------+\\n| ``s[i:j:k] = t`` | the elements of ``s[i:j:k]`` are | (1) |\\n| | replaced by those of *t* | |\\n+--------------------------------+----------------------------------+-----------------------+\\n| ``del s[i:j:k]`` | removes the elements of | |\\n| | ``s[i:j:k]`` from the list | |\\n+--------------------------------+----------------------------------+-----------------------+\\n| ``s.append(x)`` | appends *x* to the end of the | |\\n| | sequence (same as | |\\n| | ``s[len(s):len(s)] = [x]``) | |\\n+--------------------------------+----------------------------------+-----------------------+\\n| ``s.clear()`` | removes all items from ``s`` | (5) |\\n| | (same as ``del s[:]``) | |\\n+--------------------------------+----------------------------------+-----------------------+\\n| ``s.copy()`` | creates a shallow copy of ``s`` | (5) |\\n| | (same as ``s[:]``) | |\\n+--------------------------------+----------------------------------+-----------------------+\\n| ``s.extend(t)`` | extends *s* with the contents of | |\\n| | *t* (same as ``s[len(s):len(s)] | |\\n| | = t``) | |\\n+--------------------------------+----------------------------------+-----------------------+\\n| ``s.insert(i, x)`` | inserts *x* into *s* at the | |\\n| | index given by *i* (same as | |\\n| | ``s[i:i] = [x]``) | |\\n+--------------------------------+----------------------------------+-----------------------+\\n| ``s.pop([i])`` | retrieves the item at *i* and | (2) |\\n| | also removes it from *s* | |\\n+--------------------------------+----------------------------------+-----------------------+\\n| ``s.remove(x)`` | remove the first item from *s* | (3) |\\n| | where ``s[i] == x`` | |\\n+--------------------------------+----------------------------------+-----------------------+\\n| ``s.reverse()`` | reverses the items of *s* in | (4) |\\n| | place | |\\n+--------------------------------+----------------------------------+-----------------------+\\n\\nNotes:\\n\\n1. *t* must have the same length as the slice it is replacing.\\n\\n2. The optional argument *i* defaults to ``-1``, so that by default\\n the last item is removed and returned.\\n\\n3. ``remove`` raises ``ValueError`` when *x* is not found in *s*.\\n\\n4. The ``reverse()`` method modifies the sequence in place for economy\\n of space when reversing a large sequence. 
To remind users that it\\n operates by side effect, it does not return the reversed sequence.\\n\\n5. ``clear()`` and ``copy()`` are included for consistency with the\\n interfaces of mutable containers that don't support slicing\\n operations (such as ``dict`` and ``set``)\\n\\n New in version 3.3: ``clear()`` and ``copy()`` methods.\\n\",\n'unary':'\\nUnary arithmetic and bitwise operations\\n***************************************\\n\\nAll unary arithmetic and bitwise operations have the same priority:\\n\\n u_expr ::= power | \"-\" u_expr | \"+\" u_expr | \"~\" u_expr\\n\\nThe unary ``-`` (minus) operator yields the negation of its numeric\\nargument.\\n\\nThe unary ``+`` (plus) operator yields its numeric argument unchanged.\\n\\nThe unary ``~`` (invert) operator yields the bitwise inversion of its\\ninteger argument. The bitwise inversion of ``x`` is defined as\\n``-(x+1)``. It only applies to integral numbers.\\n\\nIn all three cases, if the argument does not have the proper type, a\\n``TypeError`` exception is raised.\\n',\n'while':'\\nThe ``while`` statement\\n***********************\\n\\nThe ``while`` statement is used for repeated execution as long as an\\nexpression is true:\\n\\n while_stmt ::= \"while\" expression \":\" suite\\n [\"else\" \":\" suite]\\n\\nThis repeatedly tests the expression and, if it is true, executes the\\nfirst suite; if the expression is false (which may be the first time\\nit is tested) the suite of the ``else`` clause, if present, is\\nexecuted and the loop terminates.\\n\\nA ``break`` statement executed in the first suite terminates the loop\\nwithout executing the ``else`` clause\\'s suite. A ``continue``\\nstatement executed in the first suite skips the rest of the suite and\\ngoes back to testing the expression.\\n',\n'with':'\\nThe ``with`` statement\\n**********************\\n\\nThe ``with`` statement is used to wrap the execution of a block with\\nmethods defined by a context manager (see section *With Statement\\nContext Managers*). This allows common\\n``try``...``except``...``finally`` usage patterns to be encapsulated\\nfor convenient reuse.\\n\\n with_stmt ::= \"with\" with_item (\",\" with_item)* \":\" suite\\n with_item ::= expression [\"as\" target]\\n\\nThe execution of the ``with`` statement with one \"item\" proceeds as\\nfollows:\\n\\n1. The context expression (the expression given in the ``with_item``)\\n is evaluated to obtain a context manager.\\n\\n2. The context manager\\'s ``__exit__()`` is loaded for later use.\\n\\n3. The context manager\\'s ``__enter__()`` method is invoked.\\n\\n4. If a target was included in the ``with`` statement, the return\\n value from ``__enter__()`` is assigned to it.\\n\\n Note: The ``with`` statement guarantees that if the ``__enter__()``\\n method returns without an error, then ``__exit__()`` will always\\n be called. Thus, if an error occurs during the assignment to the\\n target list, it will be treated the same as an error occurring\\n within the suite would be. See step 6 below.\\n\\n5. The suite is executed.\\n\\n6. The context manager\\'s ``__exit__()`` method is invoked. If an\\n exception caused the suite to be exited, its type, value, and\\n traceback are passed as arguments to ``__exit__()``. Otherwise,\\n three ``None`` arguments are supplied.\\n\\n If the suite was exited due to an exception, and the return value\\n from the ``__exit__()`` method was false, the exception is\\n reraised. 
If the return value was true, the exception is\\n suppressed, and execution continues with the statement following\\n the ``with`` statement.\\n\\n If the suite was exited for any reason other than an exception, the\\n return value from ``__exit__()`` is ignored, and execution proceeds\\n at the normal location for the kind of exit that was taken.\\n\\nWith more than one item, the context managers are processed as if\\nmultiple ``with`` statements were nested:\\n\\n with A() as a, B() as b:\\n suite\\n\\nis equivalent to\\n\\n with A() as a:\\n with B() as b:\\n suite\\n\\nChanged in version 3.1: Support for multiple context expressions.\\n\\nSee also:\\n\\n **PEP 0343** - The \"with\" statement\\n The specification, background, and examples for the Python\\n ``with`` statement.\\n',\n'yield':'\\nThe ``yield`` statement\\n***********************\\n\\n yield_stmt ::= yield_expression\\n\\nThe ``yield`` statement is only used when defining a generator\\nfunction, and is only used in the body of the generator function.\\nUsing a ``yield`` statement in a function definition is sufficient to\\ncause that definition to create a generator function instead of a\\nnormal function.\\n\\nWhen a generator function is called, it returns an iterator known as a\\ngenerator iterator, or more commonly, a generator. The body of the\\ngenerator function is executed by calling the ``next()`` function on\\nthe generator repeatedly until it raises an exception.\\n\\nWhen a ``yield`` statement is executed, the state of the generator is\\nfrozen and the value of ``expression_list`` is returned to\\n``next()``\\'s caller. By \"frozen\" we mean that all local state is\\nretained, including the current bindings of local variables, the\\ninstruction pointer, and the internal evaluation stack: enough\\ninformation is saved so that the next time ``next()`` is invoked, the\\nfunction can proceed exactly as if the ``yield`` statement were just\\nanother external call.\\n\\nThe ``yield`` statement is allowed in the ``try`` clause of a ``try``\\n... ``finally`` construct. If the generator is not resumed before it\\nis finalized (by reaching a zero reference count or by being garbage\\ncollected), the generator-iterator\\'s ``close()`` method will be\\ncalled, allowing any pending ``finally`` clauses to execute.\\n\\nWhen ``yield from `` is used, it treats the supplied expression\\nas a subiterator, producing values from it until the underlying\\niterator is exhausted.\\n\\n Changed in version 3.3: Added ``yield from `` to delegate\\n control flow to a subiterator\\n\\nFor full details of ``yield`` semantics, refer to the *Yield\\nexpressions* section.\\n\\nSee also:\\n\\n **PEP 0255** - Simple Generators\\n The proposal for adding generators and the ``yield`` statement\\n to Python.\\n\\n **PEP 0342** - Coroutines via Enhanced Generators\\n The proposal to enhance the API and syntax of generators, making\\n them usable as simple coroutines.\\n\\n **PEP 0380** - Syntax for Delegating to a Subgenerator\\n The proposal to introduce the ``yield_from`` syntax, making\\n delegation to sub-generators easy.\\n'}\n", []], "pydoc_data": [".py", "", [], 1], "unittest.case": [".py", "''\n\nimport sys\nimport functools\nimport difflib\nimport logging\nimport pprint\nimport re\nimport warnings\nimport collections\nimport contextlib\nimport traceback\n\nfrom . 
import result\nfrom .util import (strclass,safe_repr,_count_diff_all_purpose,\n_count_diff_hashable,_common_shorten_repr)\n\n__unittest=True\n\n_subtest_msg_sentinel=object()\n\nDIFF_OMITTED=('\\nDiff is %s characters long. '\n'Set self.maxDiff to None to see it.')\n\nclass SkipTest(Exception):\n ''\n\n\n\n\n \n \nclass _ShouldStop(Exception):\n ''\n\n \n \nclass _UnexpectedSuccess(Exception):\n ''\n\n \n \n \nclass _Outcome(object):\n def __init__(self,result=None ):\n self.expecting_failure=False\n self.result=result\n self.result_supports_subtests=hasattr(result,\"addSubTest\")\n self.success=True\n self.skipped=[]\n self.expectedFailure=None\n self.errors=[]\n \n @contextlib.contextmanager\n def testPartExecutor(self,test_case,isTest=False ):\n old_success=self.success\n self.success=True\n try :\n yield\n except KeyboardInterrupt:\n raise\n except SkipTest as e:\n self.success=False\n self.skipped.append((test_case,str(e)))\n except _ShouldStop:\n pass\n except :\n exc_info=sys.exc_info()\n if self.expecting_failure:\n self.expectedFailure=exc_info\n else :\n self.success=False\n self.errors.append((test_case,exc_info))\n \n \n exc_info=None\n else :\n if self.result_supports_subtests and self.success:\n self.errors.append((test_case,None ))\n finally :\n self.success=self.success and old_success\n \n \ndef _id(obj):\n return obj\n \ndef skip(reason):\n ''\n\n \n def decorator(test_item):\n if not isinstance(test_item,type):\n @functools.wraps(test_item)\n def skip_wrapper(*args,**kwargs):\n raise SkipTest(reason)\n test_item=skip_wrapper\n \n test_item.__unittest_skip__=True\n test_item.__unittest_skip_why__=reason\n return test_item\n return decorator\n \ndef skipIf(condition,reason):\n ''\n\n \n if condition:\n return skip(reason)\n return _id\n \ndef skipUnless(condition,reason):\n ''\n\n \n if not condition:\n return skip(reason)\n return _id\n \ndef expectedFailure(test_item):\n test_item.__unittest_expecting_failure__=True\n return test_item\n \ndef _is_subtype(expected,basetype):\n if isinstance(expected,tuple):\n return all(_is_subtype(e,basetype)for e in expected)\n return isinstance(expected,type)and issubclass(expected,basetype)\n \nclass _BaseTestCaseContext:\n\n def __init__(self,test_case):\n self.test_case=test_case\n \n def _raiseFailure(self,standardMsg):\n msg=self.test_case._formatMessage(self.msg,standardMsg)\n raise self.test_case.failureException(msg)\n \nclass _AssertRaisesBaseContext(_BaseTestCaseContext):\n\n def __init__(self,expected,test_case,expected_regex=None ):\n _BaseTestCaseContext.__init__(self,test_case)\n self.expected=expected\n self.test_case=test_case\n if expected_regex is not None :\n expected_regex=re.compile(expected_regex)\n self.expected_regex=expected_regex\n self.obj_name=None\n self.msg=None\n \n def handle(self,name,args,kwargs):\n ''\n\n\n\n\n \n try :\n if not _is_subtype(self.expected,self._base_type):\n raise TypeError('%s() arg 1 must be %s'%\n (name,self._base_type_str))\n if args and args[0]is None :\n warnings.warn(\"callable is None\",\n DeprecationWarning,3)\n args=()\n if not args:\n self.msg=kwargs.pop('msg',None )\n if kwargs:\n warnings.warn('%r is an invalid keyword argument for '\n 'this function'%next(iter(kwargs)),\n DeprecationWarning,3)\n return self\n \n callable_obj,*args=args\n try :\n self.obj_name=callable_obj.__name__\n except AttributeError:\n self.obj_name=str(callable_obj)\n with self:\n callable_obj(*args,**kwargs)\n finally :\n \n self=None\n \n \nclass _AssertRaisesContext(_AssertRaisesBaseContext):\n 
''\n \n _base_type=BaseException\n _base_type_str='an exception type or tuple of exception types'\n \n def __enter__(self):\n return self\n \n def __exit__(self,exc_type,exc_value,tb):\n if exc_type is None :\n try :\n exc_name=self.expected.__name__\n except AttributeError:\n exc_name=str(self.expected)\n if self.obj_name:\n self._raiseFailure(\"{} not raised by {}\".format(exc_name,\n self.obj_name))\n else :\n self._raiseFailure(\"{} not raised\".format(exc_name))\n else :\n traceback.clear_frames(tb)\n if not issubclass(exc_type,self.expected):\n \n return False\n \n self.exception=exc_value.with_traceback(None )\n if self.expected_regex is None :\n return True\n \n expected_regex=self.expected_regex\n if not expected_regex.search(str(exc_value)):\n self._raiseFailure('\"{}\" does not match \"{}\"'.format(\n expected_regex.pattern,str(exc_value)))\n return True\n \n \nclass _AssertWarnsContext(_AssertRaisesBaseContext):\n ''\n \n _base_type=Warning\n _base_type_str='a warning type or tuple of warning types'\n \n def __enter__(self):\n \n \n for v in sys.modules.values():\n if getattr(v,'__warningregistry__',None ):\n v.__warningregistry__={}\n self.warnings_manager=warnings.catch_warnings(record=True )\n self.warnings=self.warnings_manager.__enter__()\n warnings.simplefilter(\"always\",self.expected)\n return self\n \n def __exit__(self,exc_type,exc_value,tb):\n self.warnings_manager.__exit__(exc_type,exc_value,tb)\n if exc_type is not None :\n \n return\n try :\n exc_name=self.expected.__name__\n except AttributeError:\n exc_name=str(self.expected)\n first_matching=None\n for m in self.warnings:\n w=m.message\n if not isinstance(w,self.expected):\n continue\n if first_matching is None :\n first_matching=w\n if (self.expected_regex is not None and\n not self.expected_regex.search(str(w))):\n continue\n \n self.warning=w\n self.filename=m.filename\n self.lineno=m.lineno\n return\n \n if first_matching is not None :\n self._raiseFailure('\"{}\" does not match \"{}\"'.format(\n self.expected_regex.pattern,str(first_matching)))\n if self.obj_name:\n self._raiseFailure(\"{} not triggered by {}\".format(exc_name,\n self.obj_name))\n else :\n self._raiseFailure(\"{} not triggered\".format(exc_name))\n \n \n \n_LoggingWatcher=collections.namedtuple(\"_LoggingWatcher\",\n[\"records\",\"output\"])\n\n\nclass _CapturingHandler(logging.Handler):\n ''\n\n \n \n def __init__(self):\n logging.Handler.__init__(self)\n self.watcher=_LoggingWatcher([],[])\n \n def flush(self):\n pass\n \n def emit(self,record):\n self.watcher.records.append(record)\n msg=self.format(record)\n self.watcher.output.append(msg)\n \n \n \nclass _AssertLogsContext(_BaseTestCaseContext):\n ''\n \n LOGGING_FORMAT=\"%(levelname)s:%(name)s:%(message)s\"\n \n def __init__(self,test_case,logger_name,level):\n _BaseTestCaseContext.__init__(self,test_case)\n self.logger_name=logger_name\n if level:\n self.level=logging._nameToLevel.get(level,level)\n else :\n self.level=logging.INFO\n self.msg=None\n \n def __enter__(self):\n if isinstance(self.logger_name,logging.Logger):\n logger=self.logger=self.logger_name\n else :\n logger=self.logger=logging.getLogger(self.logger_name)\n formatter=logging.Formatter(self.LOGGING_FORMAT)\n handler=_CapturingHandler()\n handler.setFormatter(formatter)\n self.watcher=handler.watcher\n self.old_handlers=logger.handlers[:]\n self.old_level=logger.level\n self.old_propagate=logger.propagate\n logger.handlers=[handler]\n logger.setLevel(self.level)\n logger.propagate=False\n return handler.watcher\n \n 
def __exit__(self,exc_type,exc_value,tb):\n self.logger.handlers=self.old_handlers\n self.logger.propagate=self.old_propagate\n self.logger.setLevel(self.old_level)\n if exc_type is not None :\n \n return False\n if len(self.watcher.records)==0:\n self._raiseFailure(\n \"no logs of level {} or higher triggered on {}\"\n .format(logging.getLevelName(self.level),self.logger.name))\n \n \nclass _OrderedChainMap(collections.ChainMap):\n def __iter__(self):\n seen=set()\n for mapping in self.maps:\n for k in mapping:\n if k not in seen:\n seen.add(k)\n yield k\n \n \nclass TestCase(object):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n failureException=AssertionError\n \n longMessage=True\n \n maxDiff=80 *8\n \n \n \n _diffThreshold=2 **16\n \n \n \n _classSetupFailed=False\n \n def __init__(self,methodName='runTest'):\n ''\n\n\n \n self._testMethodName=methodName\n self._outcome=None\n self._testMethodDoc='No test'\n try :\n testMethod=getattr(self,methodName)\n except AttributeError:\n if methodName !='runTest':\n \n \n raise ValueError(\"no such test method in %s: %s\"%\n (self.__class__,methodName))\n else :\n self._testMethodDoc=testMethod.__doc__\n self._cleanups=[]\n self._subtest=None\n \n \n \n \n self._type_equality_funcs={}\n self.addTypeEqualityFunc(dict,'assertDictEqual')\n self.addTypeEqualityFunc(list,'assertListEqual')\n self.addTypeEqualityFunc(tuple,'assertTupleEqual')\n self.addTypeEqualityFunc(set,'assertSetEqual')\n self.addTypeEqualityFunc(frozenset,'assertSetEqual')\n self.addTypeEqualityFunc(str,'assertMultiLineEqual')\n \n def addTypeEqualityFunc(self,typeobj,function):\n ''\n\n\n\n\n\n\n\n\n\n\n \n self._type_equality_funcs[typeobj]=function\n \n def addCleanup(self,function,*args,**kwargs):\n ''\n\n\n\n \n self._cleanups.append((function,args,kwargs))\n \n def setUp(self):\n ''\n pass\n \n def tearDown(self):\n ''\n pass\n \n @classmethod\n def setUpClass(cls):\n ''\n \n @classmethod\n def tearDownClass(cls):\n ''\n \n def countTestCases(self):\n return 1\n \n def defaultTestResult(self):\n return result.TestResult()\n \n def shortDescription(self):\n ''\n\n\n\n\n \n doc=self._testMethodDoc\n return doc and doc.split(\"\\n\")[0].strip()or None\n \n \n def id(self):\n return \"%s.%s\"%(strclass(self.__class__),self._testMethodName)\n \n def __eq__(self,other):\n if type(self)is not type(other):\n return NotImplemented\n \n return self._testMethodName ==other._testMethodName\n \n def __hash__(self):\n return hash((type(self),self._testMethodName))\n \n def __str__(self):\n return \"%s (%s)\"%(self._testMethodName,strclass(self.__class__))\n \n def __repr__(self):\n return \"<%s testMethod=%s>\"%\\\n (strclass(self.__class__),self._testMethodName)\n \n def _addSkip(self,result,test_case,reason):\n addSkip=getattr(result,'addSkip',None )\n if addSkip is not None :\n addSkip(test_case,reason)\n else :\n warnings.warn(\"TestResult has no addSkip method, skips not reported\",\n RuntimeWarning,2)\n result.addSuccess(test_case)\n \n @contextlib.contextmanager\n def subTest(self,msg=_subtest_msg_sentinel,**params):\n ''\n\n\n\n\n \n if not self._outcome.result_supports_subtests:\n yield\n return\n parent=self._subtest\n if parent is None :\n params_map=_OrderedChainMap(params)\n else :\n params_map=parent.params.new_child(params)\n self._subtest=_SubTest(self,msg,params_map)\n try :\n with self._outcome.testPartExecutor(self._subtest,isTest=True ):\n yield\n if not self._outcome.success:\n result=self._outcome.result\n if result is not None and 
result.failfast:\n raise _ShouldStop\n elif self._outcome.expectedFailure:\n \n \n raise _ShouldStop\n finally :\n self._subtest=parent\n \n def _feedErrorsToResult(self,result,errors):\n for test,exc_info in errors:\n if isinstance(test,_SubTest):\n result.addSubTest(test.test_case,test,exc_info)\n elif exc_info is not None :\n if issubclass(exc_info[0],self.failureException):\n result.addFailure(test,exc_info)\n else :\n result.addError(test,exc_info)\n \n def _addExpectedFailure(self,result,exc_info):\n try :\n addExpectedFailure=result.addExpectedFailure\n except AttributeError:\n warnings.warn(\"TestResult has no addExpectedFailure method, reporting as passes\",\n RuntimeWarning)\n result.addSuccess(self)\n else :\n addExpectedFailure(self,exc_info)\n \n def _addUnexpectedSuccess(self,result):\n try :\n addUnexpectedSuccess=result.addUnexpectedSuccess\n except AttributeError:\n warnings.warn(\"TestResult has no addUnexpectedSuccess method, reporting as failure\",\n RuntimeWarning)\n \n \n try :\n raise _UnexpectedSuccess from None\n except _UnexpectedSuccess:\n result.addFailure(self,sys.exc_info())\n else :\n addUnexpectedSuccess(self)\n \n def run(self,result=None ):\n orig_result=result\n if result is None :\n result=self.defaultTestResult()\n startTestRun=getattr(result,'startTestRun',None )\n if startTestRun is not None :\n startTestRun()\n \n result.startTest(self)\n \n testMethod=getattr(self,self._testMethodName)\n if (getattr(self.__class__,\"__unittest_skip__\",False )or\n getattr(testMethod,\"__unittest_skip__\",False )):\n \n try :\n skip_why=(getattr(self.__class__,'__unittest_skip_why__','')\n or getattr(testMethod,'__unittest_skip_why__',''))\n self._addSkip(result,self,skip_why)\n finally :\n result.stopTest(self)\n return\n expecting_failure_method=getattr(testMethod,\n \"__unittest_expecting_failure__\",False )\n expecting_failure_class=getattr(self,\n \"__unittest_expecting_failure__\",False )\n expecting_failure=expecting_failure_class or expecting_failure_method\n outcome=_Outcome(result)\n try :\n self._outcome=outcome\n \n with outcome.testPartExecutor(self):\n self.setUp()\n if outcome.success:\n outcome.expecting_failure=expecting_failure\n with outcome.testPartExecutor(self,isTest=True ):\n testMethod()\n outcome.expecting_failure=False\n with outcome.testPartExecutor(self):\n self.tearDown()\n \n self.doCleanups()\n for test,reason in outcome.skipped:\n self._addSkip(result,test,reason)\n self._feedErrorsToResult(result,outcome.errors)\n if outcome.success:\n if expecting_failure:\n if outcome.expectedFailure:\n self._addExpectedFailure(result,outcome.expectedFailure)\n else :\n self._addUnexpectedSuccess(result)\n else :\n result.addSuccess(self)\n return result\n finally :\n result.stopTest(self)\n if orig_result is None :\n stopTestRun=getattr(result,'stopTestRun',None )\n if stopTestRun is not None :\n stopTestRun()\n \n \n \n \n outcome.errors.clear()\n outcome.expectedFailure=None\n \n \n self._outcome=None\n \n def doCleanups(self):\n ''\n \n outcome=self._outcome or _Outcome()\n while self._cleanups:\n function,args,kwargs=self._cleanups.pop()\n with outcome.testPartExecutor(self):\n function(*args,**kwargs)\n \n \n \n return outcome.success\n \n def __call__(self,*args,**kwds):\n return self.run(*args,**kwds)\n \n def debug(self):\n ''\n self.setUp()\n getattr(self,self._testMethodName)()\n self.tearDown()\n while self._cleanups:\n function,args,kwargs=self._cleanups.pop(-1)\n function(*args,**kwargs)\n \n def skipTest(self,reason):\n ''\n raise 
SkipTest(reason)\n \n def fail(self,msg=None ):\n ''\n raise self.failureException(msg)\n \n def assertFalse(self,expr,msg=None ):\n ''\n if expr:\n msg=self._formatMessage(msg,\"%s is not false\"%safe_repr(expr))\n raise self.failureException(msg)\n \n def assertTrue(self,expr,msg=None ):\n ''\n if not expr:\n msg=self._formatMessage(msg,\"%s is not true\"%safe_repr(expr))\n raise self.failureException(msg)\n \n def _formatMessage(self,msg,standardMsg):\n ''\n\n\n\n\n\n\n\n \n if not self.longMessage:\n return msg or standardMsg\n if msg is None :\n return standardMsg\n try :\n \n \n return '%s : %s'%(standardMsg,msg)\n except UnicodeDecodeError:\n return '%s : %s'%(safe_repr(standardMsg),safe_repr(msg))\n \n def assertRaises(self,expected_exception,*args,**kwargs):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n context=_AssertRaisesContext(expected_exception,self)\n try :\n return context.handle('assertRaises',args,kwargs)\n finally :\n \n context=None\n \n def assertWarns(self,expected_warning,*args,**kwargs):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n context=_AssertWarnsContext(expected_warning,self)\n return context.handle('assertWarns',args,kwargs)\n \n def assertLogs(self,logger=None ,level=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n return _AssertLogsContext(self,logger,level)\n \n def _getAssertEqualityFunc(self,first,second):\n ''\n\n\n\n\n \n \n \n \n \n \n \n \n \n \n \n if type(first)is type(second):\n asserter=self._type_equality_funcs.get(type(first))\n if asserter is not None :\n if isinstance(asserter,str):\n asserter=getattr(self,asserter)\n return asserter\n \n return self._baseAssertEqual\n \n def _baseAssertEqual(self,first,second,msg=None ):\n ''\n if not first ==second:\n standardMsg='%s != %s'%_common_shorten_repr(first,second)\n msg=self._formatMessage(msg,standardMsg)\n raise self.failureException(msg)\n \n def assertEqual(self,first,second,msg=None ):\n ''\n\n \n assertion_func=self._getAssertEqualityFunc(first,second)\n assertion_func(first,second,msg=msg)\n \n def assertNotEqual(self,first,second,msg=None ):\n ''\n\n \n if not first !=second:\n msg=self._formatMessage(msg,'%s == %s'%(safe_repr(first),\n safe_repr(second)))\n raise self.failureException(msg)\n \n def assertAlmostEqual(self,first,second,places=None ,msg=None ,\n delta=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n \n if first ==second:\n \n return\n if delta is not None and places is not None :\n raise TypeError(\"specify delta or places not both\")\n \n diff=abs(first -second)\n if delta is not None :\n if diff <=delta:\n return\n \n standardMsg='%s != %s within %s delta (%s difference)'%(\n safe_repr(first),\n safe_repr(second),\n safe_repr(delta),\n safe_repr(diff))\n else :\n if places is None :\n places=7\n \n if round(diff,places)==0:\n return\n \n standardMsg='%s != %s within %r places (%s difference)'%(\n safe_repr(first),\n safe_repr(second),\n places,\n safe_repr(diff))\n msg=self._formatMessage(msg,standardMsg)\n raise self.failureException(msg)\n \n def assertNotAlmostEqual(self,first,second,places=None ,msg=None ,\n delta=None ):\n ''\n\n\n\n\n\n\n\n\n \n if delta is not None and places is not None :\n raise TypeError(\"specify delta or places not both\")\n diff=abs(first -second)\n if delta is not None :\n if not (first ==second)and diff >delta:\n return\n standardMsg='%s == %s within %s delta (%s difference)'%(\n safe_repr(first),\n safe_repr(second),\n safe_repr(delta),\n safe_repr(diff))\n else :\n if places is None :\n places=7\n if not (first ==second)and 
round(diff,places)!=0:\n return\n standardMsg='%s == %s within %r places'%(safe_repr(first),\n safe_repr(second),\n places)\n \n msg=self._formatMessage(msg,standardMsg)\n raise self.failureException(msg)\n \n def assertSequenceEqual(self,seq1,seq2,msg=None ,seq_type=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n if seq_type is not None :\n seq_type_name=seq_type.__name__\n if not isinstance(seq1,seq_type):\n raise self.failureException('First sequence is not a %s: %s'\n %(seq_type_name,safe_repr(seq1)))\n if not isinstance(seq2,seq_type):\n raise self.failureException('Second sequence is not a %s: %s'\n %(seq_type_name,safe_repr(seq2)))\n else :\n seq_type_name=\"sequence\"\n \n differing=None\n try :\n len1=len(seq1)\n except (TypeError,NotImplementedError):\n differing='First %s has no length. Non-sequence?'%(\n seq_type_name)\n \n if differing is None :\n try :\n len2=len(seq2)\n except (TypeError,NotImplementedError):\n differing='Second %s has no length. Non-sequence?'%(\n seq_type_name)\n \n if differing is None :\n if seq1 ==seq2:\n return\n \n differing='%ss differ: %s != %s\\n'%(\n (seq_type_name.capitalize(),)+\n _common_shorten_repr(seq1,seq2))\n \n for i in range(min(len1,len2)):\n try :\n item1=seq1[i]\n except (TypeError,IndexError,NotImplementedError):\n differing +=('\\nUnable to index element %d of first %s\\n'%\n (i,seq_type_name))\n break\n \n try :\n item2=seq2[i]\n except (TypeError,IndexError,NotImplementedError):\n differing +=('\\nUnable to index element %d of second %s\\n'%\n (i,seq_type_name))\n break\n \n if item1 !=item2:\n differing +=('\\nFirst differing element %d:\\n%s\\n%s\\n'%\n ((i,)+_common_shorten_repr(item1,item2)))\n break\n else :\n if (len1 ==len2 and seq_type is None and\n type(seq1)!=type(seq2)):\n \n return\n \n if len1 >len2:\n differing +=('\\nFirst %s contains %d additional '\n 'elements.\\n'%(seq_type_name,len1 -len2))\n try :\n differing +=('First extra element %d:\\n%s\\n'%\n (len2,safe_repr(seq1[len2])))\n except (TypeError,IndexError,NotImplementedError):\n differing +=('Unable to index element %d '\n 'of first %s\\n'%(len2,seq_type_name))\n elif len1 self._diffThreshold or\n len(second)>self._diffThreshold):\n self._baseAssertEqual(first,second,msg)\n firstlines=first.splitlines(keepends=True )\n secondlines=second.splitlines(keepends=True )\n if len(firstlines)==1 and first.strip('\\r\\n')==first:\n firstlines=[first+'\\n']\n secondlines=[second+'\\n']\n standardMsg='%s != %s'%_common_shorten_repr(first,second)\n diff='\\n'+''.join(difflib.ndiff(firstlines,secondlines))\n standardMsg=self._truncateMessage(standardMsg,diff)\n self.fail(self._formatMessage(msg,standardMsg))\n \n def assertLess(self,a,b,msg=None ):\n ''\n if not a b:\n standardMsg='%s not greater than %s'%(safe_repr(a),safe_repr(b))\n self.fail(self._formatMessage(msg,standardMsg))\n \n def assertGreaterEqual(self,a,b,msg=None ):\n ''\n if not a >=b:\n standardMsg='%s not greater than or equal to %s'%(safe_repr(a),safe_repr(b))\n self.fail(self._formatMessage(msg,standardMsg))\n \n def assertIsNone(self,obj,msg=None ):\n ''\n if obj is not None :\n standardMsg='%s is not None'%(safe_repr(obj),)\n self.fail(self._formatMessage(msg,standardMsg))\n \n def assertIsNotNone(self,obj,msg=None ):\n ''\n if obj is None :\n standardMsg='unexpectedly None'\n self.fail(self._formatMessage(msg,standardMsg))\n \n def assertIsInstance(self,obj,cls,msg=None ):\n ''\n \n if not isinstance(obj,cls):\n standardMsg='%s is not an instance of %r'%(safe_repr(obj),cls)\n 
self.fail(self._formatMessage(msg,standardMsg))\n \n def assertNotIsInstance(self,obj,cls,msg=None ):\n ''\n if isinstance(obj,cls):\n standardMsg='%s is an instance of %r'%(safe_repr(obj),cls)\n self.fail(self._formatMessage(msg,standardMsg))\n \n def assertRaisesRegex(self,expected_exception,expected_regex,\n *args,**kwargs):\n ''\n\n\n\n\n\n\n\n\n\n \n context=_AssertRaisesContext(expected_exception,self,expected_regex)\n return context.handle('assertRaisesRegex',args,kwargs)\n \n def assertWarnsRegex(self,expected_warning,expected_regex,\n *args,**kwargs):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n context=_AssertWarnsContext(expected_warning,self,expected_regex)\n return context.handle('assertWarnsRegex',args,kwargs)\n \n def assertRegex(self,text,expected_regex,msg=None ):\n ''\n if isinstance(expected_regex,(str,bytes)):\n assert expected_regex,\"expected_regex must not be empty.\"\n expected_regex=re.compile(expected_regex)\n if not expected_regex.search(text):\n standardMsg=\"Regex didn't match: %r not found in %r\"%(\n expected_regex.pattern,text)\n \n msg=self._formatMessage(msg,standardMsg)\n raise self.failureException(msg)\n \n def assertNotRegex(self,text,unexpected_regex,msg=None ):\n ''\n if isinstance(unexpected_regex,(str,bytes)):\n unexpected_regex=re.compile(unexpected_regex)\n match=unexpected_regex.search(text)\n if match:\n standardMsg='Regex matched: %r matches %r in %r'%(\n text[match.start():match.end()],\n unexpected_regex.pattern,\n text)\n \n msg=self._formatMessage(msg,standardMsg)\n raise self.failureException(msg)\n \n \n def _deprecate(original_func):\n def deprecated_func(*args,**kwargs):\n warnings.warn(\n 'Please use {0} instead.'.format(original_func.__name__),\n DeprecationWarning,2)\n return original_func(*args,**kwargs)\n return deprecated_func\n \n \n failUnlessEqual=assertEquals=_deprecate(assertEqual)\n failIfEqual=assertNotEquals=_deprecate(assertNotEqual)\n failUnlessAlmostEqual=assertAlmostEquals=_deprecate(assertAlmostEqual)\n failIfAlmostEqual=assertNotAlmostEquals=_deprecate(assertNotAlmostEqual)\n failUnless=assert_=_deprecate(assertTrue)\n failUnlessRaises=_deprecate(assertRaises)\n failIf=_deprecate(assertFalse)\n assertRaisesRegexp=_deprecate(assertRaisesRegex)\n assertRegexpMatches=_deprecate(assertRegex)\n assertNotRegexpMatches=_deprecate(assertNotRegex)\n \n \n \nclass FunctionTestCase(TestCase):\n ''\n\n\n\n\n\n \n \n def __init__(self,testFunc,setUp=None ,tearDown=None ,description=None ):\n super(FunctionTestCase,self).__init__()\n self._setUpFunc=setUp\n self._tearDownFunc=tearDown\n self._testFunc=testFunc\n self._description=description\n \n def setUp(self):\n if self._setUpFunc is not None :\n self._setUpFunc()\n \n def tearDown(self):\n if self._tearDownFunc is not None :\n self._tearDownFunc()\n \n def runTest(self):\n self._testFunc()\n \n def id(self):\n return self._testFunc.__name__\n \n def __eq__(self,other):\n if not isinstance(other,self.__class__):\n return NotImplemented\n \n return self._setUpFunc ==other._setUpFunc and\\\n self._tearDownFunc ==other._tearDownFunc and\\\n self._testFunc ==other._testFunc and\\\n self._description ==other._description\n \n def __hash__(self):\n return hash((type(self),self._setUpFunc,self._tearDownFunc,\n self._testFunc,self._description))\n \n def __str__(self):\n return \"%s (%s)\"%(strclass(self.__class__),\n self._testFunc.__name__)\n \n def __repr__(self):\n return \"<%s tec=%s>\"%(strclass(self.__class__),\n self._testFunc)\n \n def shortDescription(self):\n if self._description is 
not None :\n return self._description\n doc=self._testFunc.__doc__\n return doc and doc.split(\"\\n\")[0].strip()or None\n \n \nclass _SubTest(TestCase):\n\n def __init__(self,test_case,message,params):\n super().__init__()\n self._message=message\n self.test_case=test_case\n self.params=params\n self.failureException=test_case.failureException\n \n def runTest(self):\n raise NotImplementedError(\"subtests cannot be run directly\")\n \n def _subDescription(self):\n parts=[]\n if self._message is not _subtest_msg_sentinel:\n parts.append(\"[{}]\".format(self._message))\n if self.params:\n params_desc=', '.join(\n \"{}={!r}\".format(k,v)\n for (k,v)in self.params.items())\n parts.append(\"({})\".format(params_desc))\n return \" \".join(parts)or '()'\n \n def id(self):\n return \"{} {}\".format(self.test_case.id(),self._subDescription())\n \n def shortDescription(self):\n ''\n\n \n return self.test_case.shortDescription()\n \n def __str__(self):\n return \"{} {}\".format(self.test_case,self._subDescription())\n", ["collections", "contextlib", "difflib", "functools", "logging", "pprint", "re", "sys", "traceback", "unittest", "unittest.result", "unittest.util", "warnings"]], "unittest.loader": [".py", "''\n\nimport os\nimport re\nimport sys\nimport traceback\nimport types\nimport functools\nimport warnings\n\nfrom fnmatch import fnmatch,fnmatchcase\n\nfrom . import case,suite,util\n\n__unittest=True\n\n\n\n\nVALID_MODULE_NAME=re.compile(r'[_a-z]\\w*\\.py$',re.IGNORECASE)\n\n\nclass _FailedTest(case.TestCase):\n _testMethodName=None\n \n def __init__(self,method_name,exception):\n self._exception=exception\n super(_FailedTest,self).__init__(method_name)\n \n def __getattr__(self,name):\n if name !=self._testMethodName:\n return super(_FailedTest,self).__getattr__(name)\n def testFailure():\n raise self._exception\n return testFailure\n \n \ndef _make_failed_import_test(name,suiteClass):\n message='Failed to import test module: %s\\n%s'%(\n name,traceback.format_exc())\n return _make_failed_test(name,ImportError(message),suiteClass,message)\n \ndef _make_failed_load_tests(name,exception,suiteClass):\n message='Failed to call load_tests:\\n%s'%(traceback.format_exc(),)\n return _make_failed_test(\n name,exception,suiteClass,message)\n \ndef _make_failed_test(methodname,exception,suiteClass,message):\n test=_FailedTest(methodname,exception)\n return suiteClass((test,)),message\n \ndef _make_skipped_test(methodname,exception,suiteClass):\n @case.skip(str(exception))\n def testSkipped(self):\n pass\n attrs={methodname:testSkipped}\n TestClass=type(\"ModuleSkipped\",(case.TestCase,),attrs)\n return suiteClass((TestClass(methodname),))\n \ndef _jython_aware_splitext(path):\n if path.lower().endswith('$py.class'):\n return path[:-9]\n return os.path.splitext(path)[0]\n \n \nclass TestLoader(object):\n ''\n\n\n \n testMethodPrefix='test'\n sortTestMethodsUsing=staticmethod(util.three_way_cmp)\n testNamePatterns=None\n suiteClass=suite.TestSuite\n _top_level_dir=None\n \n def __init__(self):\n super(TestLoader,self).__init__()\n self.errors=[]\n \n \n self._loading_packages=set()\n \n def loadTestsFromTestCase(self,testCaseClass):\n ''\n if issubclass(testCaseClass,suite.TestSuite):\n raise TypeError(\"Test cases should not be derived from \"\n \"TestSuite. 
Maybe you meant to derive from \"\n \"TestCase?\")\n testCaseNames=self.getTestCaseNames(testCaseClass)\n if not testCaseNames and hasattr(testCaseClass,'runTest'):\n testCaseNames=['runTest']\n loaded_suite=self.suiteClass(map(testCaseClass,testCaseNames))\n return loaded_suite\n \n \n \n def loadTestsFromModule(self,module,*args,pattern=None ,**kws):\n ''\n \n \n \n \n if len(args)>0 or 'use_load_tests'in kws:\n warnings.warn('use_load_tests is deprecated and ignored',\n DeprecationWarning)\n kws.pop('use_load_tests',None )\n if len(args)>1:\n \n \n complaint=len(args)+1\n raise TypeError('loadTestsFromModule() takes 1 positional argument but {} were given'.format(complaint))\n if len(kws)!=0:\n \n \n \n \n complaint=sorted(kws)[0]\n raise TypeError(\"loadTestsFromModule() got an unexpected keyword argument '{}'\".format(complaint))\n tests=[]\n for name in dir(module):\n obj=getattr(module,name)\n if isinstance(obj,type)and issubclass(obj,case.TestCase):\n tests.append(self.loadTestsFromTestCase(obj))\n \n load_tests=getattr(module,'load_tests',None )\n tests=self.suiteClass(tests)\n if load_tests is not None :\n try :\n return load_tests(self,tests,pattern)\n except Exception as e:\n error_case,error_message=_make_failed_load_tests(\n module.__name__,e,self.suiteClass)\n self.errors.append(error_message)\n return error_case\n return tests\n \n def loadTestsFromName(self,name,module=None ):\n ''\n\n\n\n\n\n\n \n parts=name.split('.')\n error_case,error_message=None ,None\n if module is None :\n parts_copy=parts[:]\n while parts_copy:\n try :\n module_name='.'.join(parts_copy)\n module=__import__(module_name)\n break\n except ImportError:\n next_attribute=parts_copy.pop()\n \n error_case,error_message=_make_failed_import_test(\n next_attribute,self.suiteClass)\n if not parts_copy:\n \n self.errors.append(error_message)\n return error_case\n parts=parts[1:]\n obj=module\n for part in parts:\n try :\n parent,obj=obj,getattr(obj,part)\n except AttributeError as e:\n \n if (getattr(obj,'__path__',None )is not None\n and error_case is not None ):\n \n \n \n \n \n self.errors.append(error_message)\n return error_case\n else :\n \n error_case,error_message=_make_failed_test(\n part,e,self.suiteClass,\n 'Failed to access attribute:\\n%s'%(\n traceback.format_exc(),))\n self.errors.append(error_message)\n return error_case\n \n if isinstance(obj,types.ModuleType):\n return self.loadTestsFromModule(obj)\n elif isinstance(obj,type)and issubclass(obj,case.TestCase):\n return self.loadTestsFromTestCase(obj)\n elif (isinstance(obj,types.FunctionType)and\n isinstance(parent,type)and\n issubclass(parent,case.TestCase)):\n name=parts[-1]\n inst=parent(name)\n \n if not isinstance(getattr(inst,name),types.FunctionType):\n return self.suiteClass([inst])\n elif isinstance(obj,suite.TestSuite):\n return obj\n if callable(obj):\n test=obj()\n if isinstance(test,suite.TestSuite):\n return test\n elif isinstance(test,case.TestCase):\n return self.suiteClass([test])\n else :\n raise TypeError(\"calling %s returned %s, not a test\"%\n (obj,test))\n else :\n raise TypeError(\"don't know how to make test from: %s\"%obj)\n \n def loadTestsFromNames(self,names,module=None ):\n ''\n\n \n suites=[self.loadTestsFromName(name,module)for name in names]\n return self.suiteClass(suites)\n \n def getTestCaseNames(self,testCaseClass):\n ''\n \n def shouldIncludeMethod(attrname):\n if not attrname.startswith(self.testMethodPrefix):\n return False\n testFunc=getattr(testCaseClass,attrname)\n if not callable(testFunc):\n return 
False\n fullName='%s.%s'%(testCaseClass.__module__,testFunc.__qualname__)\n return self.testNamePatterns is None or\\\n any(fnmatchcase(fullName,pattern)for pattern in self.testNamePatterns)\n testFnNames=list(filter(shouldIncludeMethod,dir(testCaseClass)))\n if self.sortTestMethodsUsing:\n testFnNames.sort(key=functools.cmp_to_key(self.sortTestMethodsUsing))\n return testFnNames\n \n def discover(self,start_dir,pattern='test*.py',top_level_dir=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n set_implicit_top=False\n if top_level_dir is None and self._top_level_dir is not None :\n \n top_level_dir=self._top_level_dir\n elif top_level_dir is None :\n set_implicit_top=True\n top_level_dir=start_dir\n \n top_level_dir=os.path.abspath(top_level_dir)\n \n if not top_level_dir in sys.path:\n \n \n \n \n sys.path.insert(0,top_level_dir)\n self._top_level_dir=top_level_dir\n \n is_not_importable=False\n is_namespace=False\n tests=[]\n if os.path.isdir(os.path.abspath(start_dir)):\n start_dir=os.path.abspath(start_dir)\n if start_dir !=top_level_dir:\n is_not_importable=not os.path.isfile(os.path.join(start_dir,'__init__.py'))\n else :\n \n try :\n __import__(start_dir)\n except ImportError:\n is_not_importable=True\n else :\n the_module=sys.modules[start_dir]\n top_part=start_dir.split('.')[0]\n try :\n start_dir=os.path.abspath(\n os.path.dirname((the_module.__file__)))\n except AttributeError:\n \n try :\n spec=the_module.__spec__\n except AttributeError:\n spec=None\n \n if spec and spec.loader is None :\n if spec.submodule_search_locations is not None :\n is_namespace=True\n \n for path in the_module.__path__:\n if (not set_implicit_top and\n not path.startswith(top_level_dir)):\n continue\n self._top_level_dir=\\\n (path.split(the_module.__name__\n .replace(\".\",os.path.sep))[0])\n tests.extend(self._find_tests(path,\n pattern,\n namespace=True ))\n elif the_module.__name__ in sys.builtin_module_names:\n \n raise TypeError('Can not use builtin modules '\n 'as dotted module names')from None\n else :\n raise TypeError(\n 'don\\'t know how to discover from {!r}'\n .format(the_module))from None\n \n if set_implicit_top:\n if not is_namespace:\n self._top_level_dir=\\\n self._get_directory_containing_module(top_part)\n sys.path.remove(top_level_dir)\n else :\n sys.path.remove(top_level_dir)\n \n if is_not_importable:\n raise ImportError('Start directory is not importable: %r'%start_dir)\n \n if not is_namespace:\n tests=list(self._find_tests(start_dir,pattern))\n return self.suiteClass(tests)\n \n def _get_directory_containing_module(self,module_name):\n module=sys.modules[module_name]\n full_path=os.path.abspath(module.__file__)\n \n if os.path.basename(full_path).lower().startswith('__init__.py'):\n return os.path.dirname(os.path.dirname(full_path))\n else :\n \n \n \n return os.path.dirname(full_path)\n \n def _get_name_from_path(self,path):\n if path ==self._top_level_dir:\n return '.'\n path=_jython_aware_splitext(os.path.normpath(path))\n \n _relpath=os.path.relpath(path,self._top_level_dir)\n assert not os.path.isabs(_relpath),\"Path must be within the project\"\n assert not _relpath.startswith('..'),\"Path must be within the project\"\n \n name=_relpath.replace(os.path.sep,'.')\n return name\n \n def _get_module_from_name(self,name):\n __import__(name)\n return sys.modules[name]\n \n def _match_path(self,path,full_path,pattern):\n \n return fnmatch(path,pattern)\n \n def _find_tests(self,start_dir,pattern,namespace=False ):\n ''\n \n 
name=self._get_name_from_path(start_dir)\n \n \n if name !='.'and name not in self._loading_packages:\n \n \n tests,should_recurse=self._find_test_path(\n start_dir,pattern,namespace)\n if tests is not None :\n yield tests\n if not should_recurse:\n \n \n return\n \n paths=sorted(os.listdir(start_dir))\n for path in paths:\n full_path=os.path.join(start_dir,path)\n tests,should_recurse=self._find_test_path(\n full_path,pattern,namespace)\n if tests is not None :\n yield tests\n if should_recurse:\n \n name=self._get_name_from_path(full_path)\n self._loading_packages.add(name)\n try :\n yield from self._find_tests(full_path,pattern,namespace)\n finally :\n self._loading_packages.discard(name)\n \n def _find_test_path(self,full_path,pattern,namespace=False ):\n ''\n\n\n\n\n\n \n basename=os.path.basename(full_path)\n if os.path.isfile(full_path):\n if not VALID_MODULE_NAME.match(basename):\n \n return None ,False\n if not self._match_path(basename,full_path,pattern):\n return None ,False\n \n name=self._get_name_from_path(full_path)\n try :\n module=self._get_module_from_name(name)\n except case.SkipTest as e:\n return _make_skipped_test(name,e,self.suiteClass),False\n except :\n error_case,error_message=\\\n _make_failed_import_test(name,self.suiteClass)\n self.errors.append(error_message)\n return error_case,False\n else :\n mod_file=os.path.abspath(\n getattr(module,'__file__',full_path))\n realpath=_jython_aware_splitext(\n os.path.realpath(mod_file))\n fullpath_noext=_jython_aware_splitext(\n os.path.realpath(full_path))\n if realpath.lower()!=fullpath_noext.lower():\n module_dir=os.path.dirname(realpath)\n mod_name=_jython_aware_splitext(\n os.path.basename(full_path))\n expected_dir=os.path.dirname(full_path)\n msg=(\"%r module incorrectly imported from %r. Expected \"\n \"%r. 
Is this module globally installed?\")\n raise ImportError(\n msg %(mod_name,module_dir,expected_dir))\n return self.loadTestsFromModule(module,pattern=pattern),False\n elif os.path.isdir(full_path):\n if (not namespace and\n not os.path.isfile(os.path.join(full_path,'__init__.py'))):\n return None ,False\n \n load_tests=None\n tests=None\n name=self._get_name_from_path(full_path)\n try :\n package=self._get_module_from_name(name)\n except case.SkipTest as e:\n return _make_skipped_test(name,e,self.suiteClass),False\n except :\n error_case,error_message=\\\n _make_failed_import_test(name,self.suiteClass)\n self.errors.append(error_message)\n return error_case,False\n else :\n load_tests=getattr(package,'load_tests',None )\n \n self._loading_packages.add(name)\n try :\n tests=self.loadTestsFromModule(package,pattern=pattern)\n if load_tests is not None :\n \n return tests,False\n return tests,True\n finally :\n self._loading_packages.discard(name)\n else :\n return None ,False\n \n \ndefaultTestLoader=TestLoader()\n\n\ndef _makeLoader(prefix,sortUsing,suiteClass=None ,testNamePatterns=None ):\n loader=TestLoader()\n loader.sortTestMethodsUsing=sortUsing\n loader.testMethodPrefix=prefix\n loader.testNamePatterns=testNamePatterns\n if suiteClass:\n loader.suiteClass=suiteClass\n return loader\n \ndef getTestCaseNames(testCaseClass,prefix,sortUsing=util.three_way_cmp,testNamePatterns=None ):\n return _makeLoader(prefix,sortUsing,testNamePatterns=testNamePatterns).getTestCaseNames(testCaseClass)\n \ndef makeSuite(testCaseClass,prefix='test',sortUsing=util.three_way_cmp,\nsuiteClass=suite.TestSuite):\n return _makeLoader(prefix,sortUsing,suiteClass).loadTestsFromTestCase(\n testCaseClass)\n \ndef findTestCases(module,prefix='test',sortUsing=util.three_way_cmp,\nsuiteClass=suite.TestSuite):\n return _makeLoader(prefix,sortUsing,suiteClass).loadTestsFromModule(\\\n module)\n", ["fnmatch", "functools", "os", "re", "sys", "traceback", "types", "unittest", "unittest.case", "unittest.suite", "unittest.util", "warnings"]], "unittest.main": [".py", "''\n\nimport sys\nimport argparse\nimport os\n\nfrom . 
import loader,runner\nfrom .signals import installHandler\n\n__unittest=True\n\nMAIN_EXAMPLES=\"\"\"\\\nExamples:\n %(prog)s test_module - run tests from test_module\n %(prog)s module.TestClass - run tests from module.TestClass\n %(prog)s module.Class.test_method - run specified test method\n %(prog)s path/to/test_file.py - run tests from test_file.py\n\"\"\"\n\nMODULE_EXAMPLES=\"\"\"\\\nExamples:\n %(prog)s - run default set of tests\n %(prog)s MyTestSuite - run suite 'MyTestSuite'\n %(prog)s MyTestCase.testSomething - run MyTestCase.testSomething\n %(prog)s MyTestCase - run all 'test*' test methods\n in MyTestCase\n\"\"\"\n\ndef _convert_name(name):\n\n\n\n\n if os.path.isfile(name)and name.lower().endswith('.py'):\n if os.path.isabs(name):\n rel_path=os.path.relpath(name,os.getcwd())\n if os.path.isabs(rel_path)or rel_path.startswith(os.pardir):\n return name\n name=rel_path\n \n \n return name[:-3].replace('\\\\','.').replace('/','.')\n return name\n \ndef _convert_names(names):\n return [_convert_name(name)for name in names]\n \n \ndef _convert_select_pattern(pattern):\n if not '*'in pattern:\n pattern='*%s*'%pattern\n return pattern\n \n \nclass TestProgram(object):\n ''\n\n \n \n module=None\n verbosity=1\n failfast=catchbreak=buffer=progName=warnings=testNamePatterns=None\n _discovery_parser=None\n \n def __init__(self,module='__main__',defaultTest=None ,argv=None ,\n testRunner=None ,testLoader=loader.defaultTestLoader,\n exit=True ,verbosity=1,failfast=None ,catchbreak=None ,\n buffer=None ,warnings=None ,*,tb_locals=False ):\n if isinstance(module,str):\n self.module=__import__(module)\n for part in module.split('.')[1:]:\n self.module=getattr(self.module,part)\n else :\n self.module=module\n if argv is None :\n argv=sys.argv\n \n self.exit=exit\n self.failfast=failfast\n self.catchbreak=catchbreak\n self.verbosity=verbosity\n self.buffer=buffer\n self.tb_locals=tb_locals\n if warnings is None and not sys.warnoptions:\n \n \n \n self.warnings='default'\n else :\n \n \n \n \n \n self.warnings=warnings\n self.defaultTest=defaultTest\n self.testRunner=testRunner\n self.testLoader=testLoader\n self.progName=os.path.basename(argv[0])\n self.parseArgs(argv)\n self.runTests()\n \n def usageExit(self,msg=None ):\n if msg:\n print(msg)\n if self._discovery_parser is None :\n self._initArgParsers()\n self._print_help()\n sys.exit(2)\n \n def _print_help(self,*args,**kwargs):\n if self.module is None :\n print(self._main_parser.format_help())\n print(MAIN_EXAMPLES %{'prog':self.progName})\n self._discovery_parser.print_help()\n else :\n print(self._main_parser.format_help())\n print(MODULE_EXAMPLES %{'prog':self.progName})\n \n def parseArgs(self,argv):\n self._initArgParsers()\n if self.module is None :\n if len(argv)>1 and argv[1].lower()=='discover':\n self._do_discovery(argv[2:])\n return\n self._main_parser.parse_args(argv[1:],self)\n if not self.tests:\n \n \n self._do_discovery([])\n return\n else :\n self._main_parser.parse_args(argv[1:],self)\n \n if self.tests:\n self.testNames=_convert_names(self.tests)\n if __name__ =='__main__':\n \n self.module=None\n elif self.defaultTest is None :\n \n self.testNames=None\n elif isinstance(self.defaultTest,str):\n self.testNames=(self.defaultTest,)\n else :\n self.testNames=list(self.defaultTest)\n self.createTests()\n \n def createTests(self,from_discovery=False ,Loader=None ):\n if self.testNamePatterns:\n self.testLoader.testNamePatterns=self.testNamePatterns\n if from_discovery:\n loader=self.testLoader if Loader is None else Loader()\n 
self.test=loader.discover(self.start,self.pattern,self.top)\n elif self.testNames is None :\n self.test=self.testLoader.loadTestsFromModule(self.module)\n else :\n self.test=self.testLoader.loadTestsFromNames(self.testNames,\n self.module)\n \n def _initArgParsers(self):\n parent_parser=self._getParentArgParser()\n self._main_parser=self._getMainArgParser(parent_parser)\n self._discovery_parser=self._getDiscoveryArgParser(parent_parser)\n \n def _getParentArgParser(self):\n parser=argparse.ArgumentParser(add_help=False )\n \n parser.add_argument('-v','--verbose',dest='verbosity',\n action='store_const',const=2,\n help='Verbose output')\n parser.add_argument('-q','--quiet',dest='verbosity',\n action='store_const',const=0,\n help='Quiet output')\n parser.add_argument('--locals',dest='tb_locals',\n action='store_true',\n help='Show local variables in tracebacks')\n if self.failfast is None :\n parser.add_argument('-f','--failfast',dest='failfast',\n action='store_true',\n help='Stop on first fail or error')\n self.failfast=False\n if self.catchbreak is None :\n parser.add_argument('-c','--catch',dest='catchbreak',\n action='store_true',\n help='Catch Ctrl-C and display results so far')\n self.catchbreak=False\n if self.buffer is None :\n parser.add_argument('-b','--buffer',dest='buffer',\n action='store_true',\n help='Buffer stdout and stderr during tests')\n self.buffer=False\n if self.testNamePatterns is None :\n parser.add_argument('-k',dest='testNamePatterns',\n action='append',type=_convert_select_pattern,\n help='Only run tests which match the given substring')\n self.testNamePatterns=[]\n \n return parser\n \n def _getMainArgParser(self,parent):\n parser=argparse.ArgumentParser(parents=[parent])\n parser.prog=self.progName\n parser.print_help=self._print_help\n \n parser.add_argument('tests',nargs='*',\n help='a list of any number of test modules, '\n 'classes and test methods.')\n \n return parser\n \n def _getDiscoveryArgParser(self,parent):\n parser=argparse.ArgumentParser(parents=[parent])\n parser.prog='%s discover'%self.progName\n parser.epilog=('For test discovery all test modules must be '\n 'importable from the top level directory of the '\n 'project.')\n \n parser.add_argument('-s','--start-directory',dest='start',\n help=\"Directory to start discovery ('.' 
default)\")\n parser.add_argument('-p','--pattern',dest='pattern',\n help=\"Pattern to match tests ('test*.py' default)\")\n parser.add_argument('-t','--top-level-directory',dest='top',\n help='Top level directory of project (defaults to '\n 'start directory)')\n for arg in ('start','pattern','top'):\n parser.add_argument(arg,nargs='?',\n default=argparse.SUPPRESS,\n help=argparse.SUPPRESS)\n \n return parser\n \n def _do_discovery(self,argv,Loader=None ):\n self.start='.'\n self.pattern='test*.py'\n self.top=None\n if argv is not None :\n \n if self._discovery_parser is None :\n \n self._initArgParsers()\n self._discovery_parser.parse_args(argv,self)\n \n self.createTests(from_discovery=True ,Loader=Loader)\n \n def runTests(self):\n if self.catchbreak:\n installHandler()\n if self.testRunner is None :\n self.testRunner=runner.TextTestRunner\n if isinstance(self.testRunner,type):\n try :\n try :\n testRunner=self.testRunner(verbosity=self.verbosity,\n failfast=self.failfast,\n buffer=self.buffer,\n warnings=self.warnings,\n tb_locals=self.tb_locals)\n except TypeError:\n \n testRunner=self.testRunner(verbosity=self.verbosity,\n failfast=self.failfast,\n buffer=self.buffer,\n warnings=self.warnings)\n except TypeError:\n \n testRunner=self.testRunner()\n else :\n \n testRunner=self.testRunner\n self.result=testRunner.run(self.test)\n if self.exit:\n sys.exit(not self.result.wasSuccessful())\n \nmain=TestProgram\n", ["argparse", "os", "sys", "unittest", "unittest.loader", "unittest.runner", "unittest.signals"]], "unittest.mock": [".py", "\n\n\n\n\n\n__all__=(\n'Mock',\n'MagicMock',\n'patch',\n'sentinel',\n'DEFAULT',\n'ANY',\n'call',\n'create_autospec',\n'FILTER_DIR',\n'NonCallableMock',\n'NonCallableMagicMock',\n'mock_open',\n'PropertyMock',\n'seal',\n)\n\n\n__version__='1.0'\n\n\nimport inspect\nimport pprint\nimport sys\nimport builtins\nfrom types import ModuleType\nfrom functools import wraps,partial\n\n\n_builtins={name for name in dir(builtins)if not name.startswith('_')}\n\nBaseExceptions=(BaseException,)\nif 'java'in sys.platform:\n\n import java\n BaseExceptions=(BaseException,java.lang.Throwable)\n \n \nFILTER_DIR=True\n\n\n\n_safe_super=super\n\ndef _is_instance_mock(obj):\n\n\n return issubclass(type(obj),NonCallableMock)\n \n \ndef _is_exception(obj):\n return (\n isinstance(obj,BaseExceptions)or\n isinstance(obj,type)and issubclass(obj,BaseExceptions)\n )\n \n \ndef _get_signature_object(func,as_instance,eat_self):\n ''\n\n\n\n \n if isinstance(func,type)and not as_instance:\n \n try :\n func=func.__init__\n except AttributeError:\n return None\n \n eat_self=True\n elif not isinstance(func,FunctionTypes):\n \n \n try :\n func=func.__call__\n except AttributeError:\n return None\n if eat_self:\n sig_func=partial(func,None )\n else :\n sig_func=func\n try :\n return func,inspect.signature(sig_func)\n except ValueError:\n \n return None\n \n \ndef _check_signature(func,mock,skipfirst,instance=False ):\n sig=_get_signature_object(func,instance,skipfirst)\n if sig is None :\n return\n func,sig=sig\n def checksig(_mock_self,*args,**kwargs):\n sig.bind(*args,**kwargs)\n _copy_func_details(func,checksig)\n type(mock)._mock_check_sig=checksig\n \n \ndef _copy_func_details(func,funcopy):\n\n\n for attribute in (\n '__name__','__doc__','__text_signature__',\n '__module__','__defaults__','__kwdefaults__',\n ):\n try :\n setattr(funcopy,attribute,getattr(func,attribute))\n except AttributeError:\n pass\n \n \ndef _callable(obj):\n if isinstance(obj,type):\n return True\n if 
getattr(obj,'__call__',None )is not None :\n return True\n return False\n \n \ndef _is_list(obj):\n\n\n return type(obj)in (list,tuple)\n \n \ndef _instance_callable(obj):\n ''\n \n if not isinstance(obj,type):\n \n return getattr(obj,'__call__',None )is not None\n \n \n \n for base in (obj,)+obj.__mro__:\n if base.__dict__.get('__call__')is not None :\n return True\n return False\n \n \ndef _set_signature(mock,original,instance=False ):\n\n\n\n if not _callable(original):\n return\n \n skipfirst=isinstance(original,type)\n result=_get_signature_object(original,instance,skipfirst)\n if result is None :\n return mock\n func,sig=result\n def checksig(*args,**kwargs):\n sig.bind(*args,**kwargs)\n _copy_func_details(func,checksig)\n \n name=original.__name__\n if not name.isidentifier():\n name='funcopy'\n context={'_checksig_':checksig,'mock':mock}\n src=\"\"\"def %s(*args, **kwargs):\n _checksig_(*args, **kwargs)\n return mock(*args, **kwargs)\"\"\"%name\n exec(src,context)\n funcopy=context[name]\n _setup_func(funcopy,mock)\n return funcopy\n \n \ndef _setup_func(funcopy,mock):\n funcopy.mock=mock\n \n \n if not _is_instance_mock(mock):\n return\n \n def assert_called_with(*args,**kwargs):\n return mock.assert_called_with(*args,**kwargs)\n def assert_called(*args,**kwargs):\n return mock.assert_called(*args,**kwargs)\n def assert_not_called(*args,**kwargs):\n return mock.assert_not_called(*args,**kwargs)\n def assert_called_once(*args,**kwargs):\n return mock.assert_called_once(*args,**kwargs)\n def assert_called_once_with(*args,**kwargs):\n return mock.assert_called_once_with(*args,**kwargs)\n def assert_has_calls(*args,**kwargs):\n return mock.assert_has_calls(*args,**kwargs)\n def assert_any_call(*args,**kwargs):\n return mock.assert_any_call(*args,**kwargs)\n def reset_mock():\n funcopy.method_calls=_CallList()\n funcopy.mock_calls=_CallList()\n mock.reset_mock()\n ret=funcopy.return_value\n if _is_instance_mock(ret)and not ret is mock:\n ret.reset_mock()\n \n funcopy.called=False\n funcopy.call_count=0\n funcopy.call_args=None\n funcopy.call_args_list=_CallList()\n funcopy.method_calls=_CallList()\n funcopy.mock_calls=_CallList()\n \n funcopy.return_value=mock.return_value\n funcopy.side_effect=mock.side_effect\n funcopy._mock_children=mock._mock_children\n \n funcopy.assert_called_with=assert_called_with\n funcopy.assert_called_once_with=assert_called_once_with\n funcopy.assert_has_calls=assert_has_calls\n funcopy.assert_any_call=assert_any_call\n funcopy.reset_mock=reset_mock\n funcopy.assert_called=assert_called\n funcopy.assert_not_called=assert_not_called\n funcopy.assert_called_once=assert_called_once\n \n mock._mock_delegate=funcopy\n \n \ndef _is_magic(name):\n return '__%s__'%name[2:-2]==name\n \n \nclass _SentinelObject(object):\n ''\n def __init__(self,name):\n self.name=name\n \n def __repr__(self):\n return 'sentinel.%s'%self.name\n \n def __reduce__(self):\n return 'sentinel.%s'%self.name\n \n \nclass _Sentinel(object):\n ''\n def __init__(self):\n self._sentinels={}\n \n def __getattr__(self,name):\n if name =='__bases__':\n \n raise AttributeError\n return self._sentinels.setdefault(name,_SentinelObject(name))\n \n def __reduce__(self):\n return 'sentinel'\n \n \nsentinel=_Sentinel()\n\nDEFAULT=sentinel.DEFAULT\n_missing=sentinel.MISSING\n_deleted=sentinel.DELETED\n\n\ndef _copy(value):\n if type(value)in (dict,list,tuple,set):\n return type(value)(value)\n return value\n \n 
\n_allowed_names={\n'return_value','_mock_return_value','side_effect',\n'_mock_side_effect','_mock_parent','_mock_new_parent',\n'_mock_name','_mock_new_name'\n}\n\n\ndef _delegating_property(name):\n _allowed_names.add(name)\n _the_name='_mock_'+name\n def _get(self,name=name,_the_name=_the_name):\n sig=self._mock_delegate\n if sig is None :\n return getattr(self,_the_name)\n return getattr(sig,name)\n def _set(self,value,name=name,_the_name=_the_name):\n sig=self._mock_delegate\n if sig is None :\n self.__dict__[_the_name]=value\n else :\n setattr(sig,name,value)\n \n return property(_get,_set)\n \n \n \nclass _CallList(list):\n\n def __contains__(self,value):\n if not isinstance(value,list):\n return list.__contains__(self,value)\n len_value=len(value)\n len_self=len(self)\n if len_value >len_self:\n return False\n \n for i in range(0,len_self -len_value+1):\n sub_list=self[i:i+len_value]\n if sub_list ==value:\n return True\n return False\n \n def __repr__(self):\n return pprint.pformat(list(self))\n \n \ndef _check_and_set_parent(parent,value,name,new_name):\n if not _is_instance_mock(value):\n return False\n if ((value._mock_name or value._mock_new_name)or\n (value._mock_parent is not None )or\n (value._mock_new_parent is not None )):\n return False\n \n _parent=parent\n while _parent is not None :\n \n \n if _parent is value:\n return False\n _parent=_parent._mock_new_parent\n \n if new_name:\n value._mock_new_parent=parent\n value._mock_new_name=new_name\n if name:\n value._mock_parent=parent\n value._mock_name=name\n return True\n \n \nclass _MockIter(object):\n def __init__(self,obj):\n self.obj=iter(obj)\n def __iter__(self):\n return self\n def __next__(self):\n return next(self.obj)\n \nclass Base(object):\n _mock_return_value=DEFAULT\n _mock_side_effect=None\n def __init__(self,*args,**kwargs):\n pass\n \n \n \nclass NonCallableMock(Base):\n ''\n \n def __new__(cls,*args,**kw):\n \n \n \n new=type(cls.__name__,(cls,),{'__doc__':cls.__doc__})\n instance=object.__new__(new)\n return instance\n \n \n def __init__(\n self,spec=None ,wraps=None ,name=None ,spec_set=None ,\n parent=None ,_spec_state=None ,_new_name='',_new_parent=None ,\n _spec_as_instance=False ,_eat_self=None ,unsafe=False ,**kwargs\n ):\n if _new_parent is None :\n _new_parent=parent\n \n __dict__=self.__dict__\n __dict__['_mock_parent']=parent\n __dict__['_mock_name']=name\n __dict__['_mock_new_name']=_new_name\n __dict__['_mock_new_parent']=_new_parent\n __dict__['_mock_sealed']=False\n \n if spec_set is not None :\n spec=spec_set\n spec_set=True\n if _eat_self is None :\n _eat_self=parent is not None\n \n self._mock_add_spec(spec,spec_set,_spec_as_instance,_eat_self)\n \n __dict__['_mock_children']={}\n __dict__['_mock_wraps']=wraps\n __dict__['_mock_delegate']=None\n \n __dict__['_mock_called']=False\n __dict__['_mock_call_args']=None\n __dict__['_mock_call_count']=0\n __dict__['_mock_call_args_list']=_CallList()\n __dict__['_mock_mock_calls']=_CallList()\n \n __dict__['method_calls']=_CallList()\n __dict__['_mock_unsafe']=unsafe\n \n if kwargs:\n self.configure_mock(**kwargs)\n \n _safe_super(NonCallableMock,self).__init__(\n spec,wraps,name,spec_set,parent,\n _spec_state\n )\n \n \n def attach_mock(self,mock,attribute):\n ''\n\n\n \n mock._mock_parent=None\n mock._mock_new_parent=None\n mock._mock_name=''\n mock._mock_new_name=None\n \n setattr(self,attribute,mock)\n \n \n def mock_add_spec(self,spec,spec_set=False ):\n ''\n\n\n\n \n self._mock_add_spec(spec,spec_set)\n \n \n def 
_mock_add_spec(self,spec,spec_set,_spec_as_instance=False ,\n _eat_self=False ):\n _spec_class=None\n _spec_signature=None\n \n if spec is not None and not _is_list(spec):\n if isinstance(spec,type):\n _spec_class=spec\n else :\n _spec_class=_get_class(spec)\n res=_get_signature_object(spec,\n _spec_as_instance,_eat_self)\n _spec_signature=res and res[1]\n \n spec=dir(spec)\n \n __dict__=self.__dict__\n __dict__['_spec_class']=_spec_class\n __dict__['_spec_set']=spec_set\n __dict__['_spec_signature']=_spec_signature\n __dict__['_mock_methods']=spec\n \n \n def __get_return_value(self):\n ret=self._mock_return_value\n if self._mock_delegate is not None :\n ret=self._mock_delegate.return_value\n \n if ret is DEFAULT:\n ret=self._get_child_mock(\n _new_parent=self,_new_name='()'\n )\n self.return_value=ret\n return ret\n \n \n def __set_return_value(self,value):\n if self._mock_delegate is not None :\n self._mock_delegate.return_value=value\n else :\n self._mock_return_value=value\n _check_and_set_parent(self,value,None ,'()')\n \n __return_value_doc=\"The value to be returned when the mock is called.\"\n return_value=property(__get_return_value,__set_return_value,\n __return_value_doc)\n \n \n @property\n def __class__(self):\n if self._spec_class is None :\n return type(self)\n return self._spec_class\n \n called=_delegating_property('called')\n call_count=_delegating_property('call_count')\n call_args=_delegating_property('call_args')\n call_args_list=_delegating_property('call_args_list')\n mock_calls=_delegating_property('mock_calls')\n \n \n def __get_side_effect(self):\n delegated=self._mock_delegate\n if delegated is None :\n return self._mock_side_effect\n sf=delegated.side_effect\n if (sf is not None and not callable(sf)\n and not isinstance(sf,_MockIter)and not _is_exception(sf)):\n sf=_MockIter(sf)\n delegated.side_effect=sf\n return sf\n \n def __set_side_effect(self,value):\n value=_try_iter(value)\n delegated=self._mock_delegate\n if delegated is None :\n self._mock_side_effect=value\n else :\n delegated.side_effect=value\n \n side_effect=property(__get_side_effect,__set_side_effect)\n \n \n def reset_mock(self,visited=None ,*,return_value=False ,side_effect=False ):\n ''\n if visited is None :\n visited=[]\n if id(self)in visited:\n return\n visited.append(id(self))\n \n self.called=False\n self.call_args=None\n self.call_count=0\n self.mock_calls=_CallList()\n self.call_args_list=_CallList()\n self.method_calls=_CallList()\n \n if return_value:\n self._mock_return_value=DEFAULT\n if side_effect:\n self._mock_side_effect=None\n \n for child in self._mock_children.values():\n if isinstance(child,_SpecState):\n continue\n child.reset_mock(visited)\n \n ret=self._mock_return_value\n if _is_instance_mock(ret)and ret is not self:\n ret.reset_mock(visited)\n \n \n def configure_mock(self,**kwargs):\n ''\n\n\n\n\n\n\n \n for arg,val in sorted(kwargs.items(),\n \n \n \n key=lambda entry:entry[0].count('.')):\n args=arg.split('.')\n final=args.pop()\n obj=self\n for entry in args:\n obj=getattr(obj,entry)\n setattr(obj,final,val)\n \n \n def __getattr__(self,name):\n if name in {'_mock_methods','_mock_unsafe'}:\n raise AttributeError(name)\n elif self._mock_methods is not None :\n if name not in self._mock_methods or name in _all_magics:\n raise AttributeError(\"Mock object has no attribute %r\"%name)\n elif _is_magic(name):\n raise AttributeError(name)\n if not self._mock_unsafe:\n if name.startswith(('assert','assret')):\n raise AttributeError(name)\n \n 
result=self._mock_children.get(name)\n if result is _deleted:\n raise AttributeError(name)\n elif result is None :\n wraps=None\n if self._mock_wraps is not None :\n \n \n wraps=getattr(self._mock_wraps,name)\n \n result=self._get_child_mock(\n parent=self,name=name,wraps=wraps,_new_name=name,\n _new_parent=self\n )\n self._mock_children[name]=result\n \n elif isinstance(result,_SpecState):\n result=create_autospec(\n result.spec,result.spec_set,result.instance,\n result.parent,result.name\n )\n self._mock_children[name]=result\n \n return result\n \n \n def _extract_mock_name(self):\n _name_list=[self._mock_new_name]\n _parent=self._mock_new_parent\n last=self\n \n dot='.'\n if _name_list ==['()']:\n dot=''\n seen=set()\n while _parent is not None :\n last=_parent\n \n _name_list.append(_parent._mock_new_name+dot)\n dot='.'\n if _parent._mock_new_name =='()':\n dot=''\n \n _parent=_parent._mock_new_parent\n \n \n if id(_parent)in seen:\n break\n seen.add(id(_parent))\n \n _name_list=list(reversed(_name_list))\n _first=last._mock_name or 'mock'\n if len(_name_list)>1:\n if _name_list[1]not in ('()','().'):\n _first +='.'\n _name_list[0]=_first\n return ''.join(_name_list)\n \n def __repr__(self):\n name=self._extract_mock_name()\n \n name_string=''\n if name not in ('mock','mock.'):\n name_string=' name=%r'%name\n \n spec_string=''\n if self._spec_class is not None :\n spec_string=' spec=%r'\n if self._spec_set:\n spec_string=' spec_set=%r'\n spec_string=spec_string %self._spec_class.__name__\n return \"<%s%s%s id='%s'>\"%(\n type(self).__name__,\n name_string,\n spec_string,\n id(self)\n )\n \n \n def __dir__(self):\n ''\n if not FILTER_DIR:\n return object.__dir__(self)\n \n extras=self._mock_methods or []\n from_type=dir(type(self))\n from_dict=list(self.__dict__)\n \n from_type=[e for e in from_type if not e.startswith('_')]\n from_dict=[e for e in from_dict if not e.startswith('_')or\n _is_magic(e)]\n return sorted(set(extras+from_type+from_dict+\n list(self._mock_children)))\n \n \n def __setattr__(self,name,value):\n if name in _allowed_names:\n \n return object.__setattr__(self,name,value)\n elif (self._spec_set and self._mock_methods is not None and\n name not in self._mock_methods and\n name not in self.__dict__):\n raise AttributeError(\"Mock object has no attribute '%s'\"%name)\n elif name in _unsupported_magics:\n msg='Attempting to set unsupported magic method %r.'%name\n raise AttributeError(msg)\n elif name in _all_magics:\n if self._mock_methods is not None and name not in self._mock_methods:\n raise AttributeError(\"Mock object has no attribute '%s'\"%name)\n \n if not _is_instance_mock(value):\n setattr(type(self),name,_get_method(name,value))\n original=value\n value=lambda *args,**kw:original(self,*args,**kw)\n else :\n \n \n _check_and_set_parent(self,value,None ,name)\n setattr(type(self),name,value)\n self._mock_children[name]=value\n elif name =='__class__':\n self._spec_class=value\n return\n else :\n if _check_and_set_parent(self,value,name,name):\n self._mock_children[name]=value\n \n if self._mock_sealed and not hasattr(self,name):\n mock_name=f'{self._extract_mock_name()}.{name}'\n raise AttributeError(f'Cannot set {mock_name}')\n \n return object.__setattr__(self,name,value)\n \n \n def __delattr__(self,name):\n if name in _all_magics and name in type(self).__dict__:\n delattr(type(self),name)\n if name not in self.__dict__:\n \n \n return\n \n if name in self.__dict__:\n object.__delattr__(self,name)\n \n obj=self._mock_children.get(name,_missing)\n if obj 
is _deleted:\n raise AttributeError(name)\n if obj is not _missing:\n del self._mock_children[name]\n self._mock_children[name]=_deleted\n \n \n def _format_mock_call_signature(self,args,kwargs):\n name=self._mock_name or 'mock'\n return _format_call_signature(name,args,kwargs)\n \n \n def _format_mock_failure_message(self,args,kwargs):\n message='Expected call: %s\\nActual call: %s'\n expected_string=self._format_mock_call_signature(args,kwargs)\n call_args=self.call_args\n if len(call_args)==3:\n call_args=call_args[1:]\n actual_string=self._format_mock_call_signature(*call_args)\n return message %(expected_string,actual_string)\n \n \n def _call_matcher(self,_call):\n ''\n\n\n\n\n \n sig=self._spec_signature\n if sig is not None :\n if len(_call)==2:\n name=''\n args,kwargs=_call\n else :\n name,args,kwargs=_call\n try :\n return name,sig.bind(*args,**kwargs)\n except TypeError as e:\n return e.with_traceback(None )\n else :\n return _call\n \n def assert_not_called(_mock_self):\n ''\n \n self=_mock_self\n if self.call_count !=0:\n msg=(\"Expected '%s' to not have been called. Called %s times.\"%\n (self._mock_name or 'mock',self.call_count))\n raise AssertionError(msg)\n \n def assert_called(_mock_self):\n ''\n \n self=_mock_self\n if self.call_count ==0:\n msg=(\"Expected '%s' to have been called.\"%\n self._mock_name or 'mock')\n raise AssertionError(msg)\n \n def assert_called_once(_mock_self):\n ''\n \n self=_mock_self\n if not self.call_count ==1:\n msg=(\"Expected '%s' to have been called once. Called %s times.\"%\n (self._mock_name or 'mock',self.call_count))\n raise AssertionError(msg)\n \n def assert_called_with(_mock_self,*args,**kwargs):\n ''\n\n\n \n self=_mock_self\n if self.call_args is None :\n expected=self._format_mock_call_signature(args,kwargs)\n raise AssertionError('Expected call: %s\\nNot called'%(expected,))\n \n def _error_message():\n msg=self._format_mock_failure_message(args,kwargs)\n return msg\n expected=self._call_matcher((args,kwargs))\n actual=self._call_matcher(self.call_args)\n if expected !=actual:\n cause=expected if isinstance(expected,Exception)else None\n raise AssertionError(_error_message())from cause\n \n \n def assert_called_once_with(_mock_self,*args,**kwargs):\n ''\n \n self=_mock_self\n if not self.call_count ==1:\n msg=(\"Expected '%s' to be called once. 
Called %s times.\"%\n (self._mock_name or 'mock',self.call_count))\n raise AssertionError(msg)\n return self.assert_called_with(*args,**kwargs)\n \n \n def assert_has_calls(self,calls,any_order=False ):\n ''\n\n\n\n\n\n\n\n \n expected=[self._call_matcher(c)for c in calls]\n cause=expected if isinstance(expected,Exception)else None\n all_calls=_CallList(self._call_matcher(c)for c in self.mock_calls)\n if not any_order:\n if expected not in all_calls:\n raise AssertionError(\n 'Calls not found.\\nExpected: %r\\n'\n 'Actual: %r'%(_CallList(calls),self.mock_calls)\n )from cause\n return\n \n all_calls=list(all_calls)\n \n not_found=[]\n for kall in expected:\n try :\n all_calls.remove(kall)\n except ValueError:\n not_found.append(kall)\n if not_found:\n raise AssertionError(\n '%r not all found in call list'%(tuple(not_found),)\n )from cause\n \n \n def assert_any_call(self,*args,**kwargs):\n ''\n\n\n\n \n expected=self._call_matcher((args,kwargs))\n actual=[self._call_matcher(c)for c in self.call_args_list]\n if expected not in actual:\n cause=expected if isinstance(expected,Exception)else None\n expected_string=self._format_mock_call_signature(args,kwargs)\n raise AssertionError(\n '%s call not found'%expected_string\n )from cause\n \n \n def _get_child_mock(self,**kw):\n ''\n\n\n\n\n\n \n _type=type(self)\n if not issubclass(_type,CallableMixin):\n if issubclass(_type,NonCallableMagicMock):\n klass=MagicMock\n elif issubclass(_type,NonCallableMock):\n klass=Mock\n else :\n klass=_type.__mro__[1]\n \n if self._mock_sealed:\n attribute=\".\"+kw[\"name\"]if \"name\"in kw else \"()\"\n mock_name=self._extract_mock_name()+attribute\n raise AttributeError(mock_name)\n \n return klass(**kw)\n \n \n \ndef _try_iter(obj):\n if obj is None :\n return obj\n if _is_exception(obj):\n return obj\n if _callable(obj):\n return obj\n try :\n return iter(obj)\n except TypeError:\n \n \n return obj\n \n \n \nclass CallableMixin(Base):\n\n def __init__(self,spec=None ,side_effect=None ,return_value=DEFAULT,\n wraps=None ,name=None ,spec_set=None ,parent=None ,\n _spec_state=None ,_new_name='',_new_parent=None ,**kwargs):\n self.__dict__['_mock_return_value']=return_value\n \n _safe_super(CallableMixin,self).__init__(\n spec,wraps,name,spec_set,parent,\n _spec_state,_new_name,_new_parent,**kwargs\n )\n \n self.side_effect=side_effect\n \n \n def _mock_check_sig(self,*args,**kwargs):\n \n pass\n \n \n def __call__(_mock_self,*args,**kwargs):\n \n \n _mock_self._mock_check_sig(*args,**kwargs)\n return _mock_self._mock_call(*args,**kwargs)\n \n \n def _mock_call(_mock_self,*args,**kwargs):\n self=_mock_self\n self.called=True\n self.call_count +=1\n _new_name=self._mock_new_name\n _new_parent=self._mock_new_parent\n \n _call=_Call((args,kwargs),two=True )\n self.call_args=_call\n self.call_args_list.append(_call)\n self.mock_calls.append(_Call(('',args,kwargs)))\n \n seen=set()\n skip_next_dot=_new_name =='()'\n do_method_calls=self._mock_parent is not None\n name=self._mock_name\n while _new_parent is not None :\n this_mock_call=_Call((_new_name,args,kwargs))\n if _new_parent._mock_new_name:\n dot='.'\n if skip_next_dot:\n dot=''\n \n skip_next_dot=False\n if _new_parent._mock_new_name =='()':\n skip_next_dot=True\n \n _new_name=_new_parent._mock_new_name+dot+_new_name\n \n if do_method_calls:\n if _new_name ==name:\n this_method_call=this_mock_call\n else :\n this_method_call=_Call((name,args,kwargs))\n _new_parent.method_calls.append(this_method_call)\n \n do_method_calls=_new_parent._mock_parent is not None\n 
if do_method_calls:\n name=_new_parent._mock_name+'.'+name\n \n _new_parent.mock_calls.append(this_mock_call)\n _new_parent=_new_parent._mock_new_parent\n \n \n _new_parent_id=id(_new_parent)\n if _new_parent_id in seen:\n break\n seen.add(_new_parent_id)\n \n ret_val=DEFAULT\n effect=self.side_effect\n if effect is not None :\n if _is_exception(effect):\n raise effect\n \n if not _callable(effect):\n result=next(effect)\n if _is_exception(result):\n raise result\n if result is DEFAULT:\n result=self.return_value\n return result\n \n ret_val=effect(*args,**kwargs)\n \n if (self._mock_wraps is not None and\n self._mock_return_value is DEFAULT):\n return self._mock_wraps(*args,**kwargs)\n if ret_val is DEFAULT:\n ret_val=self.return_value\n return ret_val\n \n \n \nclass Mock(CallableMixin,NonCallableMock):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n \n \ndef _dot_lookup(thing,comp,import_path):\n try :\n return getattr(thing,comp)\n except AttributeError:\n __import__(import_path)\n return getattr(thing,comp)\n \n \ndef _importer(target):\n components=target.split('.')\n import_path=components.pop(0)\n thing=__import__(import_path)\n \n for comp in components:\n import_path +=\".%s\"%comp\n thing=_dot_lookup(thing,comp,import_path)\n return thing\n \n \ndef _is_started(patcher):\n\n return hasattr(patcher,'is_local')\n \n \nclass _patch(object):\n\n attribute_name=None\n _active_patches=[]\n \n def __init__(\n self,getter,attribute,new,spec,create,\n spec_set,autospec,new_callable,kwargs\n ):\n if new_callable is not None :\n if new is not DEFAULT:\n raise ValueError(\n \"Cannot use 'new' and 'new_callable' together\"\n )\n if autospec is not None :\n raise ValueError(\n \"Cannot use 'autospec' and 'new_callable' together\"\n )\n \n self.getter=getter\n self.attribute=attribute\n self.new=new\n self.new_callable=new_callable\n self.spec=spec\n self.create=create\n self.has_local=False\n self.spec_set=spec_set\n self.autospec=autospec\n self.kwargs=kwargs\n self.additional_patchers=[]\n \n \n def copy(self):\n patcher=_patch(\n self.getter,self.attribute,self.new,self.spec,\n self.create,self.spec_set,\n self.autospec,self.new_callable,self.kwargs\n )\n patcher.attribute_name=self.attribute_name\n patcher.additional_patchers=[\n p.copy()for p in self.additional_patchers\n ]\n return patcher\n \n \n def __call__(self,func):\n if isinstance(func,type):\n return self.decorate_class(func)\n return self.decorate_callable(func)\n \n \n def decorate_class(self,klass):\n for attr in dir(klass):\n if not attr.startswith(patch.TEST_PREFIX):\n continue\n \n attr_value=getattr(klass,attr)\n if not hasattr(attr_value,\"__call__\"):\n continue\n \n patcher=self.copy()\n setattr(klass,attr,patcher(attr_value))\n return klass\n \n \n def decorate_callable(self,func):\n if hasattr(func,'patchings'):\n func.patchings.append(self)\n return func\n \n @wraps(func)\n def patched(*args,**keywargs):\n extra_args=[]\n entered_patchers=[]\n \n exc_info=tuple()\n try :\n for patching in patched.patchings:\n arg=patching.__enter__()\n entered_patchers.append(patching)\n if patching.attribute_name is not None :\n keywargs.update(arg)\n elif patching.new is DEFAULT:\n extra_args.append(arg)\n \n args +=tuple(extra_args)\n return func(*args,**keywargs)\n except :\n if (patching not in entered_patchers and\n _is_started(patching)):\n \n \n entered_patchers.append(patching)\n \n exc_info=sys.exc_info()\n \n raise\n finally :\n for patching in 
reversed(entered_patchers):\n patching.__exit__(*exc_info)\n \n patched.patchings=[self]\n return patched\n \n \n def get_original(self):\n target=self.getter()\n name=self.attribute\n \n original=DEFAULT\n local=False\n \n try :\n original=target.__dict__[name]\n except (AttributeError,KeyError):\n original=getattr(target,name,DEFAULT)\n else :\n local=True\n \n if name in _builtins and isinstance(target,ModuleType):\n self.create=True\n \n if not self.create and original is DEFAULT:\n raise AttributeError(\n \"%s does not have the attribute %r\"%(target,name)\n )\n return original,local\n \n \n def __enter__(self):\n ''\n new,spec,spec_set=self.new,self.spec,self.spec_set\n autospec,kwargs=self.autospec,self.kwargs\n new_callable=self.new_callable\n self.target=self.getter()\n \n \n if spec is False :\n spec=None\n if spec_set is False :\n spec_set=None\n if autospec is False :\n autospec=None\n \n if spec is not None and autospec is not None :\n raise TypeError(\"Can't specify spec and autospec\")\n if ((spec is not None or autospec is not None )and\n spec_set not in (True ,None )):\n raise TypeError(\"Can't provide explicit spec_set *and* spec or autospec\")\n \n original,local=self.get_original()\n \n if new is DEFAULT and autospec is None :\n inherit=False\n if spec is True :\n \n spec=original\n if spec_set is True :\n spec_set=original\n spec=None\n elif spec is not None :\n if spec_set is True :\n spec_set=spec\n spec=None\n elif spec_set is True :\n spec_set=original\n \n if spec is not None or spec_set is not None :\n if original is DEFAULT:\n raise TypeError(\"Can't use 'spec' with create=True\")\n if isinstance(original,type):\n \n inherit=True\n \n Klass=MagicMock\n _kwargs={}\n if new_callable is not None :\n Klass=new_callable\n elif spec is not None or spec_set is not None :\n this_spec=spec\n if spec_set is not None :\n this_spec=spec_set\n if _is_list(this_spec):\n not_callable='__call__'not in this_spec\n else :\n not_callable=not callable(this_spec)\n if not_callable:\n Klass=NonCallableMagicMock\n \n if spec is not None :\n _kwargs['spec']=spec\n if spec_set is not None :\n _kwargs['spec_set']=spec_set\n \n \n if (isinstance(Klass,type)and\n issubclass(Klass,NonCallableMock)and self.attribute):\n _kwargs['name']=self.attribute\n \n _kwargs.update(kwargs)\n new=Klass(**_kwargs)\n \n if inherit and _is_instance_mock(new):\n \n \n this_spec=spec\n if spec_set is not None :\n this_spec=spec_set\n if (not _is_list(this_spec)and not\n _instance_callable(this_spec)):\n Klass=NonCallableMagicMock\n \n _kwargs.pop('name')\n new.return_value=Klass(_new_parent=new,_new_name='()',\n **_kwargs)\n elif autospec is not None :\n \n \n \n if new is not DEFAULT:\n raise TypeError(\n \"autospec creates the mock for you. 
Can't specify \"\n \"autospec and new.\"\n )\n if original is DEFAULT:\n raise TypeError(\"Can't use 'autospec' with create=True\")\n spec_set=bool(spec_set)\n if autospec is True :\n autospec=original\n \n new=create_autospec(autospec,spec_set=spec_set,\n _name=self.attribute,**kwargs)\n elif kwargs:\n \n \n raise TypeError(\"Can't pass kwargs to a mock we aren't creating\")\n \n new_attr=new\n \n self.temp_original=original\n self.is_local=local\n setattr(self.target,self.attribute,new_attr)\n if self.attribute_name is not None :\n extra_args={}\n if self.new is DEFAULT:\n extra_args[self.attribute_name]=new\n for patching in self.additional_patchers:\n arg=patching.__enter__()\n if patching.new is DEFAULT:\n extra_args.update(arg)\n return extra_args\n \n return new\n \n \n def __exit__(self,*exc_info):\n ''\n if not _is_started(self):\n raise RuntimeError('stop called on unstarted patcher')\n \n if self.is_local and self.temp_original is not DEFAULT:\n setattr(self.target,self.attribute,self.temp_original)\n else :\n delattr(self.target,self.attribute)\n if not self.create and (not hasattr(self.target,self.attribute)or\n self.attribute in ('__doc__','__module__',\n '__defaults__','__annotations__',\n '__kwdefaults__')):\n \n setattr(self.target,self.attribute,self.temp_original)\n \n del self.temp_original\n del self.is_local\n del self.target\n for patcher in reversed(self.additional_patchers):\n if _is_started(patcher):\n patcher.__exit__(*exc_info)\n \n \n def start(self):\n ''\n result=self.__enter__()\n self._active_patches.append(self)\n return result\n \n \n def stop(self):\n ''\n try :\n self._active_patches.remove(self)\n except ValueError:\n \n pass\n \n return self.__exit__()\n \n \n \ndef _get_target(target):\n try :\n target,attribute=target.rsplit('.',1)\n except (TypeError,ValueError):\n raise TypeError(\"Need a valid target to patch. 
You supplied: %r\"%\n (target,))\n getter=lambda :_importer(target)\n return getter,attribute\n \n \ndef _patch_object(\ntarget,attribute,new=DEFAULT,spec=None ,\ncreate=False ,spec_set=None ,autospec=None ,\nnew_callable=None ,**kwargs\n):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n getter=lambda :target\n return _patch(\n getter,attribute,new,spec,create,\n spec_set,autospec,new_callable,kwargs\n )\n \n \ndef _patch_multiple(target,spec=None ,create=False ,spec_set=None ,\nautospec=None ,new_callable=None ,**kwargs):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if type(target)is str:\n getter=lambda :_importer(target)\n else :\n getter=lambda :target\n \n if not kwargs:\n raise ValueError(\n 'Must supply at least one keyword argument with patch.multiple'\n )\n \n items=list(kwargs.items())\n attribute,new=items[0]\n patcher=_patch(\n getter,attribute,new,spec,create,spec_set,\n autospec,new_callable,{}\n )\n patcher.attribute_name=attribute\n for attribute,new in items[1:]:\n this_patcher=_patch(\n getter,attribute,new,spec,create,spec_set,\n autospec,new_callable,{}\n )\n this_patcher.attribute_name=attribute\n patcher.additional_patchers.append(this_patcher)\n return patcher\n \n \ndef patch(\ntarget,new=DEFAULT,spec=None ,create=False ,\nspec_set=None ,autospec=None ,new_callable=None ,**kwargs\n):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n getter,attribute=_get_target(target)\n return _patch(\n getter,attribute,new,spec,create,\n spec_set,autospec,new_callable,kwargs\n )\n \n \nclass _patch_dict(object):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def __init__(self,in_dict,values=(),clear=False ,**kwargs):\n if isinstance(in_dict,str):\n in_dict=_importer(in_dict)\n self.in_dict=in_dict\n \n self.values=dict(values)\n self.values.update(kwargs)\n self.clear=clear\n self._original=None\n \n \n def __call__(self,f):\n if isinstance(f,type):\n return self.decorate_class(f)\n @wraps(f)\n def _inner(*args,**kw):\n self._patch_dict()\n try :\n return f(*args,**kw)\n finally :\n self._unpatch_dict()\n \n return _inner\n \n \n def decorate_class(self,klass):\n for attr in dir(klass):\n attr_value=getattr(klass,attr)\n if (attr.startswith(patch.TEST_PREFIX)and\n hasattr(attr_value,\"__call__\")):\n decorator=_patch_dict(self.in_dict,self.values,self.clear)\n decorated=decorator(attr_value)\n setattr(klass,attr,decorated)\n return klass\n \n \n def __enter__(self):\n ''\n self._patch_dict()\n \n \n def _patch_dict(self):\n values=self.values\n in_dict=self.in_dict\n clear=self.clear\n \n try :\n original=in_dict.copy()\n except AttributeError:\n \n \n original={}\n for key in in_dict:\n original[key]=in_dict[key]\n self._original=original\n \n if clear:\n _clear_dict(in_dict)\n \n try :\n in_dict.update(values)\n except AttributeError:\n \n for key in values:\n in_dict[key]=values[key]\n \n \n def _unpatch_dict(self):\n in_dict=self.in_dict\n original=self._original\n \n _clear_dict(in_dict)\n \n try :\n in_dict.update(original)\n except AttributeError:\n for key in original:\n in_dict[key]=original[key]\n \n \n def __exit__(self,*args):\n ''\n self._unpatch_dict()\n return False\n \n start=__enter__\n stop=__exit__\n \n \ndef _clear_dict(in_dict):\n try :\n in_dict.clear()\n except AttributeError:\n keys=list(in_dict)\n for key in keys:\n del in_dict[key]\n \n \ndef _patch_stopall():\n ''\n for patch in reversed(_patch._active_patches):\n patch.stop()\n \n 
\npatch.object=_patch_object\npatch.dict=_patch_dict\npatch.multiple=_patch_multiple\npatch.stopall=_patch_stopall\npatch.TEST_PREFIX='test'\n\nmagic_methods=(\n\"lt le gt ge eq ne \"\n\"getitem setitem delitem \"\n\"len contains iter \"\n\"hash str sizeof \"\n\"enter exit \"\n\n\n\"divmod rdivmod neg pos abs invert \"\n\"complex int float index \"\n\"trunc floor ceil \"\n\"bool next \"\n)\n\nnumerics=(\n\"add sub mul matmul div floordiv mod lshift rshift and xor or pow truediv\"\n)\ninplace=' '.join('i%s'%n for n in numerics.split())\nright=' '.join('r%s'%n for n in numerics.split())\n\n\n\n\n\n_non_defaults={\n'__get__','__set__','__delete__','__reversed__','__missing__',\n'__reduce__','__reduce_ex__','__getinitargs__','__getnewargs__',\n'__getstate__','__setstate__','__getformat__','__setformat__',\n'__repr__','__dir__','__subclasses__','__format__',\n'__getnewargs_ex__',\n}\n\n\ndef _get_method(name,func):\n ''\n def method(self,*args,**kw):\n return func(self,*args,**kw)\n method.__name__=name\n return method\n \n \n_magics={\n'__%s__'%method for method in\n' '.join([magic_methods,numerics,inplace,right]).split()\n}\n\n_all_magics=_magics |_non_defaults\n\n_unsupported_magics={\n'__getattr__','__setattr__',\n'__init__','__new__','__prepare__'\n'__instancecheck__','__subclasscheck__',\n'__del__'\n}\n\n_calculate_return_value={\n'__hash__':lambda self:object.__hash__(self),\n'__str__':lambda self:object.__str__(self),\n'__sizeof__':lambda self:object.__sizeof__(self),\n}\n\n_return_values={\n'__lt__':NotImplemented,\n'__gt__':NotImplemented,\n'__le__':NotImplemented,\n'__ge__':NotImplemented,\n'__int__':1,\n'__contains__':False ,\n'__len__':0,\n'__exit__':False ,\n'__complex__':1j,\n'__float__':1.0,\n'__bool__':True ,\n'__index__':1,\n}\n\n\ndef _get_eq(self):\n def __eq__(other):\n ret_val=self.__eq__._mock_return_value\n if ret_val is not DEFAULT:\n return ret_val\n if self is other:\n return True\n return NotImplemented\n return __eq__\n \ndef _get_ne(self):\n def __ne__(other):\n if self.__ne__._mock_return_value is not DEFAULT:\n return DEFAULT\n if self is other:\n return False\n return NotImplemented\n return __ne__\n \ndef _get_iter(self):\n def __iter__():\n ret_val=self.__iter__._mock_return_value\n if ret_val is DEFAULT:\n return iter([])\n \n \n return iter(ret_val)\n return __iter__\n \n_side_effect_methods={\n'__eq__':_get_eq,\n'__ne__':_get_ne,\n'__iter__':_get_iter,\n}\n\n\n\ndef _set_return_value(mock,method,name):\n fixed=_return_values.get(name,DEFAULT)\n if fixed is not DEFAULT:\n method.return_value=fixed\n return\n \n return_calulator=_calculate_return_value.get(name)\n if return_calulator is not None :\n try :\n return_value=return_calulator(mock)\n except AttributeError:\n \n \n return_value=AttributeError(name)\n method.return_value=return_value\n return\n \n side_effector=_side_effect_methods.get(name)\n if side_effector is not None :\n method.side_effect=side_effector(mock)\n \n \n \nclass MagicMixin(object):\n def __init__(self,*args,**kw):\n self._mock_set_magics()\n _safe_super(MagicMixin,self).__init__(*args,**kw)\n self._mock_set_magics()\n \n \n def _mock_set_magics(self):\n these_magics=_magics\n \n if getattr(self,\"_mock_methods\",None )is not None :\n these_magics=_magics.intersection(self._mock_methods)\n \n remove_magics=set()\n remove_magics=_magics -these_magics\n \n for entry in remove_magics:\n if entry in type(self).__dict__:\n \n delattr(self,entry)\n \n \n these_magics=these_magics -set(type(self).__dict__)\n \n _type=type(self)\n for entry 
in these_magics:\n setattr(_type,entry,MagicProxy(entry,self))\n \n \n \nclass NonCallableMagicMock(MagicMixin,NonCallableMock):\n ''\n def mock_add_spec(self,spec,spec_set=False ):\n ''\n\n\n\n \n self._mock_add_spec(spec,spec_set)\n self._mock_set_magics()\n \n \n \nclass MagicMock(MagicMixin,Mock):\n ''\n\n\n\n\n\n\n\n\n \n def mock_add_spec(self,spec,spec_set=False ):\n ''\n\n\n\n \n self._mock_add_spec(spec,spec_set)\n self._mock_set_magics()\n \n \n \nclass MagicProxy(object):\n def __init__(self,name,parent):\n self.name=name\n self.parent=parent\n \n def __call__(self,*args,**kwargs):\n m=self.create_mock()\n return m(*args,**kwargs)\n \n def create_mock(self):\n entry=self.name\n parent=self.parent\n m=parent._get_child_mock(name=entry,_new_name=entry,\n _new_parent=parent)\n setattr(parent,entry,m)\n _set_return_value(parent,m,entry)\n return m\n \n def __get__(self,obj,_type=None ):\n return self.create_mock()\n \n \n \nclass _ANY(object):\n ''\n \n def __eq__(self,other):\n return True\n \n def __ne__(self,other):\n return False\n \n def __repr__(self):\n return ''\n \nANY=_ANY()\n\n\n\ndef _format_call_signature(name,args,kwargs):\n message='%s(%%s)'%name\n formatted_args=''\n args_string=', '.join([repr(arg)for arg in args])\n kwargs_string=', '.join([\n '%s=%r'%(key,value)for key,value in sorted(kwargs.items())\n ])\n if args_string:\n formatted_args=args_string\n if kwargs_string:\n if formatted_args:\n formatted_args +=', '\n formatted_args +=kwargs_string\n \n return message %formatted_args\n \n \n \nclass _Call(tuple):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n def __new__(cls,value=(),name='',parent=None ,two=False ,\n from_kall=True ):\n args=()\n kwargs={}\n _len=len(value)\n if _len ==3:\n name,args,kwargs=value\n elif _len ==2:\n first,second=value\n if isinstance(first,str):\n name=first\n if isinstance(second,tuple):\n args=second\n else :\n kwargs=second\n else :\n args,kwargs=first,second\n elif _len ==1:\n value,=value\n if isinstance(value,str):\n name=value\n elif isinstance(value,tuple):\n args=value\n else :\n kwargs=value\n \n if two:\n return tuple.__new__(cls,(args,kwargs))\n \n return tuple.__new__(cls,(name,args,kwargs))\n \n \n def __init__(self,value=(),name=None ,parent=None ,two=False ,\n from_kall=True ):\n self.name=name\n self.parent=parent\n self.from_kall=from_kall\n \n \n def __eq__(self,other):\n if other is ANY:\n return True\n try :\n len_other=len(other)\n except TypeError:\n return False\n \n self_name=''\n if len(self)==2:\n self_args,self_kwargs=self\n else :\n self_name,self_args,self_kwargs=self\n \n other_name=''\n if len_other ==0:\n other_args,other_kwargs=(),{}\n elif len_other ==3:\n other_name,other_args,other_kwargs=other\n elif len_other ==1:\n value,=other\n if isinstance(value,tuple):\n other_args=value\n other_kwargs={}\n elif isinstance(value,str):\n other_name=value\n other_args,other_kwargs=(),{}\n else :\n other_args=()\n other_kwargs=value\n elif len_other ==2:\n \n first,second=other\n if isinstance(first,str):\n other_name=first\n if isinstance(second,tuple):\n other_args,other_kwargs=second,{}\n else :\n other_args,other_kwargs=(),second\n else :\n other_args,other_kwargs=first,second\n else :\n return False\n \n if self_name and other_name !=self_name:\n return False\n \n \n return (other_args,other_kwargs)==(self_args,self_kwargs)\n \n \n __ne__=object.__ne__\n \n \n def __call__(self,*args,**kwargs):\n if self.name is None :\n return _Call(('',args,kwargs),name='()')\n \n name=self.name+'()'\n return 
_Call((self.name,args,kwargs),name=name,parent=self)\n \n \n def __getattr__(self,attr):\n if self.name is None :\n return _Call(name=attr,from_kall=False )\n name='%s.%s'%(self.name,attr)\n return _Call(name=name,parent=self,from_kall=False )\n \n \n def count(self,*args,**kwargs):\n return self.__getattr__('count')(*args,**kwargs)\n \n def index(self,*args,**kwargs):\n return self.__getattr__('index')(*args,**kwargs)\n \n def __repr__(self):\n if not self.from_kall:\n name=self.name or 'call'\n if name.startswith('()'):\n name='call%s'%name\n return name\n \n if len(self)==2:\n name='call'\n args,kwargs=self\n else :\n name,args,kwargs=self\n if not name:\n name='call'\n elif not name.startswith('()'):\n name='call.%s'%name\n else :\n name='call%s'%name\n return _format_call_signature(name,args,kwargs)\n \n \n def call_list(self):\n ''\n\n \n vals=[]\n thing=self\n while thing is not None :\n if thing.from_kall:\n vals.append(thing)\n thing=thing.parent\n return _CallList(reversed(vals))\n \n \ncall=_Call(from_kall=False )\n\n\n\ndef create_autospec(spec,spec_set=False ,instance=False ,_parent=None ,\n_name=None ,**kwargs):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if _is_list(spec):\n \n \n spec=type(spec)\n \n is_type=isinstance(spec,type)\n \n _kwargs={'spec':spec}\n if spec_set:\n _kwargs={'spec_set':spec}\n elif spec is None :\n \n _kwargs={}\n if _kwargs and instance:\n _kwargs['_spec_as_instance']=True\n \n _kwargs.update(kwargs)\n \n Klass=MagicMock\n if inspect.isdatadescriptor(spec):\n \n \n _kwargs={}\n elif not _callable(spec):\n Klass=NonCallableMagicMock\n elif is_type and instance and not _instance_callable(spec):\n Klass=NonCallableMagicMock\n \n _name=_kwargs.pop('name',_name)\n \n _new_name=_name\n if _parent is None :\n \n _new_name=''\n \n mock=Klass(parent=_parent,_new_parent=_parent,_new_name=_new_name,\n name=_name,**_kwargs)\n \n if isinstance(spec,FunctionTypes):\n \n \n mock=_set_signature(mock,spec)\n else :\n _check_signature(spec,mock,is_type,instance)\n \n if _parent is not None and not instance:\n _parent._mock_children[_name]=mock\n \n if is_type and not instance and 'return_value'not in kwargs:\n mock.return_value=create_autospec(spec,spec_set,instance=True ,\n _name='()',_parent=mock)\n \n for entry in dir(spec):\n if _is_magic(entry):\n \n continue\n \n \n \n \n \n \n \n \n \n \n try :\n original=getattr(spec,entry)\n except AttributeError:\n continue\n \n kwargs={'spec':original}\n if spec_set:\n kwargs={'spec_set':original}\n \n if not isinstance(original,FunctionTypes):\n new=_SpecState(original,spec_set,mock,entry,instance)\n mock._mock_children[entry]=new\n else :\n parent=mock\n if isinstance(spec,FunctionTypes):\n parent=mock.mock\n \n skipfirst=_must_skip(spec,entry,is_type)\n kwargs['_eat_self']=skipfirst\n new=MagicMock(parent=parent,name=entry,_new_name=entry,\n _new_parent=parent,\n **kwargs)\n mock._mock_children[entry]=new\n _check_signature(original,new,skipfirst=skipfirst)\n \n \n \n \n \n if isinstance(new,FunctionTypes):\n setattr(mock,entry,new)\n \n return mock\n \n \ndef _must_skip(spec,entry,is_type):\n ''\n\n\n \n if not isinstance(spec,type):\n if entry in getattr(spec,'__dict__',{}):\n \n return False\n spec=spec.__class__\n \n for klass in spec.__mro__:\n result=klass.__dict__.get(entry,DEFAULT)\n if result is DEFAULT:\n continue\n if isinstance(result,(staticmethod,classmethod)):\n return False\n elif isinstance(getattr(result,'__get__',None ),MethodWrapperTypes):\n \n \n return is_type\n else :\n return False\n \n \n \n return 
is_type\n \n \ndef _get_class(obj):\n try :\n return obj.__class__\n except AttributeError:\n \n return type(obj)\n \n \nclass _SpecState(object):\n\n def __init__(self,spec,spec_set=False ,parent=None ,\n name=None ,ids=None ,instance=False ):\n self.spec=spec\n self.ids=ids\n self.spec_set=spec_set\n self.parent=parent\n self.instance=instance\n self.name=name\n \n \nFunctionTypes=(\n\ntype(create_autospec),\n\ntype(ANY.__eq__),\n)\n\nMethodWrapperTypes=(\ntype(ANY.__eq__.__get__),\n)\n\n\nfile_spec=None\n\ndef _iterate_read_data(read_data):\n\n\n\n sep=b'\\n'if isinstance(read_data,bytes)else '\\n'\n data_as_list=[l+sep for l in read_data.split(sep)]\n \n if data_as_list[-1]==sep:\n \n \n data_as_list=data_as_list[:-1]\n else :\n \n \n \n data_as_list[-1]=data_as_list[-1][:-1]\n \n for line in data_as_list:\n yield line\n \n \ndef mock_open(mock=None ,read_data=''):\n ''\n\n\n\n\n\n\n\n\n\n \n def _readlines_side_effect(*args,**kwargs):\n if handle.readlines.return_value is not None :\n return handle.readlines.return_value\n return list(_state[0])\n \n def _read_side_effect(*args,**kwargs):\n if handle.read.return_value is not None :\n return handle.read.return_value\n return type(read_data)().join(_state[0])\n \n def _readline_side_effect():\n if handle.readline.return_value is not None :\n while True :\n yield handle.readline.return_value\n for line in _state[0]:\n yield line\n while True :\n yield type(read_data)()\n \n \n global file_spec\n if file_spec is None :\n import _io\n file_spec=list(set(dir(_io.TextIOWrapper)).union(set(dir(_io.BytesIO))))\n \n if mock is None :\n mock=MagicMock(name='open',spec=open)\n \n handle=MagicMock(spec=file_spec)\n handle.__enter__.return_value=handle\n \n _state=[_iterate_read_data(read_data),None ]\n \n handle.write.return_value=None\n handle.read.return_value=None\n handle.readline.return_value=None\n handle.readlines.return_value=None\n \n handle.read.side_effect=_read_side_effect\n _state[1]=_readline_side_effect()\n handle.readline.side_effect=_state[1]\n handle.readlines.side_effect=_readlines_side_effect\n \n def reset_data(*args,**kwargs):\n _state[0]=_iterate_read_data(read_data)\n if handle.readline.side_effect ==_state[1]:\n \n _state[1]=_readline_side_effect()\n handle.readline.side_effect=_state[1]\n return DEFAULT\n \n mock.side_effect=reset_data\n mock.return_value=handle\n return mock\n \n \nclass PropertyMock(Mock):\n ''\n\n\n\n\n\n\n \n def _get_child_mock(self,**kwargs):\n return MagicMock(**kwargs)\n \n def __get__(self,obj,obj_type):\n return self()\n def __set__(self,obj,val):\n self(val)\n \n \ndef seal(mock):\n ''\n\n\n\n\n\n\n\n\n \n mock._mock_sealed=True\n for attr in dir(mock):\n try :\n m=getattr(mock,attr)\n except AttributeError:\n continue\n if not isinstance(m,NonCallableMock):\n continue\n if m._mock_new_parent is mock:\n seal(m)\n", ["_io", "builtins", "functools", "inspect", "java", "pprint", "sys", "types"]], "unittest.result": [".py", "''\n\nimport io\nimport sys\nimport traceback\n\nfrom . 
import util\nfrom functools import wraps\n\n__unittest=True\n\ndef failfast(method):\n @wraps(method)\n def inner(self,*args,**kw):\n if getattr(self,'failfast',False ):\n self.stop()\n return method(self,*args,**kw)\n return inner\n \nSTDOUT_LINE='\\nStdout:\\n%s'\nSTDERR_LINE='\\nStderr:\\n%s'\n\n\nclass TestResult(object):\n ''\n\n\n\n\n\n\n\n\n \n _previousTestClass=None\n _testRunEntered=False\n _moduleSetUpFailed=False\n def __init__(self,stream=None ,descriptions=None ,verbosity=None ):\n self.failfast=False\n self.failures=[]\n self.errors=[]\n self.testsRun=0\n self.skipped=[]\n self.expectedFailures=[]\n self.unexpectedSuccesses=[]\n self.shouldStop=False\n self.buffer=False\n self.tb_locals=False\n self._stdout_buffer=None\n self._stderr_buffer=None\n self._original_stdout=sys.stdout\n self._original_stderr=sys.stderr\n self._mirrorOutput=False\n \n def printErrors(self):\n ''\n \n def startTest(self,test):\n ''\n self.testsRun +=1\n self._mirrorOutput=False\n self._setupStdout()\n \n def _setupStdout(self):\n if self.buffer:\n if self._stderr_buffer is None :\n self._stderr_buffer=io.StringIO()\n self._stdout_buffer=io.StringIO()\n sys.stdout=self._stdout_buffer\n sys.stderr=self._stderr_buffer\n \n def startTestRun(self):\n ''\n\n\n \n \n def stopTest(self,test):\n ''\n self._restoreStdout()\n self._mirrorOutput=False\n \n def _restoreStdout(self):\n if self.buffer:\n if self._mirrorOutput:\n output=sys.stdout.getvalue()\n error=sys.stderr.getvalue()\n if output:\n if not output.endswith('\\n'):\n output +='\\n'\n self._original_stdout.write(STDOUT_LINE %output)\n if error:\n if not error.endswith('\\n'):\n error +='\\n'\n self._original_stderr.write(STDERR_LINE %error)\n \n sys.stdout=self._original_stdout\n sys.stderr=self._original_stderr\n self._stdout_buffer.seek(0)\n self._stdout_buffer.truncate()\n self._stderr_buffer.seek(0)\n self._stderr_buffer.truncate()\n \n def stopTestRun(self):\n ''\n\n\n \n \n @failfast\n def addError(self,test,err):\n ''\n\n \n self.errors.append((test,self._exc_info_to_string(err,test)))\n self._mirrorOutput=True\n \n @failfast\n def addFailure(self,test,err):\n ''\n \n self.failures.append((test,self._exc_info_to_string(err,test)))\n self._mirrorOutput=True\n \n def addSubTest(self,test,subtest,err):\n ''\n\n\n \n \n \n if err is not None :\n if getattr(self,'failfast',False ):\n self.stop()\n if issubclass(err[0],test.failureException):\n errors=self.failures\n else :\n errors=self.errors\n errors.append((subtest,self._exc_info_to_string(err,test)))\n self._mirrorOutput=True\n \n def addSuccess(self,test):\n ''\n pass\n \n def addSkip(self,test,reason):\n ''\n self.skipped.append((test,reason))\n \n def addExpectedFailure(self,test,err):\n ''\n self.expectedFailures.append(\n (test,self._exc_info_to_string(err,test)))\n \n @failfast\n def addUnexpectedSuccess(self,test):\n ''\n self.unexpectedSuccesses.append(test)\n \n def wasSuccessful(self):\n ''\n \n \n \n return ((len(self.failures)==len(self.errors)==0)and\n (not hasattr(self,'unexpectedSuccesses')or\n len(self.unexpectedSuccesses)==0))\n \n def stop(self):\n ''\n self.shouldStop=True\n \n def _exc_info_to_string(self,err,test):\n ''\n exctype,value,tb=err\n \n while tb and self._is_relevant_tb_level(tb):\n tb=tb.tb_next\n \n if exctype is test.failureException:\n \n length=self._count_relevant_tb_levels(tb)\n else :\n length=None\n tb_e=traceback.TracebackException(\n exctype,value,tb,limit=length,capture_locals=self.tb_locals)\n msgLines=list(tb_e.format())\n \n if self.buffer:\n 
output=sys.stdout.getvalue()\n error=sys.stderr.getvalue()\n if output:\n if not output.endswith('\\n'):\n output +='\\n'\n msgLines.append(STDOUT_LINE %output)\n if error:\n if not error.endswith('\\n'):\n error +='\\n'\n msgLines.append(STDERR_LINE %error)\n return ''.join(msgLines)\n \n \n def _is_relevant_tb_level(self,tb):\n return '__unittest'in tb.tb_frame.f_globals\n \n def _count_relevant_tb_levels(self,tb):\n length=0\n while tb and not self._is_relevant_tb_level(tb):\n length +=1\n tb=tb.tb_next\n return length\n \n def __repr__(self):\n return (\"<%s run=%i errors=%i failures=%i>\"%\n (util.strclass(self.__class__),self.testsRun,len(self.errors),\n len(self.failures)))\n", ["functools", "io", "sys", "traceback", "unittest", "unittest.util"]], "unittest.runner": [".py", "''\n\nimport sys\nimport time\nimport warnings\n\nfrom . import result\nfrom .signals import registerResult\n\n__unittest=True\n\n\nclass _WritelnDecorator(object):\n ''\n def __init__(self,stream):\n self.stream=stream\n \n def __getattr__(self,attr):\n if attr in ('stream','__getstate__'):\n raise AttributeError(attr)\n return getattr(self.stream,attr)\n \n def writeln(self,arg=None ):\n if arg:\n self.write(arg)\n self.write('\\n')\n \n \nclass TextTestResult(result.TestResult):\n ''\n\n\n \n separator1='='*70\n separator2='-'*70\n \n def __init__(self,stream,descriptions,verbosity):\n super(TextTestResult,self).__init__(stream,descriptions,verbosity)\n self.stream=stream\n self.showAll=verbosity >1\n self.dots=verbosity ==1\n self.descriptions=descriptions\n \n def getDescription(self,test):\n doc_first_line=test.shortDescription()\n if self.descriptions and doc_first_line:\n return '\\n'.join((str(test),doc_first_line))\n else :\n return str(test)\n \n def startTest(self,test):\n super(TextTestResult,self).startTest(test)\n if self.showAll:\n self.stream.write(self.getDescription(test))\n self.stream.write(\" ... 
\")\n self.stream.flush()\n \n def addSuccess(self,test):\n super(TextTestResult,self).addSuccess(test)\n if self.showAll:\n self.stream.writeln(\"ok\")\n elif self.dots:\n self.stream.write('.')\n self.stream.flush()\n \n def addError(self,test,err):\n super(TextTestResult,self).addError(test,err)\n if self.showAll:\n self.stream.writeln(\"ERROR\")\n elif self.dots:\n self.stream.write('E')\n self.stream.flush()\n \n def addFailure(self,test,err):\n super(TextTestResult,self).addFailure(test,err)\n if self.showAll:\n self.stream.writeln(\"FAIL\")\n elif self.dots:\n self.stream.write('F')\n self.stream.flush()\n \n def addSkip(self,test,reason):\n super(TextTestResult,self).addSkip(test,reason)\n if self.showAll:\n self.stream.writeln(\"skipped {0!r}\".format(reason))\n elif self.dots:\n self.stream.write(\"s\")\n self.stream.flush()\n \n def addExpectedFailure(self,test,err):\n super(TextTestResult,self).addExpectedFailure(test,err)\n if self.showAll:\n self.stream.writeln(\"expected failure\")\n elif self.dots:\n self.stream.write(\"x\")\n self.stream.flush()\n \n def addUnexpectedSuccess(self,test):\n super(TextTestResult,self).addUnexpectedSuccess(test)\n if self.showAll:\n self.stream.writeln(\"unexpected success\")\n elif self.dots:\n self.stream.write(\"u\")\n self.stream.flush()\n \n def printErrors(self):\n if self.dots or self.showAll:\n self.stream.writeln()\n self.printErrorList('ERROR',self.errors)\n self.printErrorList('FAIL',self.failures)\n \n def printErrorList(self,flavour,errors):\n for test,err in errors:\n self.stream.writeln(self.separator1)\n self.stream.writeln(\"%s: %s\"%(flavour,self.getDescription(test)))\n self.stream.writeln(self.separator2)\n self.stream.writeln(\"%s\"%err)\n \n \nclass TextTestRunner(object):\n ''\n\n\n\n \n resultclass=TextTestResult\n \n def __init__(self,stream=None ,descriptions=True ,verbosity=1,\n failfast=False ,buffer=False ,resultclass=None ,warnings=None ,\n *,tb_locals=False ):\n ''\n\n\n\n \n if stream is None :\n stream=sys.stderr\n self.stream=_WritelnDecorator(stream)\n self.descriptions=descriptions\n self.verbosity=verbosity\n self.failfast=failfast\n self.buffer=buffer\n self.tb_locals=tb_locals\n self.warnings=warnings\n if resultclass is not None :\n self.resultclass=resultclass\n \n def _makeResult(self):\n return self.resultclass(self.stream,self.descriptions,self.verbosity)\n \n def run(self,test):\n ''\n result=self._makeResult()\n registerResult(result)\n result.failfast=self.failfast\n result.buffer=self.buffer\n result.tb_locals=self.tb_locals\n with warnings.catch_warnings():\n if self.warnings:\n \n warnings.simplefilter(self.warnings)\n \n \n \n \n \n if self.warnings in ['default','always']:\n warnings.filterwarnings('module',\n category=DeprecationWarning,\n message=r'Please use assert\\w+ instead.')\n startTime=time.time()\n startTestRun=getattr(result,'startTestRun',None )\n if startTestRun is not None :\n startTestRun()\n try :\n test(result)\n finally :\n stopTestRun=getattr(result,'stopTestRun',None )\n if stopTestRun is not None :\n stopTestRun()\n stopTime=time.time()\n timeTaken=stopTime -startTime\n result.printErrors()\n if hasattr(result,'separator2'):\n self.stream.writeln(result.separator2)\n run=result.testsRun\n self.stream.writeln(\"Ran %d test%s in %.3fs\"%\n (run,run !=1 and \"s\"or \"\",timeTaken))\n self.stream.writeln()\n \n expectedFails=unexpectedSuccesses=skipped=0\n try :\n results=map(len,(result.expectedFailures,\n result.unexpectedSuccesses,\n result.skipped))\n except 
AttributeError:\n pass\n else :\n expectedFails,unexpectedSuccesses,skipped=results\n \n infos=[]\n if not result.wasSuccessful():\n self.stream.write(\"FAILED\")\n failed,errored=len(result.failures),len(result.errors)\n if failed:\n infos.append(\"failures=%d\"%failed)\n if errored:\n infos.append(\"errors=%d\"%errored)\n else :\n self.stream.write(\"OK\")\n if skipped:\n infos.append(\"skipped=%d\"%skipped)\n if expectedFails:\n infos.append(\"expected failures=%d\"%expectedFails)\n if unexpectedSuccesses:\n infos.append(\"unexpected successes=%d\"%unexpectedSuccesses)\n if infos:\n self.stream.writeln(\" (%s)\"%(\", \".join(infos),))\n else :\n self.stream.write(\"\\n\")\n return result\n", ["sys", "time", "unittest", "unittest.result", "unittest.signals", "warnings"]], "unittest.signals": [".py", "import signal\nimport weakref\n\nfrom functools import wraps\n\n__unittest=True\n\n\nclass _InterruptHandler(object):\n def __init__(self,default_handler):\n self.called=False\n self.original_handler=default_handler\n if isinstance(default_handler,int):\n if default_handler ==signal.SIG_DFL:\n \n default_handler=signal.default_int_handler\n elif default_handler ==signal.SIG_IGN:\n \n \n def default_handler(unused_signum,unused_frame):\n pass\n else :\n raise TypeError(\"expected SIGINT signal handler to be \"\n \"signal.SIG_IGN, signal.SIG_DFL, or a \"\n \"callable object\")\n self.default_handler=default_handler\n \n def __call__(self,signum,frame):\n installed_handler=signal.getsignal(signal.SIGINT)\n if installed_handler is not self:\n \n \n self.default_handler(signum,frame)\n \n if self.called:\n self.default_handler(signum,frame)\n self.called=True\n for result in _results.keys():\n result.stop()\n \n_results=weakref.WeakKeyDictionary()\ndef registerResult(result):\n _results[result]=1\n \ndef removeResult(result):\n return bool(_results.pop(result,None ))\n \n_interrupt_handler=None\ndef installHandler():\n global _interrupt_handler\n if _interrupt_handler is None :\n default_handler=signal.getsignal(signal.SIGINT)\n _interrupt_handler=_InterruptHandler(default_handler)\n signal.signal(signal.SIGINT,_interrupt_handler)\n \n \ndef removeHandler(method=None ):\n if method is not None :\n @wraps(method)\n def inner(*args,**kwargs):\n initial=signal.getsignal(signal.SIGINT)\n removeHandler()\n try :\n return method(*args,**kwargs)\n finally :\n signal.signal(signal.SIGINT,initial)\n return inner\n \n global _interrupt_handler\n if _interrupt_handler is not None :\n signal.signal(signal.SIGINT,_interrupt_handler.original_handler)\n", ["functools", "signal", "weakref"]], "unittest.suite": [".py", "''\n\nimport sys\n\nfrom . import case\nfrom . 
import util\n\n__unittest=True\n\n\ndef _call_if_exists(parent,attr):\n func=getattr(parent,attr,lambda :None )\n func()\n \n \nclass BaseTestSuite(object):\n ''\n \n _cleanup=True\n \n def __init__(self,tests=()):\n self._tests=[]\n self._removed_tests=0\n self.addTests(tests)\n \n def __repr__(self):\n return \"<%s tests=%s>\"%(util.strclass(self.__class__),list(self))\n \n def __eq__(self,other):\n if not isinstance(other,self.__class__):\n return NotImplemented\n return list(self)==list(other)\n \n def __iter__(self):\n return iter(self._tests)\n \n def countTestCases(self):\n cases=self._removed_tests\n for test in self:\n if test:\n cases +=test.countTestCases()\n return cases\n \n def addTest(self,test):\n \n if not callable(test):\n raise TypeError(\"{} is not callable\".format(repr(test)))\n if isinstance(test,type)and issubclass(test,\n (case.TestCase,TestSuite)):\n raise TypeError(\"TestCases and TestSuites must be instantiated \"\n \"before passing them to addTest()\")\n self._tests.append(test)\n \n def addTests(self,tests):\n if isinstance(tests,str):\n raise TypeError(\"tests must be an iterable of tests, not a string\")\n for test in tests:\n self.addTest(test)\n \n def run(self,result):\n for index,test in enumerate(self):\n if result.shouldStop:\n break\n test(result)\n if self._cleanup:\n self._removeTestAtIndex(index)\n return result\n \n def _removeTestAtIndex(self,index):\n ''\n try :\n test=self._tests[index]\n except TypeError:\n \n pass\n else :\n \n \n if hasattr(test,'countTestCases'):\n self._removed_tests +=test.countTestCases()\n self._tests[index]=None\n \n def __call__(self,*args,**kwds):\n return self.run(*args,**kwds)\n \n def debug(self):\n ''\n for test in self:\n test.debug()\n \n \nclass TestSuite(BaseTestSuite):\n ''\n\n\n\n\n\n\n \n \n def run(self,result,debug=False ):\n topLevel=False\n if getattr(result,'_testRunEntered',False )is False :\n result._testRunEntered=topLevel=True\n \n for index,test in enumerate(self):\n if result.shouldStop:\n break\n \n if _isnotsuite(test):\n self._tearDownPreviousClass(test,result)\n self._handleModuleFixture(test,result)\n self._handleClassSetUp(test,result)\n result._previousTestClass=test.__class__\n \n if (getattr(test.__class__,'_classSetupFailed',False )or\n getattr(result,'_moduleSetUpFailed',False )):\n continue\n \n if not debug:\n test(result)\n else :\n test.debug()\n \n if self._cleanup:\n self._removeTestAtIndex(index)\n \n if topLevel:\n self._tearDownPreviousClass(None ,result)\n self._handleModuleTearDown(result)\n result._testRunEntered=False\n return result\n \n def debug(self):\n ''\n debug=_DebugResult()\n self.run(debug,True )\n \n \n \n def _handleClassSetUp(self,test,result):\n previousClass=getattr(result,'_previousTestClass',None )\n currentClass=test.__class__\n if currentClass ==previousClass:\n return\n if result._moduleSetUpFailed:\n return\n if getattr(currentClass,\"__unittest_skip__\",False ):\n return\n \n try :\n currentClass._classSetupFailed=False\n except TypeError:\n \n \n pass\n \n setUpClass=getattr(currentClass,'setUpClass',None )\n if setUpClass is not None :\n _call_if_exists(result,'_setupStdout')\n try :\n setUpClass()\n except Exception as e:\n if isinstance(result,_DebugResult):\n raise\n currentClass._classSetupFailed=True\n className=util.strclass(currentClass)\n errorName='setUpClass (%s)'%className\n self._addClassOrModuleLevelException(result,e,errorName)\n finally :\n _call_if_exists(result,'_restoreStdout')\n \n def _get_previous_module(self,result):\n 
previousModule=None\n previousClass=getattr(result,'_previousTestClass',None )\n if previousClass is not None :\n previousModule=previousClass.__module__\n return previousModule\n \n \n def _handleModuleFixture(self,test,result):\n previousModule=self._get_previous_module(result)\n currentModule=test.__class__.__module__\n if currentModule ==previousModule:\n return\n \n self._handleModuleTearDown(result)\n \n \n result._moduleSetUpFailed=False\n try :\n module=sys.modules[currentModule]\n except KeyError:\n return\n setUpModule=getattr(module,'setUpModule',None )\n if setUpModule is not None :\n _call_if_exists(result,'_setupStdout')\n try :\n setUpModule()\n except Exception as e:\n if isinstance(result,_DebugResult):\n raise\n result._moduleSetUpFailed=True\n errorName='setUpModule (%s)'%currentModule\n self._addClassOrModuleLevelException(result,e,errorName)\n finally :\n _call_if_exists(result,'_restoreStdout')\n \n def _addClassOrModuleLevelException(self,result,exception,errorName):\n error=_ErrorHolder(errorName)\n addSkip=getattr(result,'addSkip',None )\n if addSkip is not None and isinstance(exception,case.SkipTest):\n addSkip(error,str(exception))\n else :\n result.addError(error,sys.exc_info())\n \n def _handleModuleTearDown(self,result):\n previousModule=self._get_previous_module(result)\n if previousModule is None :\n return\n if result._moduleSetUpFailed:\n return\n \n try :\n module=sys.modules[previousModule]\n except KeyError:\n return\n \n tearDownModule=getattr(module,'tearDownModule',None )\n if tearDownModule is not None :\n _call_if_exists(result,'_setupStdout')\n try :\n tearDownModule()\n except Exception as e:\n if isinstance(result,_DebugResult):\n raise\n errorName='tearDownModule (%s)'%previousModule\n self._addClassOrModuleLevelException(result,e,errorName)\n finally :\n _call_if_exists(result,'_restoreStdout')\n \n def _tearDownPreviousClass(self,test,result):\n previousClass=getattr(result,'_previousTestClass',None )\n currentClass=test.__class__\n if currentClass ==previousClass:\n return\n if getattr(previousClass,'_classSetupFailed',False ):\n return\n if getattr(result,'_moduleSetUpFailed',False ):\n return\n if getattr(previousClass,\"__unittest_skip__\",False ):\n return\n \n tearDownClass=getattr(previousClass,'tearDownClass',None )\n if tearDownClass is not None :\n _call_if_exists(result,'_setupStdout')\n try :\n tearDownClass()\n except Exception as e:\n if isinstance(result,_DebugResult):\n raise\n className=util.strclass(previousClass)\n errorName='tearDownClass (%s)'%className\n self._addClassOrModuleLevelException(result,e,errorName)\n finally :\n _call_if_exists(result,'_restoreStdout')\n \n \nclass _ErrorHolder(object):\n ''\n\n\n\n \n \n \n \n \n failureException=None\n \n def __init__(self,description):\n self.description=description\n \n def id(self):\n return self.description\n \n def shortDescription(self):\n return None\n \n def __repr__(self):\n return \"\"%(self.description,)\n \n def __str__(self):\n return self.id()\n \n def run(self,result):\n \n \n pass\n \n def __call__(self,result):\n return self.run(result)\n \n def countTestCases(self):\n return 0\n \ndef _isnotsuite(test):\n ''\n try :\n iter(test)\n except TypeError:\n return True\n return False\n \n \nclass _DebugResult(object):\n ''\n _previousTestClass=None\n _moduleSetUpFailed=False\n shouldStop=False\n", ["sys", "unittest", "unittest.case", "unittest.util"]], "unittest.util": [".py", "''\n\nfrom collections import namedtuple,Counter\nfrom os.path import 
commonprefix\n\n__unittest=True\n\n_MAX_LENGTH=80\n_PLACEHOLDER_LEN=12\n_MIN_BEGIN_LEN=5\n_MIN_END_LEN=5\n_MIN_COMMON_LEN=5\n_MIN_DIFF_LEN=_MAX_LENGTH -\\\n(_MIN_BEGIN_LEN+_PLACEHOLDER_LEN+_MIN_COMMON_LEN+\n_PLACEHOLDER_LEN+_MIN_END_LEN)\nassert _MIN_DIFF_LEN >=0\n\ndef _shorten(s,prefixlen,suffixlen):\n skip=len(s)-prefixlen -suffixlen\n if skip >_PLACEHOLDER_LEN:\n s='%s[%d chars]%s'%(s[:prefixlen],skip,s[len(s)-suffixlen:])\n return s\n \ndef _common_shorten_repr(*args):\n args=tuple(map(safe_repr,args))\n maxlen=max(map(len,args))\n if maxlen <=_MAX_LENGTH:\n return args\n \n prefix=commonprefix(args)\n prefixlen=len(prefix)\n \n common_len=_MAX_LENGTH -\\\n (maxlen -prefixlen+_MIN_BEGIN_LEN+_PLACEHOLDER_LEN)\n if common_len >_MIN_COMMON_LEN:\n assert _MIN_BEGIN_LEN+_PLACEHOLDER_LEN+_MIN_COMMON_LEN+\\\n (maxlen -prefixlen)<_MAX_LENGTH\n prefix=_shorten(prefix,_MIN_BEGIN_LEN,common_len)\n return tuple(prefix+s[prefixlen:]for s in args)\n \n prefix=_shorten(prefix,_MIN_BEGIN_LEN,_MIN_COMMON_LEN)\n return tuple(prefix+_shorten(s[prefixlen:],_MIN_DIFF_LEN,_MIN_END_LEN)\n for s in args)\n \ndef safe_repr(obj,short=False ):\n try :\n result=repr(obj)\n except Exception:\n result=object.__repr__(obj)\n if not short or len(result)<_MAX_LENGTH:\n return result\n return result[:_MAX_LENGTH]+' [truncated]...'\n \ndef strclass(cls):\n return \"%s.%s\"%(cls.__module__,cls.__qualname__)\n \ndef sorted_list_difference(expected,actual):\n ''\n\n\n\n\n\n \n i=j=0\n missing=[]\n unexpected=[]\n while True :\n try :\n e=expected[i]\n a=actual[j]\n if e a:\n unexpected.append(a)\n j +=1\n while actual[j]==a:\n j +=1\n else :\n i +=1\n try :\n while expected[i]==e:\n i +=1\n finally :\n j +=1\n while actual[j]==a:\n j +=1\n except IndexError:\n missing.extend(expected[i:])\n unexpected.extend(actual[j:])\n break\n return missing,unexpected\n \n \ndef unorderable_list_difference(expected,actual):\n ''\n\n\n\n \n missing=[]\n while expected:\n item=expected.pop()\n try :\n actual.remove(item)\n except ValueError:\n missing.append(item)\n \n \n return missing,actual\n \ndef three_way_cmp(x,y):\n ''\n return (x >y)-(x =0:\n delim=min(delim,wdelim)\n return url[start:delim],url[delim:]\n \ndef urlsplit(url,scheme='',allow_fragments=True ):\n ''\n\n\n\n \n url,scheme,_coerce_result=_coerce_args(url,scheme)\n allow_fragments=bool(allow_fragments)\n key=url,scheme,allow_fragments,type(url),type(scheme)\n cached=_parse_cache.get(key,None )\n if cached:\n return _coerce_result(cached)\n if len(_parse_cache)>=MAX_CACHE_SIZE:\n clear_cache()\n netloc=query=fragment=''\n i=url.find(':')\n if i >0:\n if url[:i]=='http':\n scheme=url[:i].lower()\n url=url[i+1:]\n if url[:2]=='//':\n netloc,url=_splitnetloc(url,2)\n if (('['in netloc and ']'not in netloc)or\n (']'in netloc and '['not in netloc)):\n raise ValueError(\"Invalid IPv6 URL\")\n if allow_fragments and '#'in url:\n url,fragment=url.split('#',1)\n if '?'in url:\n url,query=url.split('?',1)\n v=SplitResult(scheme,netloc,url,query,fragment)\n _parse_cache[key]=v\n return _coerce_result(v)\n for c in url[:i]:\n if c not in scheme_chars:\n break\n else :\n \n \n rest=url[i+1:]\n if not rest or any(c not in '0123456789'for c in rest):\n \n scheme,url=url[:i].lower(),rest\n \n if url[:2]=='//':\n netloc,url=_splitnetloc(url,2)\n if (('['in netloc and ']'not in netloc)or\n (']'in netloc and '['not in netloc)):\n raise ValueError(\"Invalid IPv6 URL\")\n if allow_fragments and '#'in url:\n url,fragment=url.split('#',1)\n if '?'in url:\n url,query=url.split('?',1)\n 
v=SplitResult(scheme,netloc,url,query,fragment)\n _parse_cache[key]=v\n return _coerce_result(v)\n \ndef urlunparse(components):\n ''\n\n\n \n scheme,netloc,url,params,query,fragment,_coerce_result=(\n _coerce_args(*components))\n if params:\n url=\"%s;%s\"%(url,params)\n return _coerce_result(urlunsplit((scheme,netloc,url,query,fragment)))\n \ndef urlunsplit(components):\n ''\n\n\n\n \n scheme,netloc,url,query,fragment,_coerce_result=(\n _coerce_args(*components))\n if netloc or (scheme and scheme in uses_netloc and url[:2]!='//'):\n if url and url[:1]!='/':url='/'+url\n url='//'+(netloc or '')+url\n if scheme:\n url=scheme+':'+url\n if query:\n url=url+'?'+query\n if fragment:\n url=url+'#'+fragment\n return _coerce_result(url)\n \ndef urljoin(base,url,allow_fragments=True ):\n ''\n \n if not base:\n return url\n if not url:\n return base\n base,url,_coerce_result=_coerce_args(base,url)\n bscheme,bnetloc,bpath,bparams,bquery,bfragment=\\\n urlparse(base,'',allow_fragments)\n scheme,netloc,path,params,query,fragment=\\\n urlparse(url,bscheme,allow_fragments)\n if scheme !=bscheme or scheme not in uses_relative:\n return _coerce_result(url)\n if scheme in uses_netloc:\n if netloc:\n return _coerce_result(urlunparse((scheme,netloc,path,\n params,query,fragment)))\n netloc=bnetloc\n if path[:1]=='/':\n return _coerce_result(urlunparse((scheme,netloc,path,\n params,query,fragment)))\n if not path and not params:\n path=bpath\n params=bparams\n if not query:\n query=bquery\n return _coerce_result(urlunparse((scheme,netloc,path,\n params,query,fragment)))\n segments=bpath.split('/')[:-1]+path.split('/')\n \n if segments[-1]=='.':\n segments[-1]=''\n while '.'in segments:\n segments.remove('.')\n while 1:\n i=1\n n=len(segments)-1\n while i =2 and segments[-1]=='..':\n segments[-2:]=['']\n return _coerce_result(urlunparse((scheme,netloc,'/'.join(segments),\n params,query,fragment)))\n \ndef urldefrag(url):\n ''\n\n\n\n\n \n url,_coerce_result=_coerce_args(url)\n if '#'in url:\n s,n,p,a,q,frag=urlparse(url)\n defrag=urlunparse((s,n,p,a,q,''))\n else :\n frag=''\n defrag=url\n return _coerce_result(DefragResult(defrag,frag))\n \n_hexdig='0123456789ABCDEFabcdef'\n_hextobyte={(a+b).encode():bytes([int(a+b,16)])\nfor a in _hexdig for b in _hexdig}\n\ndef unquote_to_bytes(string):\n ''\n \n \n if not string:\n \n string.split\n return b''\n if isinstance(string,str):\n string=string.encode('utf-8')\n bits=string.split(b'%')\n if len(bits)==1:\n return string\n res=[bits[0]]\n append=res.append\n for item in bits[1:]:\n try :\n append(_hextobyte[item[:2]])\n append(item[2:])\n except KeyError:\n append(b'%')\n append(item)\n return b''.join(res)\n \n_asciire=re.compile('([\\x00-\\x7f]+)')\n\ndef unquote(string,encoding='utf-8',errors='replace'):\n ''\n\n\n\n\n\n\n\n \n if '%'not in string:\n string.split\n return string\n if encoding is None :\n encoding='utf-8'\n if errors is None :\n errors='replace'\n bits=_asciire.split(string)\n res=[bits[0]]\n append=res.append\n for i in range(1,len(bits),2):\n append(unquote_to_bytes(bits[i]).decode(encoding,errors))\n append(bits[i+1])\n return ''.join(res)\n \ndef parse_qs(qs,keep_blank_values=False ,strict_parsing=False ,\nencoding='utf-8',errors='replace'):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n parsed_result={}\n pairs=parse_qsl(qs,keep_blank_values,strict_parsing,\n encoding=encoding,errors=errors)\n for name,value in pairs:\n if name in parsed_result:\n parsed_result[name].append(value)\n else :\n parsed_result[name]=[value]\n return 
parsed_result\n \ndef parse_qsl(qs,keep_blank_values=False ,strict_parsing=False ,\nencoding='utf-8',errors='replace'):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n qs,_coerce_result=_coerce_args(qs)\n pairs=[s2 for s1 in qs.split('&')for s2 in s1.split(';')]\n r=[]\n for name_value in pairs:\n if not name_value and not strict_parsing:\n continue\n nv=name_value.split('=',1)\n if len(nv)!=2:\n if strict_parsing:\n raise ValueError(\"bad query field: %r\"%(name_value,))\n \n if keep_blank_values:\n nv.append('')\n else :\n continue\n if len(nv[1])or keep_blank_values:\n name=nv[0].replace('+',' ')\n name=unquote(name,encoding=encoding,errors=errors)\n name=_coerce_result(name)\n value=nv[1].replace('+',' ')\n value=unquote(value,encoding=encoding,errors=errors)\n value=_coerce_result(value)\n r.append((name,value))\n return r\n \ndef unquote_plus(string,encoding='utf-8',errors='replace'):\n ''\n\n\n\n \n string=string.replace('+',' ')\n return unquote(string,encoding,errors)\n \n_ALWAYS_SAFE=frozenset(b'ABCDEFGHIJKLMNOPQRSTUVWXYZ'\nb'abcdefghijklmnopqrstuvwxyz'\nb'0123456789'\nb'_.-')\n_ALWAYS_SAFE_BYTES=bytes(_ALWAYS_SAFE)\n_safe_quoters={}\n\nclass Quoter(collections.defaultdict):\n ''\n\n\n\n \n \n \n def __init__(self,safe):\n ''\n self.safe=_ALWAYS_SAFE.union(safe)\n \n def __repr__(self):\n \n return \"\"%dict(self)\n \n def __missing__(self,b):\n \n res=chr(b)if b in self.safe else '%{:02X}'.format(b)\n self[b]=res\n return res\n \ndef quote(string,safe='/',encoding=None ,errors=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if isinstance(string,str):\n if not string:\n return string\n if encoding is None :\n encoding='utf-8'\n if errors is None :\n errors='strict'\n string=string.encode(encoding,errors)\n else :\n if encoding is not None :\n raise TypeError(\"quote() doesn't support 'encoding' for bytes\")\n if errors is not None :\n raise TypeError(\"quote() doesn't support 'errors' for bytes\")\n return quote_from_bytes(string,safe)\n \ndef quote_plus(string,safe='',encoding=None ,errors=None ):\n ''\n\n\n \n \n \n if ((isinstance(string,str)and ' 'not in string)or\n (isinstance(string,bytes)and b' 'not in string)):\n return quote(string,safe,encoding,errors)\n if isinstance(safe,str):\n space=' '\n else :\n space=b' '\n string=quote(string,safe+space,encoding,errors)\n return string.replace(' ','+')\n \ndef quote_from_bytes(bs,safe='/'):\n ''\n\n\n \n if not isinstance(bs,(bytes,bytearray)):\n raise TypeError(\"quote_from_bytes() expected bytes\")\n if not bs:\n return ''\n if isinstance(safe,str):\n \n safe=safe.encode('ascii','ignore')\n else :\n safe=bytes([c for c in safe if c <128])\n if not bs.rstrip(_ALWAYS_SAFE_BYTES+safe):\n return bs.decode()\n try :\n quoter=_safe_quoters[safe]\n except KeyError:\n _safe_quoters[safe]=quoter=Quoter(safe).__getitem__\n return ''.join([quoter(char)for char in bs])\n \ndef urlencode(query,doseq=False ,safe='',encoding=None ,errors=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n \n if hasattr(query,\"items\"):\n query=query.items()\n else :\n \n \n try :\n \n \n if len(query)and not isinstance(query[0],tuple):\n raise TypeError\n \n \n \n \n except TypeError:\n ty,va,tb=sys.exc_info()\n raise TypeError(\"not a valid non-string sequence \"\n \"or mapping object\").with_traceback(tb)\n \n l=[]\n if not doseq:\n for k,v in query:\n if isinstance(k,bytes):\n k=quote_plus(k,safe)\n else :\n k=quote_plus(str(k),safe,encoding,errors)\n \n if isinstance(v,bytes):\n v=quote_plus(v,safe)\n else :\n 
v=quote_plus(str(v),safe,encoding,errors)\n l.append(k+'='+v)\n else :\n for k,v in query:\n if isinstance(k,bytes):\n k=quote_plus(k,safe)\n else :\n k=quote_plus(str(k),safe,encoding,errors)\n \n if isinstance(v,bytes):\n v=quote_plus(v,safe)\n l.append(k+'='+v)\n elif isinstance(v,str):\n v=quote_plus(v,safe,encoding,errors)\n l.append(k+'='+v)\n else :\n try :\n \n x=len(v)\n except TypeError:\n \n v=quote_plus(str(v),safe,encoding,errors)\n l.append(k+'='+v)\n else :\n \n for elt in v:\n if isinstance(elt,bytes):\n elt=quote_plus(elt,safe)\n else :\n elt=quote_plus(str(elt),safe,encoding,errors)\n l.append(k+'='+elt)\n return '&'.join(l)\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \ndef to_bytes(url):\n ''\n \n \n \n if isinstance(url,str):\n try :\n url=url.encode(\"ASCII\").decode()\n except UnicodeError:\n raise UnicodeError(\"URL \"+repr(url)+\n \" contains non-ASCII characters\")\n return url\n \ndef unwrap(url):\n ''\n url=str(url).strip()\n if url[:1]=='<'and url[-1:]=='>':\n url=url[1:-1].strip()\n if url[:4]=='URL:':url=url[4:].strip()\n return url\n \n_typeprog=None\ndef splittype(url):\n ''\n global _typeprog\n if _typeprog is None :\n import re\n _typeprog=re.compile('^([^/:]+):')\n \n match=_typeprog.match(url)\n if match:\n scheme=match.group(1)\n return scheme.lower(),url[len(scheme)+1:]\n return None ,url\n \n_hostprog=None\ndef splithost(url):\n ''\n global _hostprog\n if _hostprog is None :\n import re\n _hostprog=re.compile('^//([^/?]*)(.*)$')\n \n match=_hostprog.match(url)\n if match:\n host_port=match.group(1)\n path=match.group(2)\n if path and not path.startswith('/'):\n path='/'+path\n return host_port,path\n return None ,url\n \n_userprog=None\ndef splituser(host):\n ''\n global _userprog\n if _userprog is None :\n import re\n _userprog=re.compile('^(.*)@(.*)$')\n \n match=_userprog.match(host)\n if match:return match.group(1,2)\n return None ,host\n \n_passwdprog=None\ndef splitpasswd(user):\n ''\n global _passwdprog\n if _passwdprog is None :\n import re\n _passwdprog=re.compile('^([^:]*):(.*)$',re.S)\n \n match=_passwdprog.match(user)\n if match:return match.group(1,2)\n return user,None\n \n \n_portprog=None\ndef splitport(host):\n ''\n global _portprog\n if _portprog is None :\n import re\n _portprog=re.compile('^(.*):([0-9]+)$')\n \n match=_portprog.match(host)\n if match:return match.group(1,2)\n return host,None\n \n_nportprog=None\ndef splitnport(host,defport=-1):\n ''\n\n\n \n global _nportprog\n if _nportprog is None :\n import re\n _nportprog=re.compile('^(.*):(.*)$')\n \n match=_nportprog.match(host)\n if match:\n host,port=match.group(1,2)\n try :\n if not port:raise ValueError(\"no digits\")\n nport=int(port)\n except ValueError:\n nport=None\n return host,nport\n return host,defport\n \n_queryprog=None\ndef splitquery(url):\n ''\n global _queryprog\n if _queryprog is None :\n import re\n _queryprog=re.compile('^(.*)\\?([^?]*)$')\n \n match=_queryprog.match(url)\n if match:return match.group(1,2)\n return url,None\n \n_tagprog=None\ndef splittag(url):\n ''\n global _tagprog\n if _tagprog is None :\n import re\n _tagprog=re.compile('^(.*)#([^#]*)$')\n \n match=_tagprog.match(url)\n if match:return match.group(1,2)\n return url,None\n \ndef splitattr(url):\n ''\n \n words=url.split(';')\n return words[0],words[1:]\n \n_valueprog=None\ndef splitvalue(attr):\n ''\n global _valueprog\n if _valueprog is None :\n import re\n _valueprog=re.compile('^([^=]*)=(.*)$')\n \n match=_valueprog.match(attr)\n if match:return match.group(1,2)\n return 
attr,None\n", ["collections", "re", "sys"]], "urllib.request": [".py", "from browser import ajax\nfrom . import error\n\nclass FileIO:\n def __init__(self,data):\n self._data=data\n \n def read(self):\n return self._data\n \ndef urlopen(url,data=None ,timeout=None ):\n global result\n result=None\n \n def on_complete(req):\n global result\n if req.status ==200:\n result=req\n \n _ajax=ajax.ajax()\n _ajax.bind('complete',on_complete)\n if timeout is not None :\n _ajax.set_timeout(timeout)\n \n if data is None :\n _ajax.open('GET',url,False )\n _ajax.send()\n else :\n _ajax.open('POST',url,False )\n _ajax.send(data)\n \n if result is not None :\n if isinstance(result.text,str):\n return FileIO(result.text)\n \n return FileIO(result.text())\n raise error.HTTPError('file not found')\n", ["browser", "browser.ajax", "urllib", "urllib.error"]], "urllib": [".py", "", [], 1]} - +var scripts = {"$timestamp": 1604435700274, "array": [".js", "var $module = (function($B){\n\nvar _b_ = $B.builtins,\n $s = [],\n i\nfor(var $b in _b_){$s.push('var ' + $b +' = _b_[\"'+$b+'\"]')}\neval($s.join(';'))\n\nvar typecodes = {\n 'b': Int8Array, // signed char, 1 byte\n 'B': Uint8Array, // unsigned char, 1\n 'u': null, // Py_UNICODE Unicode character, 2 (deprecated)\n 'h': Int16Array, // signed short, 2\n 'H': Uint16Array, // unsigned short, 2\n 'i': Int16Array, // signed int, 2\n 'I': Uint16Array, // unsigned int, 2\n 'l': Int32Array, // signed long, 4\n 'L': Uint32Array, // unsigned long, 4\n 'q': null, // signed long, 8 (not implemented)\n 'Q': null, // unsigned long, 8 (not implemented)\n 'f': Float32Array, // float, 4\n 'd': Float64Array // double float, 8\n}\n\nvar array = $B.make_class(\"array\",\n function(){\n var missing = {},\n $ = $B.args(\"array\", 2, {typecode: null, initializer: null},\n [\"typecode\", \"initializer\"], arguments, {initializer: missing},\n null, null),\n typecode = $.typecode,\n initializer = $.initializer\n if(! 
typecodes.hasOwnProperty(typecode)){\n throw _b_.ValueError.$factory(\"bad typecode (must be b, \" +\n \"B, u, h, H, i, I, l, L, q, Q, f or d)\")\n }\n if(typecodes[typecode] === null){\n throw _b_.NotImplementedError.$factory(\"type code \" +\n typecode + \"is not implemented\")\n }\n var res = {\n __class__: array,\n typecode: typecode,\n obj: null\n }\n if(initializer !== missing){\n if(Array.isArray(initializer)){\n array.fromlist(res, initializer)\n }else if(_b_.isinstance(initializer, _b_.bytes)){\n array.frombytes(res, initializer)\n }else{\n array.extend(res, initializer)\n }\n }\n return res\n }\n)\n\narray.$buffer_protocol = true\n\narray.__getitem__ = function(self, key){\n if(self.obj && self.obj[key] !== undefined){\n return self.obj[key]\n }\n throw _b_.IndexError(\"array index out of range\")\n}\n\nvar array_iterator = $B.make_iterator_class(\"array_iterator\")\narray.__iter__ = function(self){\n return array_iterator.$factory(self.obj)\n}\n\narray.__len__ = function(self){\n return self.obj.length\n}\n\narray.__str__ = function(self){\n $B.args(\"__str__\", 1, {self: null},\n [\"self\"], arguments, {}, null, null)\n var res = \"array('\" + self.typecode + \"'\"\n if(self.obj !== null){\n res += \", [\" + self.obj + \"]\"\n }\n return res + \")\"\n}\n\nfunction normalize_index(self, i){\n // return an index i between 0 and self.obj.length - 1\n if(i < 0){\n i = self.obj.length + i\n }\n if(i < 0){i = 0}\n else if(i > self.obj.length - 1){\n i = self.obj.length\n }\n return i\n}\n\narray.append = function(self, value){\n $B.args(\"append\", 2, {self: null, value: null},\n [\"self\", \"value\"], arguments, {}, null, null)\n var pos = self.obj === null ? 0 : self.obj.length\n return array.insert(self, pos, value)\n}\n\narray.count = function(self, x){\n $B.args(\"count\", 2, {self: null, x: null},\n [\"self\", \"x\"], arguments, {}, null, null)\n if(self.obj === null){return 0}\n return self.obj.filter(function(item){return item == x}).length\n}\n\narray.extend = function(self, iterable){\n $B.args(\"extend\", 2, {self: null, iterable: null},\n [\"self\", \"iterable\"], arguments, {}, null, null)\n if(iterable.__class__ === array){\n if(iterable.typecode !== self.typecode){\n throw _b_.TypeError.$factory(\"can only extend with array \" +\n \"of same kind\")\n }\n if(iterable.obj === null){return _b_.None}\n // create new object with length = sum of lengths\n var newobj = new typecodes[self.typecode](self.obj.length +\n iterable.obj.length)\n // copy self.obj\n newobj.set(self.obj)\n // copy iterable.obj\n newobj.set(iterable.obj, self.obj.length)\n self.obj = newobj\n }else{\n var it = _b_.iter(iterable)\n while(true){\n try{\n var item = _b_.next(it)\n array.append(self, item)\n }catch(err){\n if(err.__class__ !== _b_.StopIteration){\n throw err\n }\n break\n }\n }\n }\n return _b_.None\n}\n\narray.frombytes = function(self, s){\n $B.args(\"frombytes\", 2, {self: null, s: null},\n [\"self\", \"s\"], arguments, {}, null, null)\n if(! 
_b_.isinstance(s, _b_.bytes)){\n throw _b_.TypeError.$factory(\"a bytes-like object is required, \" +\n \"not '\" + $B.class_name(s) + \"'\")\n }\n self.obj = new typecodes[self.typecode](s.source)\n return None\n}\n\narray.fromlist = function(self, list){\n $B.args(\"fromlist\", 2, {self: null, list: null},\n [\"self\", \"list\"], arguments, {}, null, null)\n var it = _b_.iter(list)\n while(true){\n try{\n var item = _b_.next(it)\n try{\n array.append(self, item)\n }catch(err){\n console.log(err)\n return _b_.None\n }\n }catch(err){\n if(err.__class__ === _b_.StopIteration){\n return _b_.None\n }\n throw err\n }\n }\n}\n\narray.fromstring = array.frombytes\n\narray.index = function(self, x){\n $B.args(\"index\", 2, {self: null, x: null},\n [\"self\", \"x\"], arguments, {}, null, null)\n var res = self.obj.findIndex(function(item){return x == item})\n if(res == -1){\n throw _b_.ValueError.$factory(\"array.index(x): x not in array\")\n }\n return res\n}\n\narray.insert = function(self, i, value){\n $B.args(\"insert\", 3, {self: null, i: null, value: null},\n [\"self\", \"i\", \"value\"], arguments, {}, null, null)\n if(self.obj === null){\n self.obj = [value]\n }else{\n self.obj.splice(i, 0, value)\n }\n return _b_.None\n}\n\narray.itemsize = function(self){\n return typecodes[self.typecode].BYTES_PER_ELEMENT\n}\n\narray.pop = function(self, i){\n var $ = $B.args(\"count\", 2, {self: null, i: null},\n [\"self\", \"i\"], arguments, {i: -1}, null, null)\n i = $.i\n if(self.obj === null){\n throw _b_.IndexError.$factory(\"pop from empty array\")\n }else if(self.obj.length == 1){\n var res = self.obj[0]\n self.obj = null\n return res\n }\n i = normalize_index(self, i)\n // store value to return\n var res = self.obj[i]\n // create new array, size = previous size - 1\n var newobj = new typecodes[self.typecode](self.obj.length - 1)\n // fill new array with values until i excluded\n newobj.set(self.obj.slice(0, i))\n // fill with values after i\n newobj.set(self.obj.slice(i + 1), i)\n // set self.obj to new array\n self.obj = newobj\n // return stored value\n return res\n}\n\narray.remove = function(self, x){\n $B.args(\"remove\", 2, {self: null, x: null},\n [\"self\", \"x\"], arguments, {}, null, null)\n var res = self.obj.findIndex(function(item){return x == item})\n if(res == -1){\n throw _b_.ValueError.$factory(\"array.remove(x): x not in array\")\n }\n array.pop(self, res)\n return _b_.None\n}\n\narray.reverse = function(self){\n $B.args(\"reverse\", 1, {self: null},\n [\"self\"], arguments, {}, null, null)\n if(self.obj === null){return _b_.None}\n self.obj.reverse()\n return _b_.None\n}\n\narray.tobytes = function(self){\n $B.args(\"tobytes\", 1, {self: null},\n [\"self\"], arguments, {}, null, null)\n var items = Array.prototype.slice.call(self.obj),\n res = []\n items.forEach(function(item){\n while(item > 256){\n res.push(item % 256)\n item = Math.floor(item / 256)\n }\n res.push(item)\n })\n return _b_.bytes.$factory(res)\n}\n\narray.tolist = function(self){\n $B.args(\"tolist\", 1, {self: null},\n [\"self\"], arguments, {}, null, null)\n return Array.prototype.slice.call(self.obj)\n}\n\narray.tostring = array.tobytes\n\narray.typecode = function(self){\n return self.typecode\n}\n\n$B.set_func_names(array, \"array\")\n\nreturn {\n array: array,\n typecodes: Object.keys(typecodes).join('')\n}\n\n})(__BRYTHON__)\n"], "builtins": [".js", "var $module = (function(){\n var obj = {\n __class__: __BRYTHON__.module,\n __name__: 'builtins'\n },\n builtin_names = ['ArithmeticError', 
'AssertionError', 'AttributeError',\n 'BaseException', 'BlockingIOError', 'BrokenPipeError', 'BufferError',\n 'BytesWarning', 'ChildProcessError', 'ConnectionAbortedError',\n 'ConnectionError', 'ConnectionRefusedError', 'ConnectionResetError',\n 'DeprecationWarning', 'EOFError', 'Ellipsis', 'EnvironmentError', 'Exception',\n 'False', 'FileExistsError', 'FileNotFoundError', 'FloatingPointError',\n 'FutureWarning', 'GeneratorExit', 'IOError', 'ImportError', 'ImportWarning',\n 'IndentationError', 'IndexError', 'InterruptedError', 'IsADirectoryError',\n 'KeyError', 'KeyboardInterrupt', 'LookupError', 'MemoryError', 'NameError',\n 'None', 'NotADirectoryError', 'NotImplemented', 'NotImplementedError',\n 'OSError', 'OverflowError', 'PendingDeprecationWarning', 'PermissionError',\n 'ProcessLookupError', 'ReferenceError', 'ResourceWarning', 'RuntimeError',\n 'RuntimeWarning', 'StopIteration', 'SyntaxError', 'SyntaxWarning',\n 'SystemError', 'SystemExit', 'TabError', 'TimeoutError', 'True', 'TypeError',\n 'UnboundLocalError', 'UnicodeDecodeError', 'UnicodeEncodeError',\n 'UnicodeError', 'UnicodeTranslateError', 'UnicodeWarning', 'UserWarning',\n 'ValueError', 'Warning', 'WindowsError', 'ZeroDivisionError', '_',\n '__build_class__', '__debug__', '__doc__', '__import__', '__name__',\n '__package__', 'abs', 'all', 'any', 'ascii', 'bin', 'bool', 'bytearray',\n 'bytes','callable', 'chr', 'classmethod', 'compile', 'complex', 'copyright',\n 'credits','delattr', 'dict', 'dir', 'divmod', 'enumerate', 'eval', 'exec',\n 'exit', 'filter', 'float', 'format', 'frozenset', 'getattr', 'globals',\n 'hasattr', 'hash', 'help', 'hex', 'id', 'input', 'int', 'isinstance',\n 'issubclass', 'iter', 'len', 'license', 'list', 'locals', 'map', 'max',\n 'memoryview', 'min', 'next', 'object', 'oct', 'open', 'ord', 'pow', 'print',\n 'property', 'quit', 'range', 'repr', 'reversed', 'round', 'set', 'setattr',\n 'slice', 'sorted', 'staticmethod', 'str', 'sum', 'super', 'tuple', 'type',\n 'vars', 'zip',\n '__newobj__' // defined in py_objects.js ; required for pickle\n ]\n for(var i = 0, len = builtin_names.length; i < len; i++){\n try{eval(\"obj['\" + builtin_names[i] + \"'] = __BRYTHON__.builtins.\" +\n builtin_names[i])}\n catch(err){if (__BRYTHON__.$debug) {console.log(err)}}\n }\n return obj\n})()\n"], "dis": [".js", "var $module=(function($B){\n\nvar dict = $B.builtins.dict\nvar mod = {\n dis:function(src){\n $B.$py_module_path['__main__'] = $B.brython_path\n return __BRYTHON__.py2js(src,'__main__','__main__',\n $B.builtins_scope).to_js()\n },\n OPTIMIZED: 1,\n NEWLOCALS: 2,\n VARARGS: 4,\n VARKEYWORDS: 8,\n NESTED: 16,\n GENERATOR: 32,\n NOFREE: 64,\n COROUTINE: 128,\n ITERABLE_COROUTINE: 256,\n ASYNC_GENERATOR: 512,\n COMPILER_FLAG_NAMES: $B.builtins.dict.$factory()\n}\nmod.COMPILER_FLAG_NAMES = dict.$factory([\n [1, \"OPTIMIZED\"],\n [2, \"NEWLOCALS\"],\n [4, \"VARARGS\"],\n [8, \"VARKEYWORDS\"],\n [16, \"NESTED\"],\n [32, \"GENERATOR\"],\n [64, \"NOFREE\"],\n [128, \"COROUTINE\"],\n [256, \"ITERABLE_COROUTINE\"],\n [512, \"ASYNC_GENERATOR\"]\n])\n\nreturn mod\n\n})(__BRYTHON__)"], "hashlib": [".js", "var $module=(function($B){\n\nvar _b_ = $B.builtins\n\nvar $s = []\nfor(var $b in _b_){$s.push('var ' + $b +' = _b_[\"'+$b+'\"]')}\neval($s.join(';'))\n\nvar $mod = {\n\n __getattr__ : function(attr){\n if(attr == 'new'){return hash.$factory}\n return this[attr]\n },\n md5: function(obj){return hash.$factory('md5', obj)},\n sha1: function(obj){return hash.$factory('sha1', obj)},\n sha224: function(obj){return 
hash.$factory('sha224', obj)},\n sha256: function(obj){return hash.$factory('sha256', obj)},\n sha384: function(obj){return hash.$factory('sha384', obj)},\n sha512: function(obj){return hash.$factory('sha512', obj)},\n\n algorithms_guaranteed: ['md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512'],\n algorithms_available: ['md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512']\n}\n\n//todo: eventually move this function to a \"utility\" file or use ajax module?\nfunction $get_CryptoJS_lib(alg){\n if($B.VFS !== undefined && $B.VFS.hashlib){\n // use file in brython_stdlib.js\n var lib = $B.VFS[\"crypto_js.rollups.\" + alg]\n if (lib === undefined){\n throw _b_.ImportError.$factory(\"can't import hashlib.\" + alg)\n }\n var res = lib[1]\n try{\n eval(res + \"; $B.CryptoJS = CryptoJS;\")\n return\n }catch(err){\n throw Error(\"JS Eval Error\",\n \"Cannot eval CryptoJS algorithm '\" + alg + \"' : error:\" + err)\n }\n }\n\n var module = {__name__: 'CryptoJS', $is_package: false}\n var res = $B.$download_module(module, $B.brython_path + 'libs/crypto_js/rollups/' + alg + '.js');\n\n try{\n eval(res + \"; $B.CryptoJS = CryptoJS;\")\n }catch(err){\n throw Error(\"JS Eval Error\",\n \"Cannot eval CryptoJS algorithm '\" + alg + \"' : error:\" + err)\n }\n}\n\nfunction bytes2WordArray(obj){\n // Transform a bytes object into an instance of class WordArray\n // defined in CryptoJS\n if(!_b_.isinstance(obj, _b_.bytes)){\n throw _b_.TypeError(\"expected bytes, got \" + $B.class_name(obj))\n }\n\n var words = []\n for(var i = 0; i < obj.source.length; i += 4){\n var word = obj.source.slice(i, i + 4)\n while(word.length < 4){word.push(0)}\n var w = word[3] + (word[2] << 8) + (word[1] << 16) + (word[0] << 24)\n words.push(w)\n }\n return {words: words, sigBytes: obj.source.length}\n}\n\nvar hash = {\n __class__: _b_.type,\n __mro__: [_b_.object],\n $infos:{\n __name__: 'hash'\n }\n}\n\nhash.update = function(self, msg){\n self.hash.update(bytes2WordArray(msg))\n}\n\nhash.copy = function(self){\n return self.hash.clone()\n}\n\nhash.digest = function(self){\n var obj = self.hash.clone().finalize().toString(),\n res = []\n for(var i = 0; i < obj.length; i += 2){\n res.push(parseInt(obj.substr(i, 2), 16))\n }\n return _b_.bytes.$factory(res)\n}\n\nhash.hexdigest = function(self) {\n return self.hash.clone().finalize().toString()\n}\n\nhash.$factory = function(alg, obj) {\n var res = {\n __class__: hash\n }\n\n switch(alg) {\n case 'md5':\n case 'sha1':\n case 'sha224':\n case 'sha256':\n case 'sha384':\n case 'sha512':\n var ALG = alg.toUpperCase()\n if($B.Crypto === undefined ||\n $B.CryptoJS.algo[ALG] === undefined){$get_CryptoJS_lib(alg)}\n\n res.hash = $B.CryptoJS.algo[ALG].create()\n if(obj !== undefined){\n res.hash.update(bytes2WordArray(obj))\n }\n break\n default:\n throw $B.builtins.AttributeError.$factory('Invalid hash algorithm: ' + alg)\n }\n\n return res\n}\n\nreturn $mod\n\n})(__BRYTHON__)\n"], "long_int": [".js", "/*\nModule to manipulate long integers\n*/\n\nvar $module=(function($B){\n\neval($B.InjectBuiltins())\n\nvar $LongIntDict = {__class__:$B.$type,__name__:'LongInt'}\n\nfunction add_pos(v1, v2){\n // Add two positive numbers\n // v1, v2 : strings\n // Return an instance of LongInt\n\n var res = '', carry = 0, iself=v1.length, sv=0\n for(var i=v2.length-1;i>=0;i--){\n iself--\n if(iself<0){sv=0}else{sv=parseInt(v1.charAt(iself))}\n x = (carry+sv+parseInt(v2.charAt(i))).toString()\n if(x.length==2){res=x.charAt(1)+res;carry=parseInt(x.charAt(0))}\n else{res=x+res;carry=0}\n }\n 
while(iself>0){\n iself--\n x = (carry+parseInt(v1.charAt(iself))).toString()\n if(x.length==2){res=x.charAt(1)+res;carry=parseInt(x.charAt(0))}\n else{res=x+res;carry=0}\n }\n if(carry){res=carry+res} \n return {__class__:$LongIntDict, value:res, pos:true}\n}\n\nfunction check_shift(shift){\n // Check the argument of >> and <<\n if(!isinstance(shift, LongInt)){\n throw TypeError(\"shift must be LongInt, not '\"+\n $B.get_class(shift).__name__+\"'\")\n }\n if(!shift.pos){throw ValueError(\"negative shift count\")}\n}\n\nfunction clone(obj){\n // Used for traces\n var obj1 = {}\n for(var attr in obj){obj1[attr]=obj[attr]}\n return obj1\n}\n\nfunction comp_pos(v1, v2){\n // Compare two positive numbers\n if(v1.length>v2.length){return 1}\n else if(v1.lengthv2){return 1}\n else if(v11){\n // If the value in cols[i] has more than one digit, only keep the\n // last one and report the others at the right index\n // For instance if cols[i] = 123, keep 3 in cols[i], add 2 to\n // cols[i-1] and 1 to cols[i-2]\n cols[i] = parseInt(col.charAt(col.length-1))\n j = col.length\n while(j-->1){\n var report = parseInt(col.charAt(j-1))\n var pos = i-col.length+j\n if(cols[pos]===undefined){cols[pos]=report}\n else{cols[pos] += report}\n }\n }\n }\n\n // Find minimum index in cols\n // The previous loop may have introduced negative indices\n var imin\n for(var attr in cols){\n i = parseInt(attr)\n if(imin===undefined){imin=i}\n else if(i=v2\n\n var res = '', carry = 0, i1=v1.length, sv=0\n \n // For all digits in v2, starting by the rightmost, substract it from\n // the matching digit in v1\n // This is the equivalent of the manual operation :\n // 12345678\n // - 98765\n //\n // We begin by the rightmost operation : 8-5 (3, no carry),\n // then 7-6 (1, no carry)\n // then 6-7 (9, carry 1) and so on\n for(var i=v2.length-1;i>=0;i--){\n i1--\n sv = parseInt(v1.charAt(i1))\n x = (sv-carry-parseInt(v2.charAt(i)))\n if(x<0){res=(10+x)+res;carry=1}\n else{res=x+res;carry=0}\n }\n \n // If there are remaining digits in v1, substract the carry, if any\n while(i1>0){\n i1--\n x = (parseInt(v1.charAt(i1))-carry)\n if(x<0){res=(10+x)+res;carry=1}\n else{res=x+res;carry=0}\n }\n\n // Remove leading zeros and return the result\n while(res.charAt(0)=='0' && res.length>1){res=res.substr(1)}\n return {__class__:$LongIntDict, value:res, pos:true}\n}\n\n// Special methods to implement operations on instances of LongInt\n\n$LongIntDict.__abs__ = function(self){\n return {__class__:$LongIntDict, value: self.value, pos:true}\n}\n\n$LongIntDict.__add__ = function(self, other){\n if (typeof other == 'number') other=LongInt(_b_.str.$factory(other))\n // Addition of \"self\" and \"other\"\n // If both have the same sign (+ or -) we add their absolute values\n // If they have different sign we use the substraction of their\n // absolute values\n var res\n if(self.pos&&other.pos){ // self > 0, other > 0\n return add_pos(self.value, other.value)\n }else if(!self.pos&&!other.pos){ // self < 0, other < 0\n res = add_pos(self.value, other.value)\n res.pos = false\n return res\n }else if(self.pos && !other.pos){ // self > 0, other < 0\n switch (comp_pos(self.value, other.value)){\n case 1:\n res = sub_pos(self.value, other.value)\n break\n case 0:\n res = {__class__:$LongIntDict, value:0, pos:true}\n break\n case -1:\n res = sub_pos(other.value, self.value)\n res.pos = false\n break\n }\n return res\n }else{ // self < 0, other > 0\n switch(comp_pos(self.value, other.value)){\n case 1:\n res = sub_pos(self.value, other.value)\n res.pos = 
false\n break\n case 0:\n res = {__class__:$LongIntDict, value:0, pos:true}\n break\n case -1:\n res = sub_pos(other.value, self.value)\n break\n }\n return res\n }\n}\n\n$LongIntDict.__and__ = function(self, other){\n if (typeof other == 'number') other=LongInt(_b_.str.$factory(other))\n // Bitwise \"and\" : build the binary representation of self and other\n var v1 = $LongIntDict.__index__(self)\n var v2 = $LongIntDict.__index__(other)\n // apply \"and\" on zeros and ones\n if(v1.lengthother.value.length){return true}\n else if(self.value.length= other.value}\n}\n\n$LongIntDict.__gt__ = function(self, other){\n return !$LongIntDict.__le__(self, other)\n}\n\n$LongIntDict.__index__ = function(self){\n // Used by bin()\n // returns a string with the binary value of self\n // The algorithm computes the result of the floor division of self by 2\n \n // XXX to do : negative integers\n \n var res = '', pos=self.value.length,\n temp = self.value, d\n while(true){\n d = divmod_pos(temp, '2')\n res = d[1].value + res\n temp = d[0].value\n if(temp=='0'){break}\n }\n return res\n}\n\n$LongIntDict.__invert__ = function(self){\n var bin = $LongIntDict.__index__(self)\n var res = ''\n for(var i=0;iother.value.length){return false}\n else if(self.value.length=0;i--){\n x = (carry+parseInt(res.charAt(i))*2).toString()\n if(x.length==2){res1=x.charAt(1)+res1;carry=parseInt(x.charAt(0))}\n else{res1=x+res1;carry=0}\n }\n if(carry){res1=carry+res1}\n res=res1\n shift = sub_pos(shift.value, '1')\n if(shift.value=='0'){break}\n }\n return {__class__:$LongIntDict, value:res, pos:self.pos}\n}\n\n$LongIntDict.__mod__ = function(self, other){\n return $LongIntDict.__divmod__(self, other)[1]\n}\n\n$LongIntDict.__mro__ = [_b_.object]\n\n$LongIntDict.__mul__ = function(self, other){\n if (typeof other == 'number') other=LongInt(_b_.str.$factory(other))\n var res = mul_pos(self.value, other.value)\n if(self.pos==other.pos){return res}\n res.pos = false\n return res\n}\n\n$LongIntDict.__neg__ = function(obj){\n return {__class__:$LongIntDict, value:obj.value, pos:!obj.pos}\n}\n\n$LongIntDict.__or__ = function(self, other){\n var v1 = $LongIntDict.__index__(self)\n var v2 = $LongIntDict.__index__(other)\n if(v1.length0){\n var dm = divmod_pos(v, base.toString())\n res = parseInt(dm[1].value).toString(base)+res\n v = dm[0].value\n if(v==0){break}\n }\n return res\n}\n\nfunction digits(base){\n // Return an object where keys are all the digits valid in specified base\n // and value is \"true\"\n // Used to test if the string passed as first argument to LongInt is valid\n var is_digits = {}\n // Number from 0 to base, or from 0 to 9 if base > 10\n for(var i=0;i10){\n // Additional letters\n // For instance in base 16, add \"abcdefABCDEF\" as keys\n for(var i=0;i2){\n throw _b_.TypeError(\"LongInt takes at most 2 arguments (\"+\n arguments.length+\" given)\")\n }\n // base defaults to 10\n if(base===undefined){base = 10}\n else if(!isinstance(base, int)){\n throw TypeError(\"'\"+$B.get_class(base).__name__+\"' object cannot be interpreted as an integer\")\n }\n if(base<0 || base==1 || base>36){\n throw ValueError(\"LongInt() base must be >= 2 and <= 36\")\n }\n if(isinstance(value, float)){\n if(value>=0){value=Math.round(value.value)}\n else{value=Math.ceil(value.value)}\n }\n if(typeof value=='number'){\n if(isSafeInteger(value)){value = value.toString()}\n else{throw ValueError(\"argument of long_int is not a safe integer\")}\n }else if(typeof value!='string'){\n throw ValueError(\"argument of long_int must be a string, 
not \"+\n $B.get_class(value).__name__)\n }\n var has_prefix = false, pos = true, start = 0\n // Strip leading and trailing whitespaces\n while(value.charAt(0)==' ' && value.length){value = value.substr(1)}\n while(value.charAt(value.length-1)==' ' && value.length){\n value = value.substr(0, value.length-1)\n }\n // Check if string starts with + or -\n if(value.charAt(0)=='+'){has_prefix=true}\n else if(value.charAt(0)=='-'){has_prefix=true;pos=false}\n if(has_prefix){\n // Remove prefix\n if(value.length==1){\n // \"+\" or \"-\" alone are not valid arguments\n throw ValueError('LongInt argument is not a valid number: \"'+value+'\"')\n }else{value=value.substr(1)}\n }\n // Ignore leading zeros\n while(start Math.pow(2, 32)}\n\n// Big number Library from jsfromhell.com\n// This library helps with producing \"correct\" results from\n// mathematic operations\n\n//+ Jonas Raoni Soares Silva\n//@ http://jsfromhell.com/classes/bignumber [rev. #4]\n\n\nvar BigNumber = function(n, p, r){\n var o = this, i\n if(n instanceof BigNumber){\n for(i in {precision: 0, roundType: 0, _s: 0, _f: 0}){o[i] = n[i]}\n o._d = n._d.slice()\n return\n }\n o.precision = isNaN(p = Math.abs(p)) ? BigNumber.defaultPrecision : p\n o.roundType = isNaN(r = Math.abs(r)) ? BigNumber.defaultRoundType : r\n o._s = (n += \"\").charAt(0) == \"-\"\n o._f = ((n = n.replace(/[^\\d.]/g, \"\").split(\".\", 2))[0] =\n n[0].replace(/^0+/, \"\") || \"0\").length\n for(i = (n = o._d = (n.join(\"\") || \"0\").split(\"\")).length; i;\n n[--i] = +n[i]){}\n o.round()\n}\nwith({$: BigNumber, o: BigNumber.prototype}){\n $.ROUND_HALF_EVEN = ($.ROUND_HALF_DOWN = ($.ROUND_HALF_UP =\n ($.ROUND_FLOOR = ($.ROUND_CEIL = ($.ROUND_DOWN = ($.ROUND_UP = 0) + 1) +\n 1) + 1) + 1) + 1) + 1\n $.defaultPrecision = 40\n $.defaultRoundType = $.ROUND_HALF_UP\n o.add = function(n){\n if(this._s != (n = new BigNumber(n))._s){\n return n._s ^= 1, this.subtract(n)\n }\n var o = new BigNumber(this),\n a = o._d,\n b = n._d,\n la = o._f,\n lb = n._f,\n n = Math.max(la, lb),\n i,\n r\n la != lb && ((lb = la - lb) > 0 ? o._zeroes(b, lb, 1) :\n o._zeroes(a, -lb, 1))\n i = (la = a.length) == (lb = b.length) ? a.length :\n ((lb = la - lb) > 0 ? o._zeroes(b, lb) : o._zeroes(a, -lb)).length\n for(r = 0; i; r = (a[--i] = a[i] + b[i] + r) / 10 >>> 0, a[i] %= 10){}\n return r && ++n && a.unshift(r), o._f = n, o.round()\n };\n o.subtract = function(n){\n if(this._s != (n = new BigNumber(n))._s)\n return n._s ^= 1, this.add(n);\n var o = new BigNumber(this),\n c = o.abs().compare(n.abs()) + 1,\n a = c ? o : n,\n b = c ? n : o,\n la = a._f,\n lb = b._f,\n d = la,\n i,\n j;\n a = a._d, b = b._d, la != lb && ((lb = la - lb) > 0 ? o._zeroes(b, lb, 1) : o._zeroes(a, -lb, 1));\n for(i = (la = a.length) == (lb = b.length) ? a.length : ((lb = la - lb) > 0 ? o._zeroes(b, lb) : o._zeroes(a, -lb)).length; i;){\n if(a[--i] < b[i]){\n for(j = i; j && !a[--j]; a[j] = 9);\n --a[j], a[i] += 10;\n }\n b[i] = a[i] - b[i];\n }\n return c || (o._s ^= 1), o._f = d, o._d = b, o.round();\n };\n o.multiply = function(n){\n var o = new BigNumber(this), r = o._d.length >= (n = new BigNumber(n))._d.length, a = (r ? o : n)._d,\n b = (r ? 
n : o)._d, la = a.length, lb = b.length, x = new BigNumber, i, j, s;\n for(i = lb; i; r && s.unshift(r), x.set(x.add(new BigNumber(s.join(\"\")))))\n for(s = (new Array(lb - --i)).join(\"0\").split(\"\"), r = 0, j = la; j; r += a[--j] * b[i], s.unshift(r % 10), r = (r / 10) >>> 0);\n return o._s = o._s != n._s, o._f = ((r = la + lb - o._f - n._f) >= (j = (o._d = x._d).length) ? this._zeroes(o._d, r - j + 1, 1).length : j) - r, o.round();\n };\n o.divide = function(n){\n if((n = new BigNumber(n)) == \"0\")\n throw new Error(\"Division by 0\");\n else if(this == \"0\")\n return new BigNumber;\n var o = new BigNumber(this), a = o._d, b = n._d, la = a.length - o._f,\n lb = b.length - n._f, r = new BigNumber, i = 0, j, s, l, f = 1, c = 0, e = 0;\n r._s = o._s != n._s, r.precision = Math.max(o.precision, n.precision),\n r._f = +r._d.pop(), la != lb && o._zeroes(la > lb ? b : a, Math.abs(la - lb));\n n._f = b.length, b = n, b._s = false, b = b.round();\n for(n = new BigNumber; a[0] == \"0\"; a.shift());\n out:\n do{\n for(l = c = 0, n == \"0\" && (n._d = [], n._f = 0); i < a.length && n.compare(b) == -1; ++i){\n (l = i + 1 == a.length, (!f && ++c > 1 || (e = l && n == \"0\" && a[i] == \"0\")))\n && (r._f == r._d.length && ++r._f, r._d.push(0));\n (a[i] == \"0\" && n == \"0\") || (n._d.push(a[i]), ++n._f);\n if(e)\n break out;\n if((l && n.compare(b) == -1 && (r._f == r._d.length && ++r._f, 1)) || (l = 0))\n while(r._d.push(0), n._d.push(0), ++n._f, n.compare(b) == -1);\n }\n if(f = 0, n.compare(b) == -1 && !(l = 0))\n while(l ? r._d.push(0) : l = 1, n._d.push(0), ++n._f, n.compare(b) == -1);\n for(s = new BigNumber, j = 0; n.compare(y = s.add(b)) + 1 && ++j; s.set(y));\n n.set(n.subtract(s)), !l && r._f == r._d.length && ++r._f, r._d.push(j);\n }\n while((i < a.length || n != \"0\") && (r._d.length - r._f) <= r.precision);\n return r.round();\n };\n o.mod = function(n){\n return this.subtract(this.divide(n).intPart().multiply(n));\n };\n o.pow = function(n){\n var o = new BigNumber(this), i;\n if((n = (new BigNumber(n)).intPart()) == 0) return o.set(1);\n for(i = Math.abs(n); --i; o.set(o.multiply(this)));\n return n < 0 ? o.set((new BigNumber(1)).divide(o)) : o;\n };\n o.set = function(n){\n return this.constructor(n), this;\n };\n o.compare = function(n){\n var a = this, la = this._f, b = new BigNumber(n), lb = b._f, r = [-1, 1], i, l;\n if(a._s != b._s)\n return a._s ? -1 : 1;\n if(la != lb)\n return r[(la > lb) ^ a._s];\n for(la = (a = a._d).length, lb = (b = b._d).length, i = -1, l = Math.min(la, lb); ++i < l;)\n if(a[i] != b[i])\n return r[(a[i] > b[i]) ^ a._s];\n return la != lb ? r[(la > lb) ^ a._s] : 0;\n };\n o.negate = function(){\n var n = new BigNumber(this); return n._s ^= 1, n;\n };\n o.abs = function(){\n var n = new BigNumber(this); return n._s = 0, n;\n };\n o.intPart = function(){\n return new BigNumber((this._s ? \"-\" : \"\") + (this._d.slice(0, this._f).join(\"\") || \"0\"));\n };\n o.valueOf = o.toString = function(){\n var o = this;\n return (o._s ? \"-\" : \"\") + (o._d.slice(0, o._f).join(\"\") || \"0\") + (o._f != o._d.length ? 
\".\" + o._d.slice(o._f).join(\"\") : \"\");\n };\n o._zeroes = function(n, l, t){\n var s = [\"push\", \"unshift\"][t || 0];\n for(++l; --l; n[s](0));\n return n;\n };\n o.round = function(){\n if(\"_rounding\" in this) return this;\n var $ = BigNumber, r = this.roundType, b = this._d, d, p, n, x;\n for(this._rounding = true; this._f > 1 && !b[0]; --this._f, b.shift());\n for(d = this._f, p = this.precision + d, n = b[p]; b.length > d && !b[b.length -1]; b.pop());\n x = (this._s ? \"-\" : \"\") + (p - d ? \"0.\" + this._zeroes([], p - d - 1).join(\"\") : \"\") + 1;\n if(b.length > p){\n n && (r == $.DOWN ? false : r == $.UP ? true : r == $.CEIL ? !this._s\n : r == $.FLOOR ? this._s : r == $.HALF_UP ? n >= 5 : r == $.HALF_DOWN ? n > 5\n : r == $.HALF_EVEN ? n >= 5 && b[p - 1] & 1 : false) && this.add(x);\n b.splice(p, b.length - p);\n }\n return delete this._rounding, this;\n };\n}\n\nvar isNegZero = function(x) {return x === 0 && Math.atan2(x,x) < 0}\n\nvar _mod = {\n __getattr__: function(attr){\n $B.check_nb_args('__getattr__ ', 1, arguments)\n $B.check_no_kw('__getattr__ ', attr)\n\n var res = this[attr]\n if(res === undefined){\n throw _b_.AttributeError.$factory(\n 'module math has no attribute ' + attr)\n }\n return res\n },\n acos: function(x){\n $B.check_nb_args('acos', 1, arguments)\n $B.check_no_kw('acos', x)\n return float.$factory(Math.acos(float_check(x)))\n },\n acosh: function(x){\n $B.check_nb_args('acosh', 1, arguments)\n $B.check_no_kw('acosh', x)\n\n if(_b_.$isinf(x)){return float.$factory('inf')}\n var y = float_check(x)\n return float.$factory(Math.log(y + Math.sqrt(y * y - 1)))\n },\n asin: function(x){\n $B.check_nb_args('asin', 1, arguments)\n $B.check_no_kw('asin', x)\n return float.$factory(Math.asin(float_check(x)))\n },\n asinh: function(x){\n $B.check_nb_args('asinh', 1, arguments)\n $B.check_no_kw('asinh', x)\n\n if(_b_.$isninf(x)){return float.$factory('-inf')}\n if(_b_.$isinf(x)){return float.$factory('inf')}\n var y = float_check(x)\n if(y == 0 && 1 / y === -Infinity){\n return new Number(-0.0)\n }\n return float.$factory(Math.asinh(y))\n },\n atan: function(x){\n $B.check_nb_args('atan', 1, arguments)\n $B.check_no_kw('atan', x)\n\n if(_b_.$isninf(x)){return float.$factory(-Math.PI / 2)}\n if(_b_.$isinf(x)){return float.$factory(Math.PI / 2)}\n return float.$factory(Math.atan(float_check(x)))\n },\n atan2: function(y, x){\n $B.check_nb_args('atan2', 2, arguments)\n $B.check_no_kw('atan2', y, x)\n\n return float.$factory(Math.atan2(float_check(y), float_check(x)))\n },\n atanh: function(x){\n $B.check_nb_args('atanh', 1, arguments)\n $B.check_no_kw('atanh', x)\n\n var y = float_check(x)\n if(y == 0){return 0}\n return float.$factory(0.5 * Math.log((1 / y + 1)/(1 / y - 1)));\n },\n ceil: function(x){\n $B.check_nb_args('ceil', 1, arguments)\n $B.check_no_kw('ceil', x)\n\n try{return getattr(x, '__ceil__')()}catch(err){}\n\n if(_b_.$isninf(x)){return float.$factory('-inf')}\n if(_b_.$isinf(x)){return float.$factory('inf')}\n if(isNaN(x)){return float.$factory('nan')}\n\n var y = float_check(x)\n if(! 
isNaN(parseFloat(y)) && isFinite(y)){\n return int.$factory(Math.ceil(y))\n }\n\n throw _b_.ValueError.$factory(\n 'object is not a number and does not contain __ceil__')\n },\n comb: function(n, k){\n $B.check_nb_args('comb', 2, arguments)\n $B.check_no_kw('comb', n, k)\n\n // raise TypeError if n or k is not an integer\n check_int(n)\n check_int(k)\n\n if(k < 0){\n throw _b_.ValueError.$factory(\"k must be a non-negative integer\")\n }\n if(n < 0){\n throw _b_.ValueError.$factory(\"n must be a non-negative integer\")\n }\n\n if(k > n){\n return 0\n }\n // Evaluates to n! / (k! * (n - k)!)\n var fn = _mod.factorial(n),\n fk = _mod.factorial(k),\n fn_k = _mod.factorial(n - k)\n return $B.floordiv(fn, $B.mul(fk, fn_k))\n },\n copysign: function(x, y){\n $B.check_nb_args('copysign', 2, arguments)\n $B.check_no_kw('copysign', x,y)\n\n var x1 = Math.abs(float_check(x))\n var y1 = float_check(y)\n var sign = Math.sign(y1)\n sign = (sign == 1 || Object.is(sign, +0)) ? 1 : - 1\n return float.$factory(x1 * sign)\n },\n cos : function(x){\n $B.check_nb_args('cos ', 1, arguments)\n $B.check_no_kw('cos ', x)\n return float.$factory(Math.cos(float_check(x)))\n },\n cosh: function(x){\n $B.check_nb_args('cosh', 1, arguments)\n $B.check_no_kw('cosh', x)\n\n if(_b_.$isinf(x)) {return float.$factory('inf')}\n var y = float_check(x)\n if(Math.cosh !== undefined){return float.$factory(Math.cosh(y))}\n return float.$factory((Math.pow(Math.E, y) +\n Math.pow(Math.E, -y)) / 2)\n },\n degrees: function(x){\n $B.check_nb_args('degrees', 1, arguments)\n $B.check_no_kw('degrees', x)\n return float.$factory(float_check(x) * 180 / Math.PI)\n },\n dist: function(p, q){\n $B.check_nb_args('dist', 2, arguments)\n $B.check_no_kw('dist', p, q)\n var itp = _b_.iter(p),\n itq = _b_.iter(q),\n res = 0\n while(true){\n try{\n var next_p = _b_.next(itp)\n }catch(err){\n if(err.__class__ === _b_.StopIteration){\n // check that the other iterator is also exhausted\n try{\n var next_q = _b_.next(itq)\n throw _b_.ValueError.$factory(\"both points must have \" +\n \"the same number of dimensions\")\n }catch(err){\n if(err.__class__ === _b_.StopIteration){\n if(typeof res == \"number\" || res instanceof Number){\n return Math.sqrt(res)\n }else{\n return Math.sqrt(parseInt(res.value))\n }\n }\n throw err\n }\n }\n throw err\n }\n try{\n var next_q = _b_.next(itq),\n diff = $B.sub(next_p, next_q)\n res = $B.add(res, $B.mul(diff, diff))\n }catch(err){\n if(err.__class__ === _b_.StopIteration){\n throw _b_.ValueError.$factory(\"both points must have \" +\n \"the same number of dimensions\")\n }\n throw err\n }\n }\n },\n e: float.$factory(Math.E),\n erf: function(x){\n $B.check_nb_args('erf', 1, arguments)\n $B.check_no_kw('erf', x)\n\n // inspired from\n // http://stackoverflow.com/questions/457408/is-there-an-easily-available-implementation-of-erf-for-python\n var y = float_check(x)\n var t = 1.0 / (1.0 + 0.5 * Math.abs(y))\n var ans = 1 - t * Math.exp( -y * y - 1.26551223 +\n t * ( 1.00002368 +\n t * ( 0.37409196 +\n t * ( 0.09678418 +\n t * (-0.18628806 +\n t * ( 0.27886807 +\n t * (-1.13520398 +\n t * ( 1.48851587 +\n t * (-0.82215223 +\n t * 0.17087277)))))))))\n if(y >= 0.0){return ans}\n return -ans\n },\n erfc: function(x){\n\n $B.check_nb_args('erfc', 1, arguments)\n $B.check_no_kw('erfc', x)\n\n // inspired from\n // http://stackoverflow.com/questions/457408/is-there-an-easily-available-implementation-of-erf-for-python\n var y = float_check(x)\n var t = 1.0 / (1.0 + 0.5 * Math.abs(y))\n var ans = 1 - t * Math.exp( -y * y - 
1.26551223 +\n t * ( 1.00002368 +\n t * ( 0.37409196 +\n t * ( 0.09678418 +\n t * (-0.18628806 +\n t * ( 0.27886807 +\n t * (-1.13520398 +\n t * ( 1.48851587 +\n t * (-0.82215223 +\n t * 0.17087277)))))))))\n if(y >= 0.0){return 1 - ans}\n return 1 + ans\n },\n exp: function(x){\n $B.check_nb_args('exp', 1, arguments)\n $B.check_no_kw('exp', x)\n\n if(_b_.$isninf(x)){return float.$factory(0)}\n if(_b_.$isinf(x)){return float.$factory('inf')}\n var _r = Math.exp(float_check(x))\n if(_b_.$isinf(_r)){throw _b_.OverflowError.$factory(\"math range error\")}\n return float.$factory(_r)\n },\n expm1: function(x){\n $B.check_nb_args('expm1', 1, arguments)\n $B.check_no_kw('expm1', x)\n\n if(_b_.$isninf(x)){return float.$factory(0)}\n if(_b_.$isinf(x)){return float.$factory('inf')}\n var _r = Math.expm1(float_check(x))\n if(_b_.$isinf(_r)){throw _b_.OverflowError.$factory(\"math range error\")}\n return float.$factory(_r)\n },\n //fabs: function(x){ return x>0?float.$factory(x):float.$factory(-x)},\n fabs: function(x){\n $B.check_nb_args('fabs', 1, arguments)\n $B.check_no_kw('fabs', x)\n return _b_.$fabs(x) // located in py_float.js\n },\n factorial: function(x){\n $B.check_nb_args('factorial', 1, arguments)\n $B.check_no_kw('factorial', x)\n\n //using code from http://stackoverflow.com/questions/3959211/fast-factorial-function-in-javascript\n if(! check_int_or_round_float(x)){\n throw _b_.ValueError.$factory(\"factorial() only accepts integral values\")\n }else if($B.rich_comp(\"__lt__\", x, 0)){\n throw _b_.ValueError.$factory(\"factorial() not defined for negative values\")\n }\n var r = 1\n for(var i = 2; i <= x; i++){\n r = $B.mul(r, i)\n }\n return r\n },\n floor: function(x){\n $B.check_nb_args('floor', 1, arguments)\n $B.check_no_kw('floor', x)\n return Math.floor(float_check(x))\n },\n fmod: function(x,y){\n $B.check_nb_args('fmod', 2, arguments)\n $B.check_no_kw('fmod', x,y)\n return float.$factory(float_check(x) % float_check(y))\n },\n frexp: function(x){\n $B.check_nb_args('frexp', 1, arguments)\n $B.check_no_kw('frexp', x)\n\n var _l = _b_.$frexp(x)\n return _b_.tuple.$factory([float.$factory(_l[0]), _l[1]])\n },\n fsum: function(x){\n $B.check_nb_args('fsum', 1, arguments)\n $B.check_no_kw('fsum', x)\n\n /* Translation into Javascript of the function msum in an Active\n State Cookbook recipe : https://code.activestate.com/recipes/393090/\n by Raymond Hettinger\n */\n var partials = [],\n res = new Number(),\n _it = _b_.iter(x)\n while(true){\n try{\n var x = _b_.next(_it),\n i = 0\n for(var j = 0, len = partials.length; j < len; j++){\n var y = partials[j]\n if(Math.abs(x) < Math.abs(y)){\n var z = x\n x = y\n y = z\n }\n var hi = x + y,\n lo = y - (hi - x)\n if(lo){\n partials[i] = lo\n i++\n }\n x = hi\n }\n partials = partials.slice(0, i).concat([x])\n }catch(err){\n if(_b_.isinstance(err, _b_.StopIteration)){break}\n throw err\n }\n }\n var res = new Number(0)\n for(var i = 0; i < partials.length; i++){\n res += new Number(partials[i])\n }\n return new Number(res)\n },\n gamma: function(x){\n $B.check_nb_args('gamma', 1, arguments)\n $B.check_no_kw('gamma', x)\n\n if(_b_.isinstance(x, int)){\n if(i < 1){\n throw _b_.ValueError.$factory(\"math domain error\")\n }\n var res = 1\n for(var i = 1; i < x; i++){res *= i}\n return new Number(res)\n }\n // Adapted from https://en.wikipedia.org/wiki/Lanczos_approximation\n var p = [676.5203681218851,\n -1259.1392167224028,\n 771.32342877765313,\n -176.61502916214059,\n 12.507343278686905,\n -0.13857109526572012,\n 9.9843695780195716e-6,\n 
1.5056327351493116e-7\n ]\n\n var EPSILON = 1e-07\n function drop_imag(z){\n if(Math.abs(z.imag) <= EPSILON){\n z = z.real\n }\n return z\n }\n var z = x\n if(z < 0.5){\n var y = Math.PI / (Math.sin(Math.PI * z) * _mod.gamma(1-z)) // Reflection formula\n }else{\n z -= 1\n var x = 0.99999999999980993,\n i = 0\n for(var i = 0, len = p.length; i < len; i++){\n var pval = p[i]\n x += pval / (z + i + 1)\n }\n var t = z + p.length - 0.5,\n sq = Math.sqrt(2 * Math.PI),\n y = sq * Math.pow(t, (z + 0.5)) * Math.exp(-t) * x\n }\n return drop_imag(y)\n },\n gcd: function(){\n var $ = $B.args(\"gcd\", 2, {a: null, b: null}, ['a', 'b'],\n arguments, {}, null, null),\n a = $B.PyNumber_Index($.a),\n b = $B.PyNumber_Index($.b)\n if(a == 0 && b == 0){return 0}\n // https://stackoverflow.com/questions/17445231/js-how-to-find-the-greatest-common-divisor\n a = _b_.abs(a)\n b = _b_.abs(b)\n if($B.rich_comp(\"__gt__\", b, a)){\n var temp = a\n a = b\n b = temp\n }\n while(true){\n if(b == 0){\n return a\n }\n a = $B.rich_op(\"mod\", a, b)\n if(a == 0){\n return b\n }\n b = $B.rich_op(\"mod\", b, a)\n }\n },\n hypot: function(x, y){\n var $ = $B.args(\"hypot\", 2, {x: null, y:null}, ['x', 'y'],\n arguments, {}, \"args\", null),\n args = [x, y].concat($.args),\n res = 0\n return float.$factory(Math.hypot(...args))\n },\n inf: float.$factory('inf'),\n isclose: function(){\n var $ns = $B.args(\"isclose\",\n 4,\n {a: null, b: null, rel_tol: null, abs_tol: null},\n ['a', 'b', 'rel_tol', 'abs_tol'],\n arguments,\n {rel_tol: 1e-09, abs_tol: 0.0},\n '*',\n null)\n var a = $ns['a'],\n b = $ns['b'],\n rel_tol = $ns['rel_tol'],\n abs_tol = $ns['abs_tol']\n if(rel_tol < 0.0 || abs_tol < 0.0){\n throw ValueError.$factory('tolerances must be non-negative')\n }\n if(typeof a !== \"number\" || typeof b !== \"number\"){\n if(! _b_.isinstance(a, [_b_.float, _b_.int]) ||\n ! 
_b_.isinstance(b, [_b_.float, _b_.int])){\n throw _b_.TypeError.$factory(\"must be real number, not str\")\n }\n }\n if(a == b){\n return True\n }\n if(_b_.$isinf(a) || _b_.$isinf(b)){\n return a === b\n }\n var diff = _b_.$fabs(b - a)\n var result = (\n (diff <= _b_.$fabs(rel_tol * b)) ||\n (diff <= _b_.$fabs(rel_tol * a))\n ) || (diff <= _b_.$fabs(abs_tol)\n )\n return result\n },\n isfinite: function(x){\n $B.check_nb_args('isfinite', 1, arguments)\n $B.check_no_kw('isfinite', x)\n return isFinite(float_check(x))\n },\n isinf: function(x){\n $B.check_nb_args('isinf', 1, arguments)\n $B.check_no_kw('isinf', x)\n return _b_.$isinf(float_check(x))\n },\n isnan: function(x){\n $B.check_nb_args('isnan', 1, arguments)\n $B.check_no_kw('isnan', x)\n return isNaN(float_check(x))\n },\n isqrt: function(x){\n $B.check_nb_args('isqrt', 1, arguments)\n $B.check_no_kw('isqrt', x)\n\n x = $B.PyNumber_Index(x)\n if($B.rich_comp(\"__lt__\", x, 0)){\n throw _b_.ValueError.$factory(\n \"isqrt() argument must be nonnegative\")\n }\n if(typeof x == \"number\"){\n return Math.floor(Math.sqrt(x))\n }else{ // big integer\n var v = parseInt(x.value),\n candidate = Math.floor(Math.sqrt(v)),\n c1\n // Use successive approximations : sqr = (sqr + (x / sqr)) / 2\n // Limit to 100 iterations\n for(var i = 0; i < 100; i++){\n c1 = $B.floordiv($B.add(candidate,\n $B.floordiv(x, candidate)), 2)\n if(c1 === candidate || c1.value === candidate.value){\n break\n }\n candidate = c1\n }\n if($B.rich_comp(\"__gt__\", $B.mul(candidate, candidate), x)){\n // Result might be greater by 1\n candidate = $B.sub(candidate, 1)\n }\n return candidate\n }\n },\n ldexp: function(x, i){\n $B.check_nb_args('ldexp', 2, arguments)\n $B.check_no_kw('ldexp', x, i)\n return _b_.$ldexp(x, i) //located in py_float.js\n },\n lgamma: function(x){\n $B.check_nb_args('lgamma', 1, arguments)\n $B.check_no_kw('lgamma', x)\n\n return new Number(Math.log(Math.abs(_mod.gamma(x))))\n },\n log: function(x, base){\n var $ = $B.args(\"log\", 2, {x: null, base: null}, ['x', 'base'],\n arguments, {base: _b_.None}, null, null),\n x = $.x,\n base = $.base\n\n var x1 = float_check(x)\n if(base === _b_.None){return float.$factory(Math.log(x1))}\n return float.$factory(Math.log(x1) / Math.log(float_check(base)))\n },\n log1p: function(x){\n $B.check_nb_args('log1p', 1, arguments)\n $B.check_no_kw('log1p', x)\n return float.$factory(Math.log1p(float_check(x)))\n },\n log2: function(x){\n $B.check_nb_args('log2', 1, arguments)\n $B.check_no_kw('log2', x)\n\n if(isNaN(x)){return float.$factory('nan')}\n if(_b_.$isninf(x)) {throw ValueError.$factory('')}\n var x1 = float_check(x)\n if(x1 < 0.0){throw ValueError.$factory('')}\n return float.$factory(Math.log(x1) / Math.LN2)\n },\n log10: function(x){\n $B.check_nb_args('log10', 1, arguments)\n $B.check_no_kw('log10', x)\n\n return float.$factory(Math.log10(float_check(x)))\n },\n modf: function(x){\n $B.check_nb_args('modf', 1, arguments)\n $B.check_no_kw('modf', x)\n\n if(_b_.$isninf(x)){\n return _b_.tuple.$factory([0.0, float.$factory('-inf')])\n }\n if(_b_.$isinf(x)){\n return _b_.tuple.$factory([0.0, float.$factory('inf')])\n }\n if(isNaN(x)){\n return _b_.tuple.$factory([float.$factory('nan'),\n float.$factory('nan')])\n }\n\n var x1 = float_check(x)\n if(x1 > 0){\n var i = float.$factory(x1 - Math.floor(x1))\n return _b_.tuple.$factory([i, float.$factory(x1 - i)])\n }\n\n var x2 = Math.ceil(x1)\n var i = float.$factory(x1 - x2)\n return _b_.tuple.$factory([i, float.$factory(x2)])\n },\n nan: 
float.$factory('nan'),\n perm: function(n, k){\n var $ = $B.args(\"perm\", 2, {n: null, k: null}, ['n', 'k'],\n arguments, {k: _b_.None}, null, null),\n n = $.n,\n k = $.k\n\n if(k === _b_.None){\n check_int(n)\n return _mod.factorial(n)\n }\n // raise TypeError if n or k is not an integer\n check_int(n)\n check_int(k)\n\n if(k < 0){\n throw _b_.ValueError.$factory(\"k must be a non-negative integer\")\n }\n if(n < 0){\n throw _b_.ValueError.$factory(\"n must be a non-negative integer\")\n }\n\n if(k > n){\n return 0\n }\n // Evaluates to n! / (n - k)!\n var fn = _mod.factorial(n),\n fn_k = _mod.factorial(n - k)\n return $B.floordiv(fn, fn_k)\n },\n pi : float.$factory(Math.PI),\n pow: function(x, y){\n $B.check_nb_args('pow', 2, arguments)\n $B.check_no_kw('pow', x,y)\n\n var x1 = float_check(x)\n var y1 = float_check(y)\n if(y1 == 0){return float.$factory(1)}\n if(x1 == 0 && y1 < 0){throw _b_.ValueError.$factory('')}\n\n if(isNaN(y1)){\n if(x1 == 1){return float.$factory(1)}\n return float.$factory('nan')\n }\n if(x1 == 0){return float.$factory(0)}\n\n if(_b_.$isninf(y)){\n if(x1 == 1 || x1 == -1){return float.$factory(1)}\n if(x1 < 1 && x1 > -1){return float.$factory('inf')}\n return float.$factory(0)\n }\n if(_b_.$isinf(y)){\n if(x1 == 1 || x1 == -1){return float.$factory(1)}\n if(x1 < 1 && x1 > -1){return float.$factory(0)}\n return float.$factory('inf')\n }\n\n if(isNaN(x1)){return float.$factory('nan')}\n if(_b_.$isninf(x)){\n if(y1 > 0 && isOdd(y1)){return float.$factory('-inf')}\n if(y1 > 0){return float.$factory('inf')} // this is even or a float\n if(y1 < 0){return float.$factory(0)}\n return float.$factory(1)\n }\n\n if(_b_.$isinf(x)){\n if(y1 > 0){return float.$factory('inf')}\n if(y1 < 0){return float.$factory(0)}\n return float.$factory(1)\n }\n\n var r\n if(isLargeNumber(x1) || isLargeNumber(y1)){\n var x = new BigNumber(x1),\n y = new BigNumber(y1)\n r = x.pow(y)\n }else{\n r = Math.pow(x1,y1)\n }\n\n if(isNaN(r)){return float.$factory('nan')}\n if(_b_.$isninf(r)){return float.$factory('-inf')}\n if(_b_.$isinf(r)){return float.$factory('inf')}\n\n return r\n },\n prod: function(){\n var $ = $B.args(\"prod\", 1, {iterable:null, start:null},\n [\"iterable\", \"start\"], arguments, {start: 1}, \"*\",\n null),\n iterable = $.iterable,\n start = $.start\n var res = start,\n it = _b_.iter(iterable),\n x\n while(true){\n try{\n x = _b_.next(it)\n if(x == 0){\n return 0\n }\n res = $B.mul(res, x)\n }catch(err){\n if(err.__class__ === _b_.StopIteration){\n return res\n }\n throw err\n }\n }\n },\n radians: function(x){\n $B.check_nb_args('radians', 1, arguments)\n $B.check_no_kw('radians', x)\n\n return float.$factory(float_check(x) * Math.PI / 180)\n },\n sin : function(x){\n $B.check_nb_args('sin ', 1, arguments)\n $B.check_no_kw('sin ', x)\n return float.$factory(Math.sin(float_check(x)))},\n sinh: function(x) {\n $B.check_nb_args('sinh', 1, arguments)\n $B.check_no_kw('sinh', x)\n\n var y = float_check(x)\n if(Math.sinh !== undefined){return float.$factory(Math.sinh(y))}\n return float.$factory(\n (Math.pow(Math.E, y) - Math.pow(Math.E, -y)) / 2)\n },\n sqrt: function(x){\n $B.check_nb_args('sqrt ', 1, arguments)\n $B.check_no_kw('sqrt ', x)\n\n var y = float_check(x)\n if(y < 0){throw ValueError.$factory(\"math range error\")}\n if(_b_.$isinf(y)){return float.$factory('inf')}\n var _r = Math.sqrt(y)\n if(_b_.$isinf(_r)){throw _b_.OverflowError.$factory(\"math range error\")}\n return float.$factory(_r)\n },\n tan: function(x) {\n $B.check_nb_args('tan', 1, arguments)\n 
$B.check_no_kw('tan', x)\n\n var y = float_check(x)\n return float.$factory(Math.tan(y))\n },\n tanh: function(x) {\n $B.check_nb_args('tanh', 1, arguments)\n $B.check_no_kw('tanh', x)\n\n var y = float_check(x)\n if(Math.tanh !== undefined){return float.$factory(Math.tanh(y))}\n return float.$factory((Math.pow(Math.E, y) - Math.pow(Math.E, -y))/\n (Math.pow(Math.E, y) + Math.pow(Math.E, -y)))\n },\n tau: 6.283185307179586,\n trunc: function(x) {\n $B.check_nb_args('trunc', 1, arguments)\n $B.check_no_kw('trunc', x)\n\n try{return getattr(x, '__trunc__')()}catch(err){}\n var x1 = float_check(x)\n if(!isNaN(parseFloat(x1)) && isFinite(x1)){\n if(Math.trunc !== undefined){return int.$factory(Math.trunc(x1))}\n if(x1 > 0){return int.$factory(Math.floor(x1))}\n return int.$factory(Math.ceil(x1)) // x1 < 0\n }\n throw _b_.ValueError.$factory(\n 'object is not a number and does not contain __trunc__')\n }\n}\n\nfor(var $attr in _mod){\n if(typeof _mod[$attr] === 'function'){\n _mod[$attr].__class__ = $B.builtin_function\n }\n}\n\nreturn _mod\n\n})(__BRYTHON__)\n"], "modulefinder": [".js", "var $module=(function($B){\n\nvar _b_=$B.builtins\nvar _mod = {}\n\n$ModuleFinderDict = {__class__:_b_.type,__name__:'ModuleFinder'}\n$ModuleFinderDict.__mro__ = [_b_.object]\n\n$ModuleFinderDict.run_script = function(self, pathname){\n // pathname is the url of a Python script\n var py_src = _b_.$open(pathname).read()\n // transform into internal Brython tree structure\n var root = $B.py2js(py_src)\n // walk the tree to find occurences of imports\n function walk(node){\n var modules = []\n var ctx = node.context\n if(ctx && ctx.type=='node'){ctx = ctx.tree[0]}\n\n if(ctx && ctx.type==\"import\"){\n for(var i=0, _len_i = ctx.tree.length; i < _len_i;i++){\n if(modules.indexOf(ctx.tree[i].name)==-1){\n modules.push(ctx.tree[i].name)\n }\n }\n }else if(ctx && ctx.type==\"from\"){\n if(modules.indexOf(ctx.module)==-1){\n modules.push(ctx.module)\n }\n }\n \n for(var i=0, _len_i = node.children.length; i < _len_i;i++){\n mods = walk(node.children[i])\n for(var j=0, _len_j = mods.length; j < _len_j;j++){\n if(modules.indexOf(mods[j])==-1){modules.push(mods[j])}\n }\n }\n return modules\n }\n self.modules = walk(root)\n}\n\n_mod.ModuleFinder = function(){return {__class__:$ModuleFinderDict}\n}\n_mod.ModuleFinder.$dict = $ModuleFinderDict\n_mod.ModuleFinder.__class__ = $B.$factory\n$ModuleFinderDict.$factory = _mod.ModuleFinder\n\nreturn _mod\n})(__BRYTHON__)\n"], "posix": [".js", "/*\nThis module provides access to operating system functionality that is\nstandardized by the C Standard and the POSIX standard (a thinly\ndisguised Unix interface). 
Refer to the library manual and\ncorresponding Unix manual entries for more information on calls.\n*/\n\nvar $B = __BRYTHON__,\n _b_ = $B.builtins\n\nfunction _randint(a, b){\n return parseInt(Math.random() * (b - a + 1) + a)\n}\n\nvar stat_result = $B.make_class(\"stat_result\",\n function(filename){\n // Use $B.files, created by \"python -m brython --make_file_system\"\n if($B.files && $B.files.hasOwnProperty(filename)){\n var f = $B.files[filename],\n res = {\n __class__: stat_result,\n st_atime: new Date().getTime(),\n st_ctime: f.ctime,\n st_mtime: f.mtime,\n st_uid: -1,\n st_gid: -1,\n st_ino: -1,\n st_mode: 0,\n st_size: 1\n };\n [\"atime\", \"mtime\", \"ctime\"].\n forEach(function(item){\n res[\"st_\" + item + \"_ns\"] = res[\"st_\" + item] *\n 1000000\n });\n }else{\n var res = {\n __class__: stat_result,\n st_atime: new Date(),\n st_uid: -1,\n st_gid: -1,\n st_ino: -1,\n st_mode: 0,\n st_size: 1\n };\n [\"mtime\", \"ctime\", \"atime_ns\", \"mtime_ns\", \"ctime_ns\"].\n forEach(function(item){\n res[\"st_\" + item] = res.st_atime\n });\n }\n return res\n }\n)\n$B.set_func_names(stat_result, \"posix\")\n\nvar $module = {\n F_OK: 0,\n O_APPEND: 8,\n O_BINARY: 32768,\n O_CREAT: 256,\n O_EXCL: 1024,\n O_NOINHERIT: 128,\n O_RANDOM: 16,\n O_RDONLY: 0,\n O_RDWR: 2,\n O_SEQUENTIAL: 32,\n O_SHORT_LIVED: 4096,\n O_TEMPORARY: 64,\n O_TEXT: 16384,\n O_TRUNC: 512,\n O_WRONLY: 1,\n P_DETACH: 4,\n P_NOWAIT: 1,\n P_NOWAITO: 3,\n P_OVERLAY: 2,\n P_WAIT: 0,\n R_OK: 4,\n TMP_MAX: 32767,\n W_OK: 2,\n X_OK: 1,\n _have_functions: ['MS_WINDOWS'],\n environ: _b_.dict.$factory(\n [['PYTHONPATH', $B.brython_path],\n ['PYTHONUSERBASE', ' ']]),\n error: _b_.OSError,\n fspath: function(path){\n return path\n },\n getcwd: function(){return $B.brython_path},\n getpid: function(){return 0},\n lstat: function(){return stat_result.$factory()},\n open: function(path, flags){return _b_.open(path, flags)},\n stat: function(filename){return stat_result.$factory(filename)},\n stat_result: function(filename){return stat_result.$factory(filename)},\n urandom: function(n){\n var randbytes = []\n for(var i = 0; i < n; i++){\n randbytes.push(_randint(0, 255))\n }\n return _b_.bytes.$factory(randbytes)\n },\n WTERMSIG: function(){return 0},\n WNOHANG: function(){return _b_.tuple.$factory([0, 0])}\n};\n\n[\"WCOREDUMP\", \"WIFCONTINUED\", \"WIFSTOPPED\", \"WIFSIGNALED\", \"WIFEXITED\"].forEach(function(funcname){\n $module[funcname] = function(){return false}\n });\n\n[\"WEXITSTATUS\", \"WSTOPSIG\", \"WTERMSIG\"].\n forEach(function(funcname){\n $module[funcname] = function(){return _b_.None}\n });\n\n[\"_exit\", \"_getdiskusage\", \"_getfileinformation\", \"_getfinalpathname\",\n \"_getfullpathname\", \"_isdir\", \"abort\", \"access\", \"chdir\", \"chmod\",\n \"close\", \"closerange\", \"device_encoding\", \"dup\", \"dup2\",\n \"execv\", \"execve\", \"fsat\", \"fsync\", \"get_terminal_size\", \"getcwdb\",\n \"getlogin\", \"getppid\", \"isatty\", \"kill\", \"link\", \"listdir\", \"lseek\",\n \"mkdir\", \"pipe\", \"putenv\", \"read\", \"readlink\", \"remove\", \"rename\",\n \"replace\", \"rmdir\", \"spawnv\", \"spawnve\", \"startfile\", \"stat_float_times\",\n \"statvfs_result\", \"strerror\", \"symlink\", \"system\", \"terminal_size\",\n \"times\", \"times_result\", \"umask\", \"uname_result\", \"unlink\", \"utime\",\n \"waitpid\", \"write\"].forEach(function(funcname){\n $module[funcname] = function(){\n throw _b_.NotImplementedError.$factory(\"posix.\" + funcname +\n \" is not implemented\")\n }\n });\n"], "random": [".js", 
"// Javascript implementation of the random module\n// Based on Ian Bicking's implementation of the Mersenne twister\n\nvar $module = (function($B){\n\nvar _b_ = $B.builtins,\n i\n\nvar VERSION = 3\n\n// Code copied from https://github.com/ianb/whrandom/blob/master/mersenne.js\n// by Ian Bicking\n\n// this program is a JavaScript version of Mersenne Twister,\n// a straight conversion from the original program, mt19937ar.c,\n// translated by y. okada on july 17, 2006.\n// and modified a little at july 20, 2006, but there are not any substantial differences.\n// modularized by Ian Bicking, March 25, 2013 (found original version at http://www.math.sci.hiroshima-u.ac.jp/~m-mat/MT/VERSIONS/JAVASCRIPT/java-script.html)\n// in this program, procedure descriptions and comments of original source code were not removed.\n// lines commented with //c// were originally descriptions of c procedure. and a few following lines are appropriate JavaScript descriptions.\n// lines commented with /* and */ are original comments.\n// lines commented with // are additional comments in this JavaScript version.\n/*\n A C-program for MT19937, with initialization improved 2002/1/26.\n Coded by Takuji Nishimura and Makoto Matsumoto.\n\n Before using, initialize the state by using init_genrand(seed)\n or init_by_array(init_key, key_length).\n\n Copyright (C) 1997 - 2002, Makoto Matsumoto and Takuji Nishimura,\n All rights reserved.\n\n Redistribution and use in source and binary forms, with or without\n modification, are permitted provided that the following conditions\n are met:\n\n 1. Redistributions of source code must retain the above copyright\n notice, this list of conditions and the following disclaimer.\n\n 2. Redistributions in binary form must reproduce the above copyright\n notice, this list of conditions and the following disclaimer in the\n documentation and/or other materials provided with the distribution.\n\n 3. The names of its contributors may not be used to endorse or promote\n products derived from this software without specific prior written\n permission.\n\n THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR\n CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,\n EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,\n PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR\n PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF\n LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING\n NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS\n SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\n Any feedback is very welcome.\n http://www.math.sci.hiroshima-u.ac.jp/~m-mat/MT/emt.html\n email: m-mat @ math.sci.hiroshima-u.ac.jp (remove space)\n*/\n\nfunction RandomStream(seed) {\n /*jshint bitwise:false */\n /* Period parameters */\n //c//#define N 624\n //c//#define M 397\n //c//#define MATRIX_A 0x9908b0dfUL /* constant vector a */\n //c//#define UPPER_MASK 0x80000000UL /* most significant w-r bits */\n //c//#define LOWER_MASK 0x7fffffffUL /* least significant r bits */\n var N = 624\n var M = 397\n var MATRIX_A = 0x9908b0df /* constant vector a */\n var UPPER_MASK = 0x80000000 /* most significant w-r bits */\n var LOWER_MASK = 0x7fffffff /* least significant r bits */\n //c//static unsigned long mt[N]; /* the array for the state vector */\n //c//static int mti=N+1; /* mti==N+1 means mt[N] is not initialized */\n var mt = new Array(N) /* the array for the state vector */\n var mti = N + 1 /* mti==N+1 means mt[N] is not initialized */\n\n function unsigned32(n1){\n // returns a 32-bits unsiged integer from an operand to which applied a\n // bit operator.\n return n1 < 0 ? (n1 ^ UPPER_MASK) + UPPER_MASK : n1\n }\n\n function subtraction32(n1, n2){\n // emulates lowerflow of a c 32-bits unsiged integer variable, instead of\n // the operator -. these both arguments must be non-negative integers\n // expressible using unsigned 32 bits.\n return n1 < n2 ? unsigned32((0x100000000 - (n2 - n1)) & 0xffffffff) :\n n1 - n2\n }\n\n function addition32(n1, n2){\n // emulates overflow of a c 32-bits unsiged integer variable, instead of\n // the operator +. these both arguments must be non-negative integers\n // expressible using unsigned 32 bits.\n return unsigned32((n1 + n2) & 0xffffffff)\n }\n\n function multiplication32(n1, n2){\n // emulates overflow of a c 32-bits unsiged integer variable, instead of the\n // operator *. these both arguments must be non-negative integers\n // expressible using unsigned 32 bits.\n var sum = 0\n for (var i = 0; i < 32; ++i){\n if((n1 >>> i) & 0x1){\n sum = addition32(sum, unsigned32(n2 << i))\n }\n }\n return sum\n }\n\n /* initializes mt[N] with a seed */\n //c//void init_genrand(unsigned long s)\n function init_genrand(s) {\n //c//mt[0]= s & 0xffffffff;\n mt[0] = unsigned32(s & 0xffffffff)\n for(mti = 1; mti < N; mti++){\n mt[mti] =\n //c//(1812433253 * (mt[mti-1] ^ (mt[mti-1] >> 30)) + mti);\n addition32(multiplication32(1812433253,\n unsigned32(mt[mti - 1] ^ (mt[mti - 1] >>> 30))), mti)\n /* See Knuth TAOCP Vol2. 3rd Ed. P.106 for multiplier. */\n /* In the previous versions, MSBs of the seed affect */\n /* only MSBs of the array mt[]. 
*/\n /* 2002/01/09 modified by Makoto Matsumoto */\n //c//mt[mti] &= 0xffffffff;\n mt[mti] = unsigned32(mt[mti] & 0xffffffff);\n /* for >32 bit machines */\n }\n }\n\n /* initialize by an array with array-length */\n /* init_key is the array for initializing keys */\n /* key_length is its length */\n /* slight change for C++, 2004/2/26 */\n //c//void init_by_array(unsigned long init_key[], int key_length)\n function init_by_array(init_key, key_length) {\n //c//int i, j, k;\n var i, j, k\n init_genrand(19650218)\n i = 1\n j = 0\n k = (N > key_length ? N : key_length)\n for(; k; k--){\n //c//mt[i] = (mt[i] ^ ((mt[i-1] ^ (mt[i-1] >> 30)) * 1664525))\n //c// + init_key[j] + j; /* non linear */\n mt[i] = addition32(\n addition32(unsigned32(mt[i] ^\n multiplication32(unsigned32(mt[i - 1] ^ (mt[i - 1] >>> 30)),\n 1664525)),\n init_key[j]), j)\n mt[i] =\n //c//mt[i] &= 0xffffffff; /* for WORDSIZE > 32 machines */\n unsigned32(mt[i] & 0xffffffff)\n i++\n j++\n if(i >= N){mt[0] = mt[N - 1]; i = 1}\n if(j >= key_length){j = 0}\n }\n for(k = N - 1; k; k--){\n //c//mt[i] = (mt[i] ^ ((mt[i-1] ^ (mt[i-1] >> 30)) * 1566083941))\n //c//- i; /* non linear */\n mt[i] = subtraction32(\n unsigned32(\n (mt[i]) ^\n multiplication32(\n unsigned32(mt[i - 1] ^ (mt[i - 1] >>> 30)),\n 1566083941)),\n i\n )\n //c//mt[i] &= 0xffffffff; /* for WORDSIZE > 32 machines */\n mt[i] = unsigned32(mt[i] & 0xffffffff)\n i++\n if(i >= N){mt[0] = mt[N - 1]; i = 1}\n }\n mt[0] = 0x80000000; /* MSB is 1; assuring non-zero initial array */\n }\n\n /* generates a random number on [0,0xffffffff]-interval */\n //c//unsigned long genrand_int32(void)\n function genrand_int32() {\n //c//unsigned long y;\n //c//static unsigned long mag01[2]={0x0UL, MATRIX_A};\n var y;\n var mag01 = [0x0, MATRIX_A];\n /* mag01[x] = x * MATRIX_A for x=0,1 */\n\n if(mti >= N){ /* generate N words at one time */\n //c//int kk;\n var kk\n\n if(mti == N + 1){ /* if init_genrand() has not been called, */\n init_genrand(Date.now()) /* a default initial seed is used */\n }\n\n for(kk = 0; kk < N - M; kk++){\n //c//y = (mt[kk]&UPPER_MASK)|(mt[kk+1]&LOWER_MASK);\n //c//mt[kk] = mt[kk+M] ^ (y >> 1) ^ mag01[y & 0x1];\n y = unsigned32((mt[kk]&UPPER_MASK) | (mt[kk + 1]&LOWER_MASK))\n mt[kk] = unsigned32(mt[kk + M] ^ (y >>> 1) ^ mag01[y & 0x1])\n }\n for(;kk < N - 1; kk++){\n //c//y = (mt[kk]&UPPER_MASK)|(mt[kk+1]&LOWER_MASK);\n //c//mt[kk] = mt[kk+(M-N)] ^ (y >> 1) ^ mag01[y & 0x1];\n y = unsigned32((mt[kk]&UPPER_MASK) | (mt[kk + 1]&LOWER_MASK))\n mt[kk] = unsigned32(mt[kk + (M - N)] ^ (y >>> 1) ^ mag01[y & 0x1])\n }\n //c//y = (mt[N-1]&UPPER_MASK)|(mt[0]&LOWER_MASK);\n //c//mt[N-1] = mt[M-1] ^ (y >> 1) ^ mag01[y & 0x1];\n y = unsigned32((mt[N - 1] & UPPER_MASK) | (mt[0] & LOWER_MASK))\n mt[N - 1] = unsigned32(mt[M - 1] ^ (y >>> 1) ^ mag01[y & 0x1])\n mti = 0\n }\n\n y = mt[mti++]\n\n /* Tempering */\n //c//y ^= (y >> 11);\n //c//y ^= (y << 7) & 0x9d2c5680;\n //c//y ^= (y << 15) & 0xefc60000;\n //c//y ^= (y >> 18);\n y = unsigned32(y ^ (y >>> 11))\n y = unsigned32(y ^ ((y << 7) & 0x9d2c5680))\n y = unsigned32(y ^ ((y << 15) & 0xefc60000))\n y = unsigned32(y ^ (y >>> 18))\n\n return y\n }\n\n /* generates a random number on [0,0x7fffffff]-interval */\n //c//long genrand_int31(void)\n function genrand_int31(){\n //c//return (genrand_int32()>>1);\n return (genrand_int32()>>>1)\n }\n\n /* generates a random number on [0,1]-real-interval */\n //c//double genrand_real1(void)\n function genrand_real1(){\n return genrand_int32()*(1.0/4294967295.0)\n /* divided by 2^32-1 */\n 
}\n\n /* generates a random number on [0,1)-real-interval */\n //c//double genrand_real2(void)\n function genrand_real2(){\n return genrand_int32() * (1.0 / 4294967296.0)\n /* divided by 2^32 */\n }\n\n /* generates a random number on (0,1)-real-interval */\n //c//double genrand_real3(void)\n function genrand_real3() {\n return ((genrand_int32()) + 0.5) * (1.0 / 4294967296.0)\n /* divided by 2^32 */\n }\n\n /* generates a random number on [0,1) with 53-bit resolution*/\n //c//double genrand_res53(void)\n function genrand_res53() {\n //c//unsigned long a=genrand_int32()>>5, b=genrand_int32()>>6;\n var a = genrand_int32() >>> 5,\n b = genrand_int32() >>> 6\n return (a * 67108864.0 + b) * (1.0 / 9007199254740992.0)\n }\n /* These real versions are due to Isaku Wada, 2002/01/09 added */\n\n var random = genrand_res53\n\n random.seed = function(seed){\n if(seed === undefined){seed = Date.now()}\n if(typeof seed != \"number\"){seed = parseInt(seed, 10)}\n if((seed !== 0 && ! seed) || isNaN(seed)){\n throw _b_.ValueError.$factory(\"Bad seed: \" + _b_.str.$factory(seed))\n }\n init_genrand(seed)\n }\n\n random.seed(seed)\n\n random.int31 = genrand_int31\n random.real1 = genrand_real1\n random.real2 = genrand_real2\n random.real3 = genrand_real3\n random.res53 = genrand_res53\n\n // Added for compatibility with Python\n random.getstate = function(){return [VERSION, mt, mti]}\n\n random.setstate = function(state){\n mt = state[1]\n mti = state[2]\n }\n\n return random\n\n}\n\n// magic constants\n\nvar NV_MAGICCONST = 4 * Math.exp(-0.5)/Math.sqrt(2),\n gauss_next = null,\n NV_MAGICCONST = 1.71552776992141,\n TWOPI = 6.28318530718,\n LOG4 = 1.38629436111989,\n SG_MAGICCONST = 2.50407739677627,\n VERSION = VERSION\n\nvar Random = $B.make_class(\"Random\",\n function(){\n return {\n __class__: Random,\n _random: RandomStream(Date.now())\n }\n }\n)\n\nRandom._randbelow = function(self, x){\n return Math.floor(x * self._random())\n}\n\nRandom._urandom = function(self, n){\n /*\n urandom(n) -> str\n Return n random bytes suitable for cryptographic use.\n */\n\n var randbytes = []\n for(i = 0; i < n; i++){randbytes.push(parseInt(self._random() * 256))}\n return _b_.bytes.$factory(randbytes)\n}\n\nRandom.betavariate = function(){\n /* Beta distribution.\n\n Conditions on the parameters are alpha > 0 and beta > 0.\n Returned values range between 0 and 1.\n\n\n # This version due to Janne Sinkkonen, and matches all the std\n # texts (e.g., Knuth Vol 2 Ed 3 pg 134 \"the beta distribution\").\n */\n\n var $ = $B.args('betavariate', 3, {self: null, alpha:null, beta:null},\n ['self', 'alpha', 'beta'], arguments, {}, null, null),\n self = $.self,\n alpha = $.alpha,\n beta = $.beta\n\n var y = Random.gammavariate(self, alpha, 1)\n if(y == 0){return _b_.float.$factory(0)}\n else{return y / (y + Random.gammavariate(self, beta, 1))}\n}\n\nRandom.choice = function(){\n var $ = $B.args(\"choice\", 2,\n {self: null, seq:null},[\"self\", \"seq\"],arguments, {}, null, null),\n self = $.self,\n seq = $.seq\n var len, rank\n if(Array.isArray(seq)){len = seq.length}\n else{len = _b_.getattr(seq,\"__len__\")()}\n if(len == 0){\n throw _b_.IndexError.$factory(\"Cannot choose from an empty sequence\")\n }\n rank = parseInt(self._random() * len)\n if(Array.isArray(seq)){return seq[rank]}\n else{return _b_.getattr(seq, \"__getitem__\")(rank)}\n}\n\nRandom.choices = function(){\n var $ = $B.args(\"choices\", 3,\n {self: null,population:null, weights:null, cum_weights:null, k:null},\n [\"self\", \"population\", \"weights\", 
\"cum_weights\", \"k\"], arguments,\n {weights: _b_.None, cum_weights: _b_.None, k: 1}, \"*\", null),\n self = $.self,\n population = $.population,\n weights = $.weights,\n cum_weights = $.cum_weights,\n k = $.k\n\n if(population.length == 0){\n throw _b_.ValueError.$factory(\"population is empty\")\n }\n population = _b_.list.$factory(population) // issue #1268\n if(weights === _b_.None){\n weights = []\n population.forEach(function(){\n weights.push(1)\n })\n }else if(cum_weights !== _b_.None){\n throw _b_.TypeError.$factory(\"Cannot specify both weights and \" +\n \"cumulative weights\")\n }else{\n if(weights.length != population.length){\n throw _b_.ValueError.$factory('The number of weights does not ' +\n 'match the population')\n }\n }\n if(cum_weights === _b_.None){\n var cum_weights = [weights[0]]\n weights.forEach(function(weight, rank){\n if(rank > 0){\n cum_weights.push(cum_weights[rank - 1] + weight)\n }\n })\n }else if(cum_weights.length != population.length){\n throw _b_.ValueError.$factory('The number of weights does not ' +\n 'match the population')\n }\n\n var result = []\n for(var i = 0; i < k; i++){\n var rand = self._random() * cum_weights[cum_weights.length - 1]\n for(var rank = 0, len = population.length; rank < len; rank++){\n if(cum_weights[rank] > rand){\n result.push(population[rank])\n break\n }\n }\n }\n return result\n}\n\nRandom.expovariate = function(self, lambd){\n /*\n Exponential distribution.\n\n lambd is 1.0 divided by the desired mean. It should be\n nonzero. (The parameter would be called \"lambda\", but that is\n a reserved word in Python.) Returned values range from 0 to\n positive infinity if lambd is positive, and from negative\n infinity to 0 if lambd is negative.\n\n */\n // lambd: rate lambd = 1/mean\n // ('lambda' is a Python reserved word)\n\n // we use 1-random() instead of random() to preclude the\n // possibility of taking the log of zero.\n return -Math.log(1.0 - self._random()) / lambd\n}\n\nRandom.gammavariate = function(self, alpha, beta){\n /* Gamma distribution. Not the gamma function!\n\n Conditions on the parameters are alpha > 0 and beta > 0.\n\n The probability distribution function is:\n\n x ** (alpha - 1) * math.exp(-x / beta)\n pdf(x) = --------------------------------------\n math.gamma(alpha) * beta ** alpha\n\n */\n\n // alpha > 0, beta > 0, mean is alpha*beta, variance is alpha*beta**2\n\n // Warning: a few older sources define the gamma distribution in terms\n // of alpha > -1.0\n\n var $ = $B.args('gammavariate', 3,\n {self: null, alpha:null, beta:null},\n ['self', 'alpha', 'beta'],\n arguments, {}, null, null),\n self = $.self,\n alpha = $.alpha,\n beta = $.beta,\n LOG4 = Math.log(4),\n SG_MAGICCONST = 1.0 + Math.log(4.5)\n\n if(alpha <= 0.0 || beta <= 0.0){\n throw _b_.ValueError.$factory('gammavariate: alpha and beta must be > 0.0')\n }\n\n if(alpha > 1.0){\n\n // Uses R.C.H. Cheng, \"The generation of Gamma\n // variables with non-integral shape parameters\",\n // Applied Statistics, (1977), 26, No. 
1, p71-74\n\n var ainv = Math.sqrt(2.0 * alpha - 1.0),\n bbb = alpha - LOG4,\n ccc = alpha + ainv\n\n while(true){\n var u1 = self._random()\n if(!((1e-7 < u1) && (u1 < .9999999))){\n continue\n }\n var u2 = 1.0 - self._random(),\n v = Math.log(u1 / (1.0 - u1)) / ainv,\n x = alpha * Math.exp(v),\n z = u1 * u1 * u2,\n r = bbb + ccc * v - x\n if((r + SG_MAGICCONST - 4.5 * z >= 0.0) || r >= Math.log(z)){\n return x * beta\n }\n }\n }else if(alpha == 1.0){\n // expovariate(1)\n var u = self._random()\n while(u <= 1e-7){u = self._random()}\n return -Math.log(u) * beta\n }else{\n // alpha is between 0 and 1 (exclusive)\n\n // Uses ALGORITHM GS of Statistical Computing - Kennedy & Gentle\n\n while(true){\n var u = self._random(),\n b = (Math.E + alpha)/Math.E,\n p = b*u,\n x\n if(p <= 1.0){x = Math.pow(p, (1.0/alpha))}\n else{x = -Math.log((b-p)/alpha)}\n var u1 = self._random()\n if(p > 1.0){\n if(u1 <= Math.pow(x, alpha - 1.0)){\n break\n }\n }else if(u1 <= Math.exp(-x)){\n break\n }\n }\n return x * beta\n }\n}\n\nRandom.gauss = function(){\n\n /* Gaussian distribution.\n\n mu is the mean, and sigma is the standard deviation. This is\n slightly faster than the normalvariate() function.\n\n Not thread-safe without a lock around calls.\n\n # When x and y are two variables from [0, 1), uniformly\n # distributed, then\n #\n # cos(2*pi*x)*sqrt(-2*log(1-y))\n # sin(2*pi*x)*sqrt(-2*log(1-y))\n #\n # are two *independent* variables with normal distribution\n # (mu = 0, sigma = 1).\n # (Lambert Meertens)\n # (corrected version; bug discovered by Mike Miller, fixed by LM)\n\n # Multithreading note: When two threads call this function\n # simultaneously, it is possible that they will receive the\n # same return value. The window is very small though. To\n # avoid this, you have to use a lock around all calls. (I\n # didn't want to slow this down in the serial case by using a\n # lock here.)\n */\n\n var $ = $B.args('gauss', 3, {self: null, mu:null, sigma:null},\n ['self', 'mu', 'sigma'], arguments, {}, null, null),\n self = $.self,\n mu = $.mu,\n sigma = $.sigma\n\n var z = gauss_next\n gauss_next = null\n if(z === null){\n var x2pi = self._random() * Math.PI * 2,\n g2rad = Math.sqrt(-2.0 * Math.log(1.0 - self._random())),\n z = Math.cos(x2pi) * g2rad\n gauss_next = Math.sin(x2pi) * g2rad\n }\n return mu + z*sigma\n}\n\nRandom.getrandbits = function(){\n var $ = $B.args(\"getrandbits\", 2,\n {self: null, k:null},[\"self\", \"k\"],arguments, {}, null, null),\n self = $.self,\n k = $B.$GetInt($.k)\n // getrandbits(k) -> x. 
Generates a long int with k random bits.\n if(k <= 0){\n throw _b_.ValueError.$factory('number of bits must be greater than zero')\n }\n if(k != _b_.int.$factory(k)){\n throw _b_.TypeError.$factory('number of bits should be an integer')\n }\n var numbytes = (k + 7), // bits / 8 and rounded up\n x = _b_.int.from_bytes(Random._urandom(self, numbytes), 'big')\n return _b_.getattr(x, '__rshift__')(\n _b_.getattr(numbytes*8,'__sub__')(k))\n}\n\nRandom.getstate = function(){\n // Return internal state; can be passed to setstate() later.\n var $ = $B.args('getstate', 1, {self: null},\n [\"self\"], arguments, {}, null, null)\n return $.self._random.getstate()\n}\n\nRandom.lognormvariate = function(){\n /*\n Log normal distribution.\n\n If you take the natural logarithm of this distribution, you'll get a\n normal distribution with mean mu and standard deviation sigma.\n mu can have any value, and sigma must be greater than zero.\n\n */\n return Math.exp(Random.normalvariate.apply(null, arguments))\n}\n\nRandom.normalvariate = function(){\n /*\n Normal distribution.\n\n mu is the mean, and sigma is the standard deviation.\n\n */\n\n // mu = mean, sigma = standard deviation\n\n // Uses Kinderman and Monahan method. Reference: Kinderman,\n // A.J. and Monahan, J.F., \"Computer generation of random\n // variables using the ratio of uniform deviates\", ACM Trans\n // Math Software, 3, (1977), pp257-260.\n\n var $ = $B.args(\"normalvariate\", 3,\n {self: null, mu:null, sigma:null}, [\"self\", \"mu\", \"sigma\"],\n arguments, {}, null, null),\n self = $.self,\n mu = $.mu,\n sigma = $.sigma\n\n while(true){\n var u1 = self._random(),\n u2 = 1.0 - self._random(),\n z = NV_MAGICCONST * (u1 - 0.5) / u2,\n zz = z * z / 4.0\n if(zz <= -Math.log(u2)){break}\n }\n return mu + z * sigma\n}\n\nRandom.paretovariate = function(){\n /* Pareto distribution. alpha is the shape parameter.*/\n // Jain, pg. 495\n\n var $ = $B.args(\"paretovariate\", 2, {self: null, alpha:null},\n [\"self\", \"alpha\"], arguments, {}, null, null)\n\n var u = 1 - $.self._random()\n return 1 / Math.pow(u, 1 / $.alpha)\n}\n\nfunction is_integer(x){\n return _b_.isinstance(x, _b_.int) || (\n _b_.isinstance(x, _b_.float) &&\n x.valueOf() == Math.floor(x.valueOf()))\n}\n\nRandom.randint = function(self, a, b){\n var $ = $B.args('randint', 3,\n {self: null, a:null, b:null},\n ['self', 'a', 'b'],\n arguments, {}, null, null)\n if(! is_integer($.a)){\n throw _b_.ValueError.$factory(\"non-integer value for start\")\n }\n if(! is_integer($.b)){\n throw _b_.ValueError.$factory(\"non-integer value for stop\")\n }\n return Random.randrange($.self, $.a, $B.add($.b, 1))\n}\n\nRandom.random = function(self){\n var res = self._random()\n if(! Number.isInteger(res)){return new Number(res)}\n return res\n}\n\nRandom.randrange = function(){\n var $ = $B.args('randrange', 4,\n {self: null, x:null, stop:null, step:null},\n ['self', 'x', 'stop', 'step'],\n arguments, {stop:null, step:null}, null, null),\n self = $.self,\n _random = self._random\n\n if(! is_integer($.x)){\n throw _b_.ValueError.$factory(\"non-integer arg 1 for randrange()\")\n }\n if($.stop !== null && ! is_integer($.stop)){\n throw _b_.ValueError.$factory(\"non-integer arg 2 for randrange()\")\n }\n if($.step !== null && ! is_integer($.step)){\n throw _b_.ValueError.$factory(\"non-integer arg 3 for randrange()\")\n }\n\n if($.stop === null){\n var start = 0, stop = $.x.valueOf(), step = 1\n }else{\n var start = $.x.valueOf(), stop = $.stop.valueOf(),\n step = $.step === null ? 
1 : $.step.valueOf()\n if(step == 0){throw _b_.ValueError.$factory('step cannot be 0')}\n }\n\n if((step > 0 && start >= stop) || (step < 0 && start <= stop)){\n throw _b_.ValueError.$factory(\"empty range for randrange() (\" +\n start + \", \" + stop + \", \" + step + \")\")\n }\n if(typeof start == 'number' && typeof stop == 'number' &&\n typeof step == 'number'){\n return start + step * Math.floor(_random() *\n Math.ceil((stop - start) / step))\n }else{\n var d = _b_.getattr(stop, '__sub__')(start)\n d = _b_.getattr(d, '__floordiv__')(step)\n // Force d to be a LongInt\n d = $B.long_int.$factory(d)\n // d is a long integer with n digits ; to choose a random number\n // between 0 and d the most simple is to take a random digit\n // at each position, except the first one\n var s = d.value,\n _len = s.length,\n res = Math.floor(_random() * (parseInt(s.charAt(0)) +\n (_len == 1 ? 0 : 1))) + ''\n var same_start = res.charAt(0) == s.charAt(0)\n for(var i = 1; i < _len; i++){\n if(same_start){\n // If it's the last digit, don't allow stop as valid\n if(i == _len - 1){\n res += Math.floor(_random() * parseInt(s.charAt(i))) + ''\n }else{\n res += Math.floor(_random() *\n (parseInt(s.charAt(i)) + 1)) + ''\n same_start = res.charAt(i) == s.charAt(i)\n }\n }else{\n res += Math.floor(_random() * 10) + ''\n }\n }\n var offset = {__class__: $B.long_int, value: res,\n pos: true}\n d = _b_.getattr(step, '__mul__')(offset)\n d = _b_.getattr(start, '__add__')(d)\n return _b_.int.$factory(d)\n }\n}\n\nRandom.sample = function(){\n /*\n Chooses k unique random elements from a population sequence or set.\n\n Returns a new list containing elements from the population while\n leaving the original population unchanged. The resulting list is\n in selection order so that all sub-slices will also be valid random\n samples. This allows raffle winners (the sample) to be partitioned\n into grand prize and second place winners (the subslices).\n\n Members of the population need not be hashable or unique. If the\n population contains repeats, then each occurrence is a possible\n selection in the sample.\n\n To choose a sample in a range of integers, use range as an argument.\n This is especially fast and space efficient for sampling from a\n large population: sample(range(10000000), 60)\n\n # Sampling without replacement entails tracking either potential\n # selections (the pool) in a list or previous selections in a set.\n\n # When the number of selections is small compared to the\n # population, then tracking selections is efficient, requiring\n # only a small set and an occasional reselection. For\n # a larger number of selections, the pool tracking method is\n # preferred since the list takes less space than the\n # set and it doesn't suffer from frequent reselections.'\n\n */\n var $ = $B.args('sample', 3, {self: null, population: null,k: null},\n ['self', 'population','k'], arguments, {}, null, null),\n self = $.self,\n population = $.population,\n k = $.k\n\n if(!_b_.hasattr(population, '__len__')){\n throw _b_.TypeError.$factory(\"Population must be a sequence or set. 
\" +\n \"For dicts, use list(d).\")\n }\n var n = _b_.getattr(population, '__len__')()\n\n if(k < 0 || k > n){\n throw _b_.ValueError.$factory(\"Sample larger than population\")\n }\n var result = [],\n setsize = 21 // size of a small set minus size of an empty list\n if(k > 5){\n setsize += Math.pow(4, Math.ceil(Math.log(k * 3, 4))) // table size for big sets\n }\n if(n <= setsize){\n // An n-length list is smaller than a k-length set\n if(Array.isArray(population)){\n var pool = population.slice()\n }else{var pool = _b_.list.$factory(population)}\n for(var i = 0; i < k; i++){ //invariant: non-selected at [0,n-i)\n var j = Random._randbelow(self, n - i)\n result[i] = pool[j]\n pool[j] = pool[n - i - 1] // move non-selected item into vacancy\n }\n }else{\n selected = {}\n for(var i = 0; i < k; i++){\n var j = Random._randbelow(self, n)\n while(selected[j] !== undefined){\n j = Random._randbelow(self, n)\n }\n selected[j] = true\n result[i] = Array.isArray(population) ? population[j] :\n _b_.getattr(population, '__getitem__')(j)\n }\n }\n return result\n}\n\nRandom.seed = function(){\n /*\n Initialize internal state from hashable object.\n\n None or no argument seeds from current time or from an operating\n system specific randomness source if available.\n\n If *a* is an int, all bits are used.\n */\n var $ = $B.args('seed', 3, {self: null, a: null, version: null},\n ['self', 'a', 'version'],\n arguments, {a: new Date(), version: 2}, null, null),\n self = $.self,\n a = $.a,\n version = $.version\n\n if(version == 1){a = _b_.hash(a)}\n else if(version == 2){\n if(_b_.isinstance(a, _b_.str)){\n a = _b_.int.from_bytes(_b_.bytes.$factory(a, 'utf-8'), 'big')\n }else if(_b_.isinstance(a, [_b_.bytes, _b_.bytearray])){\n a = _b_.int.from_bytes(a, 'big')\n }else if(!_b_.isinstance(a, _b_.int)){\n throw _b_.TypeError.$factory('wrong argument')\n }\n if(a.__class__ === $B.long_int){\n // In this implementation, seed() only accepts safe integers\n // Generate a random one from the underlying string value,\n // using an arbitrary seed (99) to always return the same\n // integer\n var numbers = a.value,\n res = '',\n pos\n self._random.seed(99)\n for(var i = 0; i < 17; i++){\n pos = parseInt(self._random() * numbers.length)\n res += numbers.charAt(pos)\n }\n a = parseInt(res)\n }\n }else{\n throw _b_.ValueError.$factory('version can only be 1 or 2')\n }\n\n self._random.seed(a)\n gauss_next = null\n}\n\nRandom.setstate = function(state){\n // Restore internal state from object returned by getstate().\n var $ = $B.args('setstate', 2, {self: null, state:null}, ['self', 'state'],\n arguments, {}, null, null),\n self = $.self\n var state = self._random.getstate()\n if(! 
Array.isArray($.state)){\n throw _b_.TypeError.$factory('state must be a list, not ' +\n $B.class_name($.state))\n }\n if($.state.length < state.length){\n throw _b_.ValueError.$factory(\"need more than \" + $.state.length +\n \" values to unpack\")\n }else if($.state.length > state.length){\n throw _b_.ValueError.$factory(\"too many values to unpack (expected \" +\n state.length + \")\")\n }\n if($.state[0] != 3){\n throw _b_.ValueError.$factory(\"ValueError: state with version \" +\n $.state[0] + \" passed to Random.setstate() of version 3\")\n }\n var second = _b_.list.$factory($.state[1])\n if(second.length !== state[1].length){\n throw _b_.ValueError.$factory('state vector is the wrong size')\n }\n for(var i = 0; i < second.length; i++){\n if(typeof second[i] != 'number'){\n throw _b_.ValueError.$factory('state vector items must be integers')\n }\n }\n self._random.setstate($.state)\n}\n\nRandom.shuffle = function(x, random){\n /*\n x, random = random.random -> shuffle list x in place; return None.\n\n Optional arg random is a 0-argument function returning a random\n float in [0.0, 1.0); by default, the standard random.random.\n */\n\n var $ = $B.args('shuffle', 3, {self: null, x: null, random: null},\n ['self', 'x','random'],\n arguments, {random: null}, null, null),\n self = $.self,\n x = $.x,\n random = $.random\n\n if(random === null){random = self._random}\n\n if(Array.isArray(x)){\n for(var i = x.length - 1; i >= 0;i--){\n var j = Math.floor(random() * (i + 1)),\n temp = x[j]\n x[j] = x[i]\n x[i] = temp\n }\n }else{\n var len = _b_.getattr(x, '__len__')(), temp,\n x_get = _b_.getattr(x, '__getitem__'),\n x_set = _b_.getattr(x, '__setitem__')\n\n for(i = len - 1; i >= 0; i--){\n var j = Math.floor(random() * (i + 1)),\n temp = x_get(j)\n x_set(j, x_get(i))\n x_set(i, temp)\n }\n }\n return _b_.None\n}\n\nRandom.triangular = function(){\n /*\n Triangular distribution.\n\n Continuous distribution bounded by given lower and upper limits,\n and having a given mode value in-between.\n\n http://en.wikipedia.org/wiki/Triangular_distribution\n */\n var $ = $B.args('triangular', 4,\n {self: null, low: null, high: null, mode: null},\n ['self', 'low', 'high', 'mode'],\n arguments, {low: 0, high: 1, mode: null}, null, null),\n low = $.low,\n high = $.high,\n mode = $.mode\n\n var u = $.self._random(),\n c = mode === null ? 0.5 : (mode - low) / (high - low)\n if(u > c){\n u = 1 - u\n c = 1 - c\n var temp = low\n low = high\n high = temp\n }\n return low + (high - low) * Math.pow(u * c, 0.5)\n}\n\nRandom.uniform = function(){\n var $ = $B.args('uniform', 3, {self: null, a: null, b: null},\n ['self', 'a', 'b'], arguments, {}, null, null),\n a = $B.$GetInt($.a),\n b = $B.$GetInt($.b)\n\n return a + (b - a) * $.self._random()\n}\n\nRandom.vonmisesvariate = function(){\n /* Circular data distribution.\n\n mu is the mean angle, expressed in radians between 0 and 2*pi, and\n kappa is the concentration parameter, which must be greater than or\n equal to zero. 
If kappa is equal to zero, this distribution reduces\n to a uniform random angle over the range 0 to 2*pi.\n\n */\n // mu: mean angle (in radians between 0 and 2*pi)\n // kappa: concentration parameter kappa (>= 0)\n // if kappa = 0 generate uniform random angle\n\n // Based upon an algorithm published in: Fisher, N.I.,\n // \"Statistical Analysis of Circular Data\", Cambridge\n // University Press, 1993.\n\n // Thanks to Magnus Kessler for a correction to the\n // implementation of step 4.\n\n var $ = $B.args('vonmisesvariate', 3,\n {self: null, mu: null, kappa:null}, ['self', 'mu', 'kappa'],\n arguments, {}, null, null),\n self = $.self,\n mu = $.mu,\n kappa = $.kappa,\n TWOPI = 2*Math.PI\n\n if(kappa <= 1e-6){return TWOPI * self._random()}\n\n var s = 0.5 / kappa,\n r = s + Math.sqrt(1.0 + s * s)\n\n while(true){\n var u1 = self._random(),\n z = Math.cos(Math.PI * u1),\n d = z / (r + z),\n u2 = self._random()\n if((u2 < 1.0 - d * d) ||\n (u2 <= (1.0 - d) * Math.exp(d))){\n break\n }\n }\n var q = 1.0 / r,\n f = (q + z) / (1.0 + q * z),\n u3 = self._random()\n if(u3 > 0.5){var theta = (mu + Math.acos(f)) % TWOPI}\n else{var theta = (mu - Math.acos(f)) % TWOPI}\n return theta\n}\n\nRandom.weibullvariate = function(){\n /*Weibull distribution.\n\n alpha is the scale parameter and beta is the shape parameter.\n\n */\n // Jain, pg. 499; bug fix courtesy Bill Arms\n var $ = $B.args(\"weibullvariate\", 3,\n {self: null, alpha: null, beta: null},\n [\"self\", \"alpha\", \"beta\"], arguments, {}, null, null)\n\n var u = 1 - $.self._random()\n return $.alpha * Math.pow(-Math.log(u), 1 / $.beta)\n}\n\n$B.set_func_names(Random, \"random\")\n\nvar $module = Random.$factory()\nfor(var attr in Random){\n $module[attr] = (function(x){\n return function(){return Random[x]($module, ...arguments)}\n })(attr)\n $module[attr].$infos = Random[attr].$infos\n}\n\n$module.Random = Random\n\nvar SystemRandom = $B.make_class(\"SystemRandom\",\n function(){\n return {__class__: SystemRandom}\n }\n)\nSystemRandom.__getattribute__ = function(){\n throw $B.builtins.NotImplementedError.$factory()\n}\n\n$module.SystemRandom = SystemRandom\n\nreturn $module\n\n})(__BRYTHON__)\n\n"], "unicodedata": [".js", "// Implementation of unicodedata\n\nvar $module = (function($B){\n\n var _b_ = $B.builtins\n\n // Load unicode table if not already loaded\n if($B.unicodedb === undefined){\n var xhr = new XMLHttpRequest\n xhr.open(\"GET\",\n $B.brython_path + \"unicode.txt\", false)\n xhr.onreadystatechange = function(){\n if(this.readyState == 4){\n if(this.status == 200){\n $B.unicodedb = this.responseText\n }else{\n console.log(\"Warning - could not \" +\n \"load unicode.txt\")\n }\n }\n }\n xhr.send()\n }\n\n function _info(chr){\n var ord = chr.codePointAt(0),\n hex = ord.toString(16).toUpperCase()\n while(hex.length < 4){hex = \"0\" + hex}\n var re = new RegExp(\"^\" + hex +\";(.+?);(.*?);(.*?);(.*?);(.*?);(.*);(.*);(.*)$\",\n \"m\"),\n search = re.exec($B.unicodedb)\n if(search === null){\n return null\n }else{\n return {\n name: search[1],\n category: search[2],\n combining: search[3],\n bidirectional: search[4],\n decomposition: search[5],\n decimal: search[6],\n digit: search[7],\n numeric: search[8]\n }\n }\n }\n\n function bidirectional(chr){\n var search = _info(chr)\n if(search === null){\n console.log(\"error\", chr, hex)\n throw _b_.KeyError.$factory(chr)\n }\n return search.bidirectional\n }\n\n function category(chr){\n // Returns the general category assigned to the character chr as\n // string.\n 
if($B.is_unicode_cn(chr.codePointAt(0))){ // in unicode_data.js\n return \"Cn\"\n }\n var search = _info(chr)\n if(search === null){\n console.log(\"error\", chr)\n throw _b_.KeyError.$factory(chr)\n }\n return search.category\n }\n\n function combining(chr){\n // Returns the general category assigned to the character chr as\n // string.\n var search = _info(chr)\n if(search === null){\n console.log(\"error\", chr)\n throw _b_.KeyError.$factory(chr)\n }\n return parseInt(search.combining)\n }\n\n function decimal(chr, _default){\n // Returns the decimal value assigned to the character chr as integer.\n // If no such value is defined, default is returned, or, if not given,\n // ValueError is raised.\n var search = _info(chr)\n if(search === null){\n console.log(\"error\", chr)\n throw _b_.KeyError.$factory(chr)\n }\n return parseInt(search.decimal)\n }\n\n function decomposition(chr, _default){\n // Returns the decimal value assigned to the character chr as integer.\n // If no such value is defined, default is returned, or, if not given,\n // ValueError is raised.\n var search = _info(chr)\n if(search === null){\n console.log(\"error\", chr)\n throw _b_.KeyError.$factory(chr)\n }\n return search.decomposition\n }\n\n function digit(chr, _default){\n // Returns the decimal value assigned to the character chr as integer.\n // If no such value is defined, default is returned, or, if not given,\n // ValueError is raised.\n var search = _info(chr)\n if(search === null){\n console.log(\"error\", chr)\n throw _b_.KeyError.$factory(chr)\n }\n return parseInt(search.digit)\n }\n\n function lookup(name){\n // Look up character by name. If a character with the given name is\n // found, return the corresponding character. If not found, KeyError\n // is raised.\n var re = new RegExp(\"^([0-9A-F]+);\" +\n name + \";(.*)$\", \"m\")\n search = re.exec($B.unicodedb)\n if(search === null){\n throw _b_.KeyError.$factory(\"undefined character name '\" +\n name + \"'\")\n }\n var res = parseInt(search[1], 16)\n return _b_.chr(res)\n }\n\n function name(chr, _default){\n // Returns the name assigned to the character chr as a string. If no\n // name is defined, default is returned, or, if not given, ValueError\n // is raised.\n var search = _info(chr)\n if(search === null){\n if(_default){return _default}\n throw _b_.KeyError.$factory(\"undefined character name '\" +\n chr + \"'\")\n }\n return search.name\n }\n\n function _norm(form, chr){\n var search = _info(chr)\n if(search === null){\n throw _b_.KeyError.$factory(chr)\n }\n switch(form){\n case \"NFC\":\n return chr\n case \"NFD\":\n var decomp = decomposition(chr),\n parts = decomp.split(\" \"),\n res = \"\"\n if(parts[0].startsWith(\"<\")){\n return chr\n }\n parts.forEach(function(part){\n if(! 
part.startsWith(\"<\")){\n res += _b_.chr(parseInt(part, 16))\n }\n })\n return res\n case \"NFKC\":\n var decomp = decomposition(chr),\n parts = decomp.split(\" \")\n if(parts[0] == \"\"){\n var res = \"\"\n parts.slice(1).forEach(function(part){\n res += _b_.chr(parseInt(part, 16))\n })\n return res\n }\n return chr\n case \"NFKD\":\n var decomp = decomposition(chr),\n parts = decomp.split(\" \")\n if(parts[0] == \"\"){\n var res = \"\"\n parts.slice(1).forEach(function(part){\n res += _b_.chr(parseInt(part, 16))\n })\n return res\n }\n return chr\n\n default:\n throw _b_.ValueError.$factory(\"invalid normalization form\")\n }\n }\n\n function normalize(form, unistr){\n var res = \"\"\n for(var i = 0, len = unistr.length; i < len; i++){\n res += _norm(form, unistr.charAt(i))\n }\n return res\n }\n\n function numeric(chr, _default){\n // Returns the decimal value assigned to the character chr as integer.\n // If no such value is defined, default is returned, or, if not given,\n // ValueError is raised.\n var search = _info(chr)\n if(search === null){\n if(_default){return _default}\n throw _b_.KeyError.$factory(chr)\n }\n return new Number(eval(search.numeric))\n }\n\n var module = {\n bidirectional: bidirectional,\n category: category,\n combining: combining,\n decimal: decimal,\n decomposition: decomposition,\n digit: digit,\n lookup: lookup,\n name: name,\n normalize: normalize,\n numeric: numeric,\n unidata_version: \"11.0.0\"\n }\n module.ucd_3_2_0 = {}\n for(var key in module){\n if(key == \"unidata_version\"){\n module.ucd_3_2_0[key] = '3.2.0'\n }else{\n module.ucd_3_2_0[key] = module[key] // approximation...\n }\n }\n return module\n\n})(__BRYTHON__)"], "_aio": [".js", "// Replacement for asyncio.\n//\n// CPython asyncio can't be implemented for Brython because it relies on\n// blocking function (eg run(), run_until_complete()), and such functions\n// can't be defined in Javascript. 
It also manages an event loop, and a\n// browser only has its own built-in event loop.\n//\n// This module exposes functions whose result can be \"await\"-ed inside\n// asynchrounous functions defined by \"async def\".\n\nvar $module = (function($B){\n\nvar _b_ = $B.builtins\n\nvar responseType = {\n \"text\": \"text\",\n \"binary\": \"arraybuffer\",\n \"dataURL\": \"arraybuffer\"\n}\n\nfunction handle_kwargs(kw, method){\n var data,\n cache = false,\n format = \"text\",\n headers = {},\n timeout = {}\n for(var key in kw.$string_dict){\n if(key == \"data\"){\n var params = kw.$string_dict[key][0]\n if(typeof params == \"string\"){\n data = params\n }else if(_b_.isinstance(params, _b_.bytes)){\n data = new ArrayBuffer(params.source.length)\n var array = new Int8Array(data)\n for(var i = 0, len = params.source.length; i < len; i++){\n array[i] = params.source[i]\n }\n }else{\n if(params.__class__ !== _b_.dict){\n throw _b_.TypeError.$factory(\"wrong type for data, \" +\n \"expected dict, bytes or str, got \" + \n $B.class_name(params))\n }\n params = params.$string_dict\n var items = []\n for(var key in params){\n items.push(encodeURIComponent(key) + \"=\" +\n encodeURIComponent(params[key][0]))\n }\n data = items.join(\"&\")\n }\n }else if(key == \"headers\"){\n headers = _b_.dict.$to_obj(kw.$string_dict[key][0])\n }else if(key.startsWith(\"on\")){\n var event = key.substr(2)\n if(event == \"timeout\"){\n timeout.func = kw.$string_dict[key][0]\n }else{\n ajax.bind(self, event, kw.$string_dict[key][0])\n }\n }else if(key == \"timeout\"){\n timeout.seconds = kw.$string_dict[key][0]\n }else if(key == \"cache\"){\n cache = kw.$string_dict[key][0]\n }else if(key == \"format\"){\n format = kw.$string_dict[key][0]\n }\n }\n if(method == \"post\"){\n // For POST requests, set default header\n if(! headers.hasOwnProperty(\"Content-type\")){\n headers[\"Content-Type\"] = \"application/x-www-form-urlencoded\"\n }\n if(data && !headers.hasOwnProperty(\"Content-Length\")){\n headers[\"Content-Length\"] = data.length\n }\n }\n return {\n body: data,\n cache: cache,\n format: format,\n timeout: timeout,\n headers: headers\n }\n}\n\nfunction ajax(){\n var $ = $B.args(\"ajax\", 2, {method: null, url: null},\n [\"method\", \"url\"], arguments, {},\n null, \"kw\"),\n method = $.method.toUpperCase(),\n url = $.url,\n kw = $.kw\n var args = handle_kwargs(kw, \"get\")\n if(method == \"GET\" && ! args.cache){\n url = url + \"?ts\" + (new Date()).getTime() + \"=0\"\n }\n if(args.body && method == \"GET\"){\n url = url + (args.cache ? \"?\" : \"&\") + args.body\n }\n var func = function(){\n return new Promise(function(resolve, reject){\n var xhr = new XMLHttpRequest()\n xhr.open(method, url, true)\n for(key in args.headers){\n xhr.setRequestHeader(key, args.headers[key])\n }\n xhr.format = args.format\n xhr.responseType = responseType[args.format]\n xhr.onreadystatechange = function(){\n if(this.readyState == 4){\n this.__class__ = HTTPRequest\n resolve(this)\n }\n }\n if(method == \"POST\" && args.body){\n xhr.send(args.body)\n }else{\n xhr.send()\n }\n })\n }\n func.$infos = {\n __name__: \"ajax_\" + method\n }\n return {\n __class__: $B.coroutine,\n $args: [url, args],\n $func: func\n }\n}\n\nfunction event(){\n // event(element, *names) is a Promise on the events \"names\" happening on\n // the element. 
This promise always resolves (never rejects) with the\n // first triggered DOM event.\n var $ = $B.args(\"event\", 1, {element: null},\n [\"element\"], arguments, {}, \"names\", null),\n element = $.element,\n names = $.names\n return new Promise(function(resolve){\n var callbacks = []\n names.forEach(function(name){\n var callback = function(evt){\n // When one of the handled events is triggered, all bindings\n // are removed\n callbacks.forEach(function(items){\n $B.DOMNode.unbind(element, items[0], items[1])\n })\n resolve($B.$DOMEvent(evt))\n }\n callbacks.push([name, callback])\n $B.DOMNode.bind(element, name, callback)\n })\n })\n}\n\nvar HTTPRequest = $B.make_class(\"Request\")\n\nHTTPRequest.data = _b_.property.$factory(function(self){\n if(self.format == \"binary\"){\n var view = new Uint8Array(self.response)\n return _b_.bytes.$factory(Array.from(view))\n }else if(self.format == \"text\"){\n return self.responseText\n }else if(self.format == \"dataURL\"){\n var base64String = btoa(String.fromCharCode.apply(null,\n new Uint8Array(self.response)))\n return \"data:\" + self.getResponseHeader(\"Content-Type\") +\n \";base64,\" + base64String\n }\n})\n\nHTTPRequest.response_headers = _b_.property.$factory(function(self){\n var headers = self.getAllResponseHeaders()\n if(headers === null){return _b_.None}\n var res = $B.empty_dict()\n if(headers.length > 0){\n // Convert the header string into an array\n // of individual headers\n var lines = headers.trim().split(/[\\r\\n]+/)\n // Create a map of header names to values\n lines.forEach(function(line){\n var parts = line.split(': ')\n var header = parts.shift()\n var value = parts.join(': ')\n _b_.dict.$setitem(res, header, value)\n })\n }\n return res\n})\n\nfunction get(){\n return ajax.bind(null, \"GET\").apply(null, arguments)\n}\n\nfunction iscoroutine(f){\n return f.__class__ === $B.coroutine\n}\n\nfunction iscoroutinefunction(f){\n return (f.$infos.__code__.co_flags & 128) != 0\n}\n\nfunction post(){\n return ajax.bind(null, \"POST\").apply(null, arguments)\n}\n\nfunction run(coro){\n var handle_success = function(){\n $B.leave_frame()\n },\n handle_error = function(err){\n // coro.$stack is a snapshot of the frames stack when the async\n // function was called. 
Restore it to get the correct call tree\n console.log(\"Exception in asynchronous function\")\n err.$stack = coro.$stack.concat([$B.last(err.$stack)])\n $B.handle_error(err)\n }\n\n var $ = $B.args(\"run\", 3, {coro: null, onsuccess: null, onerror: null},\n [\"coro\", \"onsuccess\", \"onerror\"], arguments,\n {onsuccess: handle_success, onerror: handle_error},\n null, null),\n coro = $.coro,\n onsuccess = $.onsuccess,\n onerror = $.onerror\n\n if(onerror !== handle_error){\n function error_func(exc){\n try{\n onerror(exc)\n }catch(err){\n handle_error(err)\n }\n }\n }else{\n error_func = handle_error\n }\n var save_stack = $B.frames_stack.slice()\n $B.coroutine.send(coro).then(onsuccess).catch(error_func)\n $B.frames_stack = save_stack\n return _b_.None\n}\n\nfunction sleep(seconds){\n var func = function(){\n return new Promise(resolve => setTimeout(\n function(){resolve(_b_.None)}, 1000 * seconds))\n }\n func.$infos = {\n __name__: \"sleep\"\n }\n return {\n __class__: $B.coroutine,\n $args: [seconds],\n $func: func\n }\n}\n\nreturn {\n ajax: ajax,\n event: event,\n get: get,\n iscoroutine: iscoroutine,\n iscoroutinefunction: iscoroutinefunction,\n post: post,\n run: run,\n sleep: sleep\n}\n\n})(__BRYTHON__)\n"], "_ajax": [".js", "// ajax\nvar $module = (function($B){\n\neval($B.InjectBuiltins())\nvar $N = $B.builtins.None,\n _b_ = $B.builtins\n\nvar add_to_res = function(res, key, val) {\n if(isinstance(val, list)){\n for (j = 0; j < val.length; j++) {\n add_to_res(res, key, val[j])\n }\n }else if (val instanceof File || val instanceof Blob){\n res.append(key, val)\n }else{res.append(key,str.$factory(val))}\n}\n\nfunction set_timeout(self, timeout){\n if(timeout.seconds !== undefined){\n self.js.$requestTimer = setTimeout(\n function() {\n self.js.abort()\n if(timeout.func){\n timeout.func()\n }\n },\n timeout.seconds * 1000)\n }\n}\n\nfunction _read(req){\n var xhr = req.js,\n res\n if(xhr.responseType == \"json\"){\n return $B.structuredclone2pyobj(xhr.response)\n }else if(xhr.responseType == \"\" || xhr.responseType == \"text\"){\n return xhr.responseText\n }\n var abuf = new Uint8Array(xhr.response)\n res = []\n for(var i = 0, len = abuf.length; i < len; i++){\n res.push(abuf[i])\n }\n var b = _b_.bytes.$factory(res)\n\n if(xhr.mode == \"binary\"){\n return b\n }else{\n var encoding = xhr.encoding || \"utf-8\"\n return _b_.bytes.decode(b, encoding)\n }\n}\n\nfunction handle_kwargs(self, kw, method){\n var data,\n encoding,\n headers,\n cache,\n mode = \"text\",\n timeout = {}\n for(var key in kw.$string_dict){\n if(key == \"data\"){\n var params = kw.$string_dict[key][0]\n if(typeof params == \"string\"){\n data = params\n }else if(params.__class__ === _b_.dict){\n params = params.$string_dict\n var items = []\n for(var key in params){\n items.push(encodeURIComponent(key) + \"=\" +\n encodeURIComponent(params[key][0]))\n }\n data = items.join(\"&\")\n }else{\n throw _b_.TypeError.$factory(\"wrong type for data: \" +\n $B.class_name(params))\n }\n }else if(key == \"encoding\"){\n encoding = kw.$string_dict[key][0]\n self.js.encoding = encoding\n }else if(key == \"headers\"){\n var value = kw.$string_dict[key][0]\n if(! 
_b_.isinstance(value, _b_.dict)){\n throw _b_.ValueError.$factory(\n \"headers must be a dict, not \" + $B.class_name(value))\n }\n headers = value.$string_dict\n for(var key in headers){\n self.js.setRequestHeader(key, headers[key][0])\n }\n }else if(key.startsWith(\"on\")){\n var event = key.substr(2)\n if(event == \"timeout\"){\n timeout.func = kw.$string_dict[key][0]\n }else{\n var f = kw.$string_dict[key][0]\n ajax.bind(self, event, f)\n }\n }else if(key == \"mode\"){\n var mode = kw.$string_dict[key][0]\n if(mode == \"json\"){\n self.js.responseType = \"json\"\n }else{\n self.js.responseType = \"arraybuffer\"\n if(mode != \"text\" && mode != \"binary\"){\n throw _b_.ValueError.$factory(\"invalid mode: \" + mode)\n }\n }\n self.js.mode = mode\n }else if(key == \"timeout\"){\n timeout.seconds = kw.$string_dict[key][0]\n }else if(key == \"cache\"){\n cache = kw.$string_dict[key][0]\n }\n }\n if(encoding && mode != \"text\"){\n throw _b_.ValueError.$factory(\"encoding not supported for mode \" +\n mode)\n }\n if((method == \"post\" || method == \"put\") && ! headers){\n // For POST requests, set default header\n self.js.setRequestHeader(\"Content-type\",\n \"application/x-www-form-urlencoded\")\n }\n return {\n cache: cache,\n data:data,\n encoding: encoding,\n mode: mode,\n timeout: timeout\n }\n}\n\nvar ajax = {\n __class__: _b_.type,\n __mro__: [_b_.object],\n\n __repr__ : function(self){return ''},\n __str__ : function(self){return ''},\n\n $infos: {\n __module__: \"builtins\",\n __name__: \"ajax\"\n },\n\n __getattribute__: function(self, attr){\n if(ajax[attr] !== undefined){\n return function(){\n return ajax[attr].call(null, self, ...arguments)\n }\n }else if(self.js[attr] !== undefined){\n if(typeof self.js[attr] == \"function\"){\n return function(){\n if(attr == \"setRequestHeader\"){\n self.$has_request_header = true\n }else if(attr == \"open\"){\n self.$method = arguments[0]\n }\n return self.js[attr](...arguments)\n }\n }else{\n return self.js[attr]\n }\n }else if(attr == \"text\"){\n return self.js.responseText\n }else if(attr == \"xml\"){\n return self.js.responseXML\n }\n },\n\n\n bind: function(self, evt, func){\n // req.bind(evt,func) is the same as req.onevt = func\n self.js['on' + evt] = function(){\n try{\n return func.apply(null, arguments)\n }catch(err){\n if(err.__class__ !== undefined){\n var msg = _b_.getattr(err, 'info') +\n '\\n' + err.__class__.$infos.__name__\n if(err.args){msg += ': ' + err.args[0]}\n try{getattr($B.stderr, \"write\")(msg)}\n catch(err){console.log(msg)}\n }else{\n try{getattr($B.stderr, \"write\")(err)}\n catch(err1){console.log(err)}\n }\n }\n }\n return $N\n },\n\n send: function(self, params){\n // params can be Python dictionary or string\n var res = ''\n if(!params){\n self.js.send()\n return $N\n }else if(isinstance(params, str)){\n res = params\n }else if(isinstance(params, dict)){\n if(self.headers['content-type'] == 'multipart/form-data'){\n // The FormData object serializes the data in the 'multipart/form-data'\n // content-type so we may as well override that header if it was set\n // by the user.\n res = new FormData()\n var items = _b_.list.$factory(_b_.dict.items(params))\n for(var i = 0, len = items.length; i < len; i++){\n add_to_res(res, str.$factory(items[i][0]), items[i][1])\n }\n }else{\n if(self.$method && self.$method.toUpperCase() == \"POST\" &&\n ! 
self.$has_request_header){\n self.js.setRequestHeader(\"Content-Type\",\n \"application/x-www-form-urlencoded\")\n }\n var items = _b_.list.$factory(_b_.dict.items(params))\n for(var i = 0, len = items.length; i < len; i++){\n var key = encodeURIComponent(str.$factory(items[i][0]));\n if(isinstance(items[i][1], list)){\n for (j = 0; j < items[i][1].length; j++) {\n res += key +'=' +\n encodeURIComponent(str.$factory(items[i][1][j])) + '&'\n }\n }else{\n res += key + '=' +\n encodeURIComponent(str.$factory(items[i][1])) + '&'\n }\n }\n res = res.substr(0, res.length - 1)\n }\n }else{\n throw _b_.TypeError.$factory(\n \"send() argument must be string or dictionary, not '\" +\n str.$factory(params.__class__) + \"'\")\n }\n self.js.send(res)\n return $N\n },\n\n set_header: function(self,key,value){\n self.js.setRequestHeader(key,value)\n self.headers[key.toLowerCase()] = value.toLowerCase()\n },\n\n set_timeout: function(self, seconds, func){\n self.js.$requestTimer = setTimeout(\n function() {self.js.abort();func()},\n seconds * 1000)\n }\n\n}\n\najax.$factory = function(){\n\n if(window.XMLHttpRequest){// code for IE7+, Firefox, Chrome, Opera, Safari\n var xmlhttp = new XMLHttpRequest()\n }else{// code for IE6, IE5\n var xmlhttp = new ActiveXObject(\"Microsoft.XMLHTTP\")\n }\n xmlhttp.onreadystatechange = function(){\n // here, \"this\" refers to xmlhttp\n var state = this.readyState\n if(this.responseType == \"\" || this.responseType == \"text\"){\n res.js.text = this.responseText\n }\n var timer = this.$requestTimer\n if(state == 0 && this.onuninitialized){this.onuninitialized(res)}\n else if(state == 1 && this.onloading){this.onloading(res)}\n else if(state == 2 && this.onloaded){this.onloaded(res)}\n else if(state == 3 && this.oninteractive){this.oninteractive(res)}\n else if(state == 4 && this.oncomplete){\n if(timer !== null){window.clearTimeout(timer)}\n this.oncomplete(res)\n }\n }\n var res = {\n __class__: ajax,\n js: xmlhttp,\n headers: {}\n }\n return res\n}\n\nfunction _request_without_body(method){\n var $ = $B.args(method, 3, {method: null, url: null, blocking: null},\n [\"method\", \"url\", \"blocking\"], arguments, {blocking: false},\n null, \"kw\"),\n method = $.method,\n url = $.url,\n async = !$.blocking,\n kw = $.kw\n var self = ajax.$factory()\n self.js.open(method.toUpperCase(), url, async)\n var items = handle_kwargs(self, kw, method),\n qs = items.data,\n timeout = items.timeout\n set_timeout(self, timeout)\n if(qs){\n url += \"?\" + qs\n }\n if(! (items.cache === true)){\n url += (qs ? 
\"&\" : \"?\") + (new Date()).getTime()\n }\n // Add function read() to return str or bytes according to mode\n self.js.read = function(){\n return _read(self)\n }\n self.js.send()\n}\n\nfunction _request_with_body(method){\n var $ = $B.args(method, 3, {method: null, url: null, blocking: null},\n [\"method\", \"url\", \"blocking\"], arguments, {blocking: false},\n null, \"kw\"),\n method = $.method,\n url = $.url,\n async = !$.blocking,\n kw = $.kw\n\n var self = ajax.$factory()\n self.js.open(method.toUpperCase(), url, async)\n var items = handle_kwargs(self, kw, method),\n data = items.data,\n timeout = items.timeout\n set_timeout(self, timeout)\n // Add function read() to return str or bytes according to mode\n self.js.read = function(){\n return _read(self)\n }\n self.js.send(data)\n}\n\nfunction _delete(){\n _request_without_body.call(null, \"delete\", ...arguments)\n}\n\nfunction get(){\n _request_without_body.call(null, \"get\", ...arguments)\n}\n\nfunction head(){\n _request_without_body.call(null, \"head\", ...arguments)\n}\n\nfunction options(){\n _request_without_body.call(null, \"options\", ...arguments)\n}\n\nfunction post(){\n _request_with_body.call(null, \"post\", ...arguments)\n}\n\nfunction put(){\n _request_with_body.call(null, \"put\", ...arguments)\n}\n\nfunction file_upload(){\n // ajax.file_upload(url, file, method=\"POST\", **callbacks)\n var $ = $B.args(\"file_upload\", 2, {url: null, \"file\": file},\n [\"url\", \"file\"], arguments, {}, null, \"kw\"),\n url = $.url,\n file = $.file,\n kw = $.kw\n\n var self = ajax.$factory(),\n method = 'POST',\n field_name = 'filetosave'\n\n if(kw.$string_dict.method !== undefined){\n method = kw.$string_dict.method[0]\n }\n\n if(kw.$string_dict.field_name !== undefined){\n field_name = kw.$string_dict.field_name[0]\n }\n\n var formdata = new FormData()\n formdata.append(field_name, file, file.name)\n\n self.js.open(method, url, True)\n self.js.send(formdata)\n\n for(key in kw.$string_dict){\n if(key.startsWith(\"on\")){\n ajax.bind(self, key.substr(2), kw.$string_dict[key][0])\n }\n }\n}\n\n$B.set_func_names(ajax)\n\nreturn {\n ajax: ajax,\n Ajax: ajax,\n delete: _delete,\n file_upload: file_upload,\n get: get,\n head: head,\n options: options,\n post: post,\n put: put\n}\n\n})(__BRYTHON__)\n"], "_base64": [".js", "var $module=(function($B){\n\nvar _b_ = $B.builtins,\n _keyStr = \"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=\"\n\nfunction make_alphabet(altchars){\n var alphabet = _keyStr\n if(altchars !== undefined && altchars !== _b_.None){\n // altchars is an instance of Python bytes\n var source = altchars.source\n alphabet = alphabet.substr(0,alphabet.length-3) +\n _b_.chr(source[0]) + _b_.chr(source[1]) + '='\n }\n return alphabet\n}\n\nvar Base64 = {\n error: function(){return 'binascii_error'},\n\n encode: function(bytes, altchars){\n\n var input = bytes.source,\n output = \"\",\n chr1, chr2, chr3, enc1, enc2, enc3, enc4\n var i = 0\n\n var alphabet = make_alphabet(altchars)\n\n while(i < input.length){\n\n chr1 = input[i++]\n chr2 = input[i++]\n chr3 = input[i++]\n\n enc1 = chr1 >> 2\n enc2 = ((chr1 & 3) << 4) | (chr2 >> 4)\n enc3 = ((chr2 & 15) << 2) | (chr3 >> 6)\n enc4 = chr3 & 63\n\n if(isNaN(chr2)){\n enc3 = enc4 = 64\n }else if(isNaN(chr3)){\n enc4 = 64\n }\n\n output = output + alphabet.charAt(enc1) +\n alphabet.charAt(enc2) +\n alphabet.charAt(enc3) +\n alphabet.charAt(enc4)\n\n }\n return _b_.bytes.$factory(output, 'utf-8', 'strict')\n },\n\n\n decode: function(bytes, altchars, validate){\n 
var output = [],\n chr1, chr2, chr3,\n enc1, enc2, enc3, enc4\n\n var alphabet = make_alphabet(altchars)\n\n var input = bytes.source\n\n // If validate is set, check that all characters in input\n // are in the alphabet\n var _input = ''\n var padding = 0\n for(var i = 0, len = input.length; i < len; i++){\n var car = String.fromCharCode(input[i])\n var char_num = alphabet.indexOf(car)\n if(char_num == -1){\n if(validate){throw Base64.error(\"Non-base64 digit found: \" +\n car)}\n }else if(char_num == 64 && i < input.length - 2){\n if(validate){throw Base64.error(\"Non-base64 digit found: \" +\n car)}\n }else if(char_num == 64 && i >= input.length - 2){\n padding++\n _input += car\n }else{\n _input += car\n }\n }\n input = _input\n if(_input.length == padding){return _b_.bytes.$factory([])}\n if( _input.length % 4 > 0){throw Base64.error(\"Incorrect padding\")}\n\n var i = 0\n while(i < input.length){\n\n enc1 = alphabet.indexOf(input.charAt(i++))\n enc2 = alphabet.indexOf(input.charAt(i++))\n enc3 = alphabet.indexOf(input.charAt(i++))\n enc4 = alphabet.indexOf(input.charAt(i++))\n\n chr1 = (enc1 << 2) | (enc2 >> 4)\n chr2 = ((enc2 & 15) << 4) | (enc3 >> 2)\n chr3 = ((enc3 & 3) << 6) | enc4\n\n output.push(chr1)\n\n if(enc3 != 64){output.push(chr2)}\n if(enc4 != 64){output.push(chr3)}\n\n }\n // return Python bytes\n return _b_.bytes.$factory(output, 'utf-8', 'strict')\n\n },\n\n _utf8_encode: function(string) {\n string = string.replace(/\\r\\n/g, \"\\n\")\n var utftext = \"\";\n\n for(var n = 0; n < string.length; n++){\n\n var c = string.charCodeAt(n)\n\n if(c < 128){\n utftext += String.fromCharCode(c)\n }else if((c > 127) && (c < 2048)){\n utftext += String.fromCharCode((c >> 6) | 192)\n utftext += String.fromCharCode((c & 63) | 128)\n }else{\n utftext += String.fromCharCode((c >> 12) | 224)\n utftext += String.fromCharCode(((c >> 6) & 63) | 128)\n utftext += String.fromCharCode((c & 63) | 128)\n }\n\n }\n\n return utftext\n },\n\n _utf8_decode: function(utftext) {\n var string = \"\",\n i = 0,\n c = c1 = c2 = 0\n\n while(i < utftext.length){\n\n c = utftext.charCodeAt(i)\n\n if(c < 128){\n string += String.fromCharCode(c)\n i++\n }else if((c > 191) && (c < 224)){\n c2 = utftext.charCodeAt(i + 1)\n string += String.fromCharCode(((c & 31) << 6) | (c2 & 63))\n i += 2\n }else{\n c2 = utftext.charCodeAt(i + 1)\n c3 = utftext.charCodeAt(i + 2)\n string += String.fromCharCode(\n ((c & 15) << 12) | ((c2 & 63) << 6) | (c3 & 63))\n i += 3\n }\n\n }\n\n return string\n }\n\n}\n\nreturn {Base64:Base64}\n}\n\n)(__BRYTHON__)"], "_binascii": [".js", "var $module=(function($B){\n\nvar _b_ = $B.builtins,\n _keyStr = \"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=\"\n\nvar error = $B.make_class(\"error\", _b_.Exception.$factory)\nerror.__bases__ = [_b_.Exception]\n$B.set_func_names(error, \"binascii\")\n\nfunction decode(bytes, altchars, validate){\n var output = [],\n chr1, chr2, chr3,\n enc1, enc2, enc3, enc4\n\n var alphabet = make_alphabet(altchars)\n\n var input = bytes.source\n\n // If validate is set, check that all characters in input\n // are in the alphabet\n var _input = ''\n var padding = 0\n for(var i = 0, len = input.length; i < len; i++){\n var car = String.fromCharCode(input[i])\n var char_num = alphabet.indexOf(car)\n if(char_num == -1){\n if(validate){throw error.$factory(\"Non-base64 digit found: \" +\n car)}\n }else if(char_num == 64 && i < input.length - 2){\n if(validate){throw error.$factory(\"Non-base64 digit found: \" +\n car)}\n }else if(char_num == 64 && 
i >= input.length - 2){\n padding++\n _input += car\n }else{\n _input += car\n }\n }\n input = _input\n if(_input.length == padding){return _b_.bytes.$factory([])}\n if( _input.length % 4 > 0){throw error.$factory(\"Incorrect padding\")}\n\n var i = 0\n while(i < input.length){\n\n enc1 = alphabet.indexOf(input.charAt(i++))\n enc2 = alphabet.indexOf(input.charAt(i++))\n enc3 = alphabet.indexOf(input.charAt(i++))\n enc4 = alphabet.indexOf(input.charAt(i++))\n\n chr1 = (enc1 << 2) | (enc2 >> 4)\n chr2 = ((enc2 & 15) << 4) | (enc3 >> 2)\n chr3 = ((enc3 & 3) << 6) | enc4\n\n output.push(chr1)\n\n if(enc3 != 64){output.push(chr2)}\n if(enc4 != 64){output.push(chr3)}\n\n }\n // return Python bytes\n return _b_.bytes.$factory(output, 'utf-8', 'strict')\n}\n\n\nvar hex2int = {},\n hex = '0123456789abcdef'\nfor(var i = 0; i < hex.length; i++){\n hex2int[hex[i]] = i\n hex2int[hex[i].toUpperCase()] = i\n}\n\nfunction make_alphabet(altchars){\n var alphabet = _keyStr\n if(altchars !== undefined && altchars !== _b_.None){\n // altchars is an instance of Python bytes\n var source = altchars.source\n alphabet = alphabet.substr(0,alphabet.length-3) +\n _b_.chr(source[0]) + _b_.chr(source[1]) + '='\n }\n return alphabet\n}\n\nvar module = {\n a2b_base64: function(){\n var $ = $B.args(\"a2b_base64\", 1, {s: null}, ['s'],\n arguments, {}, null, null)\n return decode(_b_.str.encode($.s, 'ascii'))\n },\n a2b_hex: function(){\n var $ = $B.args(\"a2b_hex\", 1, {s: null}, ['s'],\n arguments, {}, null, null),\n s = $.s\n if(_b_.isinstance(s, _b_.bytes)){\n s = _b_.bytes.decode(s, 'ascii')\n }\n if(typeof s !== \"string\"){\n throw _b_.TypeError.$factory(\"argument should be bytes, \" +\n \"buffer or ASCII string, not '\" + $B.class_name(s) + \"'\")\n }\n \n var len = s.length\n if(len % 2 == 1){\n throw _b_.TypeError.$factory('Odd-length string')\n }\n \n var res = []\n for(var i = 0; i < len; i += 2){\n res.push((hex2int[s.charAt(i)] << 4) + hex2int[s.charAt(i + 1)])\n }\n return _b_.bytes.$factory(res)\n },\n b2a_base64: function(){\n var $ = $B.args(\"b2a_base64\", 1, {data: null}, ['data'],\n arguments, {}, null, \"kw\")\n var newline = false\n if($.kw && $.kw.$string_dict.newline){\n newline = $.kw.$string_dict.newline[0]\n }\n\n var string = $B.to_bytes($.data),\n res = btoa(String.fromCharCode.apply(null, string))\n if(newline){res += \"\\n\"}\n return _b_.bytes.$factory(res, \"ascii\")\n },\n b2a_hex: function(obj){\n var string = $B.to_bytes(obj),\n res = []\n function conv(c){\n if(c > 9){\n c = c + 'a'.charCodeAt(0) - 10\n }else{\n c = c + '0'.charCodeAt(0)\n }\n return c\n }\n string.forEach(function(char){\n res.push(conv((char >> 4) & 0xf))\n res.push(conv(char & 0xf))\n })\n return _b_.bytes.$factory(res, \"ascii\")\n },\n b2a_uu: function(obj){\n var string = $B.to_bytes(obj)\n var len = string.length,\n res = String.fromCharCode((0x20 + len) & 0x3F)\n while(string.length > 0){\n var s = string.slice(0, 3)\n while(s.length < 3){s.push(String.fromCharCode(0))}\n var A = s[0],\n B = s[1],\n C = s[2]\n var a = (A >> 2) & 0x3F,\n b = ((A << 4) | ((B >> 4) & 0xF)) & 0x3F,\n c = (((B << 2) | ((C >> 6) & 0x3)) & 0x3F),\n d = C & 0x3F\n res += String.fromCharCode(0x20 + a, 0x20 + b, 0x20 + c, 0x20 + d)\n string = string.slice(3)\n }\n return _b_.bytes.$factory(res + \"\\n\", \"ascii\")\n },\n error: error\n}\n\nmodule.hexlify = module.b2a_hex\nmodule.unhexlify = module.a2b_hex\n\nreturn module\n}\n)(__BRYTHON__)"], "_io_classes": [".js", "var _b_ = __BRYTHON__.builtins\n\nfunction get_self(name, args){\n 
return $B.args(name, 1, {self: null}, [\"self\"], args, {}, null, null).self\n}\n\nvar _IOBase = $B.make_class(\"_IOBase\")\n_IOBase.__mro__ = [_b_.object]\n\n_IOBase.close = function(){\n get_self(\"close\", arguments).__closed = true\n}\n\n_IOBase.flush = function(){\n get_self(\"flush\", arguments)\n return _b_.None\n}\n\n// Base class for binary streams that support some kind of buffering.\nvar _BufferedIOBase = $B.make_class(\"_BufferedIOBase\")\n_BufferedIOBase.__mro__ = [_IOBase, _b_.object]\n\n// Base class for raw binary I/O.\nvar _RawIOBase = $B.make_class(\"_RawIOBase\")\n\n_RawIOBase.__mro__ = [_IOBase, _b_.object]\n\n_RawIOBase.read = function(){\n var $ = $B.args(\"read\", 2, {self: null, size: null}, [\"self\", \"size\"],\n arguments, {size: -1}, null, null),\n self = $.self,\n size = $.size,\n res\n self.$pos = self.$pos || 0\n if(size == -1){\n if(self.$pos == 0){\n res = self.$content\n }else{\n res = _b_.bytes.$factory(self.$content.source.slice(self.$pos))\n }\n self.$pos = self.$content.source.length - 1\n }else{\n res = _b_.bytes.$factory(self.$content.source.slice(self.$pos, size))\n self.$pos += size\n }\n return res\n}\n\n_RawIOBase.readall = function(){\n return _RawIOBase.read(get_self(\"readall\", arguments))\n}\n\n// Base class for text streams.\n_TextIOBase = $B.make_class(\"_TextIOBase\")\n_TextIOBase.__mro__ = [_IOBase, _b_.object]\n\nvar StringIO = $B.make_class(\"StringIO\",\n function(){\n var $ = $B.args(\"StringIO\", 2, {value: null, newline: null},\n [\"value\", \"newline\"], arguments, {value: '', newline: \"\\n\"},\n null, null)\n return {\n __class__: StringIO,\n $counter: 0,\n $string: $.value\n }\n }\n)\nStringIO.__mro__ = [$B.Reader, _b_.object]\n\nStringIO.getvalue = function(){\n var $ = $B.args(\"getvalue\", 1, {self: null},\n [\"self\"], arguments, {}, null, null)\n return $.self.$string\n}\n\nStringIO.write = function(){\n var $ = $B.args(\"write\", 2, {self: null, data: null},\n [\"self\", \"data\"], arguments, {}, null, null)\n $.self.$string += $.data\n $.self.$counter += $.data.length\n return _b_.None\n}\n$B.set_func_names(StringIO, \"_io\")\n\nvar BytesIO = $B.make_class(\"BytesIO\",\n function(){\n var $ = $B.args(\"BytesIO\", 1, {value: null},\n [\"value\"], arguments, {value: _b_.bytes.$factory()},\n null, null)\n return {\n __class__: BytesIO,\n $binary: true,\n $bytes: $.value,\n $counter: 0\n }\n }\n)\nBytesIO.__mro__ = [$B.Reader, _b_.object]\n\nBytesIO.getbuffer = function(){\n var self = get_self(\"getbuffer\", arguments)\n return self.$bytes\n}\n\nBytesIO.getvalue = function(){\n var self = get_self(\"getvalue\", arguments)\n return self.$bytes\n}\n\nBytesIO.write = function(){\n var $ = $B.args(\"write\", 2, {self: null, data: null},\n [\"self\", \"data\"], arguments, {}, null, null)\n $.self.$bytes.source = $.self.$bytes.source.concat(\n $.data.source)\n $.self.$counter += $.data.source.length\n return _b_.None\n}\n$B.set_func_names(BytesIO, \"_io\")\n\nvar $module = (function($B){\n return {\n _BufferedIOBase: _BufferedIOBase,\n _IOBase: _IOBase,\n _RawIOBase: _RawIOBase,\n _TextIOBase: $B.make_class(\"_TextIOBase\",\n function(){\n return \"fileio\"\n }\n ),\n BytesIO: BytesIO,\n FileIO: $B.make_class(\"_TextIOBase\",\n function(){\n return \"fileio\"\n }\n ),\n StringIO: StringIO,\n BufferedReader: $B.BufferedReader,\n BufferedWriter: $B.make_class(\"_TextIOBase\",\n function(){\n return \"fileio\"\n }\n ),\n BufferedRWPair: $B.make_class(\"_TextIOBase\",\n function(){\n return \"fileio\"\n }\n ),\n BufferedRandom: 
$B.make_class(\"_TextIOBase\",\n function(){\n return \"fileio\"\n }\n ),\n IncrementalNewlineDecoder: $B.make_class(\"_TextIOBase\",\n function(){\n return \"fileio\"\n }\n ),\n TextIOWrapper: $B.TextIOWrapper\n }\n})(__BRYTHON__)\n$module._IOBase.__doc__ = \"_IOBase\""], "_json": [".js", "var $module=(function($B){\n\nvar _b_ = $B.builtins\n\nfunction simple(obj){\n switch(typeof obj){\n case 'string':\n case 'number':\n case 'boolean':\n return true\n }\n if(obj instanceof Number ||\n Array.isArray(obj) ||\n _b_.isinstance(obj, [_b_.list, _b_.tuple, _b_.dict])){\n return true\n }\n return false\n}\n\nfunction to_json(obj, level){\n var $defaults = {skipkeys:_b_.False, ensure_ascii:_b_.True,\n check_circular:_b_.True, allow_nan:_b_.True, cls:_b_.None,\n indent:_b_.None, separators:_b_.None, \"default\":_b_.None,\n sort_keys:_b_.False},\n $ = $B.args(\"to_json\", 2, {obj: null, level: null}, ['obj', 'level'],\n arguments, {level: 1}, null, \"kw\"),\n kw = $.kw.$string_dict\n\n for(key in $defaults){\n if(kw[key] === undefined){\n kw[key] = $defaults[key]\n }else{\n kw[key] = kw[key][0]\n }\n }\n\n var indent = kw.indent,\n ensure_ascii = kw.ensure_ascii,\n separators = kw.separators === _b_.None ?\n kw.indent === _b_.None ? [', ', ': '] : [',', ': '] :\n kw.separators,\n skipkeys = kw.skipkeys,\n $$default = kw.default,\n sort_keys = kw.sort_keys,\n allow_nan = kw.allow_nan,\n check_circular = kw.check_circular\n var item_separator = separators[0],\n key_separator = separators[1]\n if(indent !== _b_.None){\n var indent_str\n if(typeof indent == \"string\"){\n indent_str = indent\n }else if(typeof indent == \"number\" && indent >= 1){\n indent_str = \" \".repeat(indent)\n }else{\n throw _b_.ValueError.$factory(\"invalid indent: \" +\n _b_.str.$factory(indent))\n }\n }\n var kwarg = {$nat: \"kw\", kw: {}}\n for(var key in kw){\n kwarg.kw[key] = kw[key]\n }\n switch(typeof obj){\n case 'string':\n var res = JSON.stringify(obj)\n if(ensure_ascii){\n var escaped = ''\n for(var i = 0, len = res.length; i < len; i++){\n var u = res.codePointAt(i)\n if(u > 127){\n u = u.toString(16)\n while(u.length < 4){\n u = \"0\" + u\n }\n escaped += '\\\\u' + u\n }else{\n escaped += res.charAt(i)\n }\n }\n return escaped\n }\n return res\n case 'boolean':\n return obj.toString()\n case 'number':\n if([Infinity, -Infinity].indexOf(obj) > -1 ||\n isNaN(obj)){\n if(! allow_nan){\n throw _b_.ValueError.$factory(\n 'Out of range float values are not JSON compliant')\n }\n }\n return obj.toString()\n }\n if(_b_.isinstance(obj, _b_.list)){\n var res = []\n var sep = item_separator,\n first = '[',\n last = ']'\n if(indent !== _b_.None){\n sep += \"\\n\" + indent_str.repeat(level)\n first = '[' + '\\n' + indent_str.repeat(level)\n last = '\\n' + indent_str.repeat(level - 1) + ']'\n level++\n }\n for(var i = 0, len = obj.length; i < len; i++){\n res.push(to_json(obj[i], level, kwarg))\n }\n return first + res.join(sep) + last\n }else if(obj instanceof Number){\n return obj.valueOf()\n }else if(obj === _b_.None){\n return \"null\"\n }else if(_b_.isinstance(obj, _b_.dict)){\n var res = [],\n items = $B.dict_to_list(obj)\n if(sort_keys){\n // Sort keys by alphabetical order\n items.sort()\n }\n var sep = item_separator,\n first = '{',\n last = '}'\n if(indent !== _b_.None){\n sep += \"\\n\" + indent_str.repeat(level)\n first = '{' + '\\n' + indent_str.repeat(level)\n last = '\\n' + indent_str.repeat(level - 1) + '}'\n level++\n }\n for(var i = 0, len = items.length; i < len; i++){\n var item = items[i]\n if(! 
simple(item[0])){\n if(! skipkeys){\n throw _b_.TypeError.$factory(\"keys must be str, int, \" +\n \"float, bool or None, not \" + $B.class_name(obj))\n }\n }else{\n // In the result, key must be a string\n var key = _b_.str.$factory(item[0])\n // Check circular reference\n if(check_circular && $B.repr.enter(item[1])){\n throw _b_.ValueError.$factory(\"Circular reference detected\")\n }\n res.push(\n [to_json(key, level, kwarg), to_json(item[1], level, kwarg)].\n join(key_separator))\n if(check_circular){\n $B.repr.leave(item[1])\n }\n }\n }\n return first + res.join(sep) + last\n }\n // For other types, use function default if provided\n if($$default == _b_.None){\n throw _b_.TypeError.$factory(\"Object of type \" + $B.class_name(obj) +\n \" is not JSON serializable\")\n }else{\n return to_json($B.$call($$default)(obj), level, kwarg)\n }\n}\n\nfunction from_json(s){\n var $defaults = {cls: _b_.None, object_hook: _b_.None,\n parse_float: _b_.None, parse_int: _b_.None,\n parse_constant: _b_.None, object_pairs_hook: _b_.None},\n $ = $B.args(\"from_json\", 1, {s: null}, ['s'], arguments, {},\n null, \"kw\"),\n kw = $.kw.$string_dict\n if(Object.keys(kw).length == 0){\n // default\n return $B.structuredclone2pyobj(JSON.parse(s))\n }\n for(key in $defaults){\n if(kw[key] === undefined){\n kw[key] = $defaults[key]\n }else{\n kw[key] = kw[key][0]\n }\n }\n\n function reviver(key, value){\n if(typeof value == \"number\"){\n if(Number.isInteger(value) && kw.parse_int !== _b_.None){\n return $B.$call(kw.parse_int)(value.toString())\n }else if(! Number.isInteger(value) && kw.parse_float !== _b_.None){\n return $B.$call(kw.parse_float)(value.toString())\n }else if((value === Infinity || value === -Infinity) &&\n kw.parse_constant !== _b_.None){\n return $B.$call(kw.parse_constant)(value)\n }else{\n return value\n }\n }else if(isNaN(value) && kw.parse_constant !== _b_.None){\n return $B.$call(kw.parse_constant)(value)\n }else if(typeof value == \"object\" && !Array.isArray(value) &&\n (kw.object_hook !== _b_.None ||\n kw.object_pairs_hook !== _b_.None)){\n // Apply Python function object_hook to the Python dictionary\n // built from the Javascript object \"value\"\n var py_dict = $B.structuredclone2pyobj(value)\n if(kw.object_pairs_hook === _b_.None){\n var res = $B.$call(kw.object_hook)(py_dict)\n }else{\n var items = $B.dict_to_list(py_dict),\n res = $B.$call(kw.object_pairs_hook)(items)\n }\n // Transform the result of the Python function to a Javascript\n // object\n return $B.pyobj2structuredclone(res)\n }else{\n return value\n }\n }\n\n return $B.structuredclone2pyobj(JSON.parse(s, reviver))\n}\n\nreturn {\n dumps: function(){\n return _b_.str.$factory(to_json.apply(null, arguments))\n },\n loads: from_json\n}\n\n})(__BRYTHON__)"], "_jsre": [".js", "var $module = (function($B){\n\n var _b_ = $B.builtins\n var $s = []\n for(var $b in _b_) $s.push('var ' + $b +'=_b_[\"' + $b + '\"]')\n eval($s.join(';'))\n\n var MatchObject = $B.make_class(\"Match\",\n function(jsmatch, string, pattern){\n return {\n __class__: MatchObject,\n jsmatch: jsmatch,\n string: string\n }\n }\n )\n MatchObject.item = function(self, rank){\n return self.jsmatch[rank]\n }\n MatchObject.group = function(self){\n var res = []\n for(var i = 0, _len_i = arguments.length; i < _len_i; i++){\n if(self.jsmatch[arguments[i]] === undefined){res.push(None)}\n else{res.push(self.jsmatch[arguments[i]])}\n }\n if(arguments.length == 1){return res[0]}\n return tuple.$factory(res)\n }\n MatchObject.groups = function(self, _default){\n 
if(_default===undefined){_default=None}\n var res = []\n for(var i = 1, _len_i = self.length; i < _len_i; i++){\n if(self.jsmatch[i] === undefined){res.push(_default)}\n else{res.push(self.jsmatch[i])}\n }\n return tuple.$factory(res)\n }\n MatchObject.start = function(self){\n return self.index\n }\n MatchObject.end = function(self){\n return self.length - self.index\n }\n\n $B.set_func_names(MatchObject, '_jsre')\n\n var obj = {__class__: $module,\n __str__: function(){return \"\"}\n }\n obj.A = obj.ASCII = 256\n obj.I = obj.IGNORECASE = 2 // 'i'\n obj.L = obj.LOCALE = 4\n obj.M = obj.MULTILINE = 8 // 'm'\n obj.S = obj.DOTALL = 16\n obj.U = obj.UNICODE = 32\n obj.X = obj.VERBOSE = 64\n obj._is_valid = function(pattern) {\n if ($B.$options.re == 'pyre'){return false} //force use of python's re module\n if ($B.$options.re == 'jsre'){return true} //force use of brythons re module\n // FIXME: Improve\n\n if(! isinstance(pattern, str)){\n // this is probably a SRE_PATTERN, so return false, and let\n // python's re module handle this.\n return false\n }\n var is_valid = false\n try{\n new RegExp(pattern)\n is_valid = true\n }\n catch(e){}\n if(! is_valid){return false} //if js won't parse the pattern return false\n\n // using reference http://www.regular-expressions.info/\n // to compare python re and javascript regex libraries\n\n // look for things javascript does not support\n // check for name capturing group\n var mylist = ['?P=', '?P<', '(?#', '(?<=', '(? -1) return false\n }\n\n var re_list=['\\{,\\d+\\}']\n for(var i=0, _len_i = re_list.length; i < _len_i; i++) {\n var _re = new RegExp(re_list[i])\n if (_re.test(pattern)){return false}\n }\n\n // it looks like the pattern has passed all our tests so lets assume\n // javascript can handle this pattern.\n return true\n }\n var $SRE_PatternDict = {\n __class__:_b_.type,\n $infos:{\n __name__:'SRE_Pattern'\n }\n }\n $SRE_PatternDict.__mro__ = [object]\n $SRE_PatternDict.findall = function(self, string){\n return obj.findall(self.pattern, string, self.flags)\n }\n $SRE_PatternDict.finditer = function(self, string){\n return obj.finditer(self.pattern, string, self.flags)\n }\n $SRE_PatternDict.match = function(self, string){\n return obj.match(self.pattern, string, self.flags)\n }\n $SRE_PatternDict.search = function(self, string){\n return obj.search(self.pattern, string, self.flags)\n }\n $SRE_PatternDict.sub = function(self,repl,string){\n return obj.sub(self.pattern,repl,string,self.flags)\n }\n $B.set_func_names($SRE_PatternDict, \"_jsre\")\n // TODO: groups\n // TODO: groupindex\n function normflags(flags){\n return ((flags & obj.I)? 'i' : '') + ((flags & obj.M)? 'm' : '');\n }\n // TODO: fullmatch()\n // TODO: split()\n // TODO: subn()\n obj.compile = function(pattern, flags){\n return {\n __class__: $SRE_PatternDict,\n pattern: pattern,\n flags: normflags(flags)\n }\n }\n obj.escape = function(string){\n // Escape all the characters in pattern except ASCII letters, numbers\n // and '_'. 
This is useful if you want to match an arbitrary literal\n // string that may have regular expression metacharacters in it.\n var res = ''\n var ok = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_'\n for(var i = 0, _len_i = string.length; i < _len_i; i++){\n if(ok.search(string.charAt(i))>-1){res += string.charAt(i)}\n }\n return res\n }\n obj.findall = function(pattern, string, flags){\n var $ns=$B.args('re.findall', 2,\n {pattern:null, string:null}, ['pattern', 'string'],\n arguments,{}, 'args', 'kw') ,\n args = $ns['args'] ,\n _flags = 0;\n if(args.length>0){var flags = args[0]}\n else{var _flags = getattr($ns['kw'], 'get')('flags', 0)}\n\n var flags = normflags()\n flags += 'gm'\n var jsp = new RegExp(pattern,flags),\n jsmatch = string.match(jsp)\n if(jsmatch === null){return []}\n return jsmatch\n }\n obj.finditer = function(pattern, string, flags){\n var $ns=$B.args('re.finditer', 2,\n {pattern:null, string:null}, ['pattern', 'string'],\n arguments,{},'args','kw'),\n args = $ns['args'],\n _flags = 0;\n if(args.length>0){var flags=args[0]}\n else{var _flags = getattr($ns['kw'], 'get')('flags', 0)}\n\n var flags = normflags()\n flags += 'gm'\n var jsp = new RegExp(pattern, flags),\n jsmatch = string.match(jsp);\n if(jsmatch === null){return []}\n\n var _list = []\n for(var j = 0, _len_j = jsmatch.length; j < _len_j; j++) {\n var mo = {}\n mo._match=jsmatch[j]\n mo.group = function(){\n var res = []\n for(var i=0, _len_i = arguments.length; i < _len_i;i++){\n if(jsmatch[arguments[i]] === undefined){res.push(None)}\n else{res.push(jsmatch[arguments[i]])}\n }\n if(arguments.length == 1){return res[0]}\n return tuple.$factory(res)\n }\n mo.groups = function(_default){\n if(_default === undefined){_default=None}\n var res = []\n for(var i = 1, _len_i = jsmatch.length; i < _len_i; i++){\n if(jsmatch[i] === undefined){res.push(_default)}\n else{res.push(jsmatch[i])}\n }\n return tuple.$factory(res)\n }\n mo.start = function(){return mo._match.index}\n mo.end = function(){return mo._match.length - mo._match.index}\n mo.string = string\n _list.push(mo)\n }\n return _list\n }\n obj.search = function(pattern, string){\n var $ns = $B.args('re.search', 2,\n {pattern:null, string:null},['pattern', 'string'],\n arguments, {}, 'args', 'kw')\n var args = $ns['args']\n if(args.length>0){var flags = args[0]}\n else{var flags = getattr($ns['kw'], 'get')('flags', '')}\n flags = normflags(flags)\n var jsp = new RegExp(pattern, flags)\n var jsmatch = string.match(jsp)\n if(jsmatch === null){return None}\n return MatchObject.$factory(jsmatch, string, pattern)\n }\n obj.sub = function(pattern, repl, string){\n var $ns=$B.args('re.search', 3,\n {pattern: null, repl: null, string: null},\n ['pattern', 'repl', 'string'],\n arguments,{}, 'args', 'kw')\n for($var in $ns){eval(\"var \" + $var + \"=$ns[$var]\")}\n var args = $ns['args']\n var count = _b_.dict.get($ns['kw'], 'count', 0)\n var flags = _b_.dict.get($ns['kw'], 'flags', '')\n if(args.length > 0){var count = args[0]}\n if(args.length > 1){var flags = args[1]}\n flags = normflags(flags)\n if(typeof repl == \"string\"){\n // backreferences are \\1, \\2... in Python but $1,$2... 
in Javascript\n repl = repl.replace(/\\\\(\\d+)/g, '$$$1')\n }else if(typeof repl == \"function\"){\n // the argument passed to the Python function is the match object\n // the arguments passed to the Javascript function are :\n // - the matched substring\n // - the matched groups\n // - the offset of the matched substring inside the string\n // - the string being examined\n var $repl1 = function(){\n var mo = Object()\n mo.string = arguments[arguments.length - 1]\n var matched = arguments[0];\n var start = arguments[arguments.length - 2]\n var end = start + matched.length\n mo.start = function(){return start}\n mo.end = function(){return end}\n groups = []\n for(var i = 1, _len_i = arguments.length-2; i < _len_i; i++){\n groups.push(arguments[i])\n }\n mo.groups = function(_default){\n if(_default === undefined){_default = None}\n var res = []\n for(var i = 0, _len_i = groups.length; i < _len_i; i++){\n if(groups[i] === undefined){res.push(_default)}\n else{res.push(groups[i])}\n }\n return res\n }\n mo.group = function(i){\n if(i==0){return matched}\n return groups[i-1]\n }\n return repl(mo)\n }\n }\n if(count == 0){flags += 'g'}\n var jsp = new RegExp(pattern, flags)\n if(typeof repl == 'function'){return string.replace(jsp, $repl1)}\n else{return string.replace(jsp, repl)}\n }\n obj.match = (function(search_func){\n return function(){\n // match is like search but pattern must start with ^\n var pattern = arguments[0]\n if(pattern.charAt(0) != '^'){pattern = '^'+pattern}\n var args = [pattern]\n for(var i = 1, _len_i = arguments.length; i < _len_i; i++){\n args.push(arguments[i])\n }\n return search_func.apply(null, args)\n }\n })(obj.search)\n\n return obj\n}\n)(__BRYTHON__)\n"], "_locale": [".js", "var am = {\n \"C\": \"AM\",\n \"aa\": \"saaku\",\n \"ab\": \"AM\",\n \"ae\": \"AM\",\n \"af\": \"vm.\",\n \"ak\": \"AN\",\n \"am\": \"\\u1325\\u12cb\\u1275\",\n \"an\": \"AM\",\n \"ar\": \"\\u0635\",\n \"as\": \"\\u09f0\\u09be\\u09a4\\u09bf\\u09aa\\u09c1\",\n \"av\": \"AM\",\n \"ay\": \"AM\",\n \"az\": \"AM\",\n \"ba\": \"\",\n \"be\": \"\",\n \"bg\": \"\",\n \"bh\": \"AM\",\n \"bi\": \"AM\",\n \"bm\": \"AM\",\n \"bn\": \"AM\",\n \"bo\": \"\\u0f66\\u0f94\\u0f0b\\u0f51\\u0fb2\\u0f7c\",\n \"br\": \"A.M.\",\n \"bs\": \"prijepodne\",\n \"ca\": \"a. 
m.\",\n \"ce\": \"AM\",\n \"ch\": \"AM\",\n \"co\": \"\",\n \"cr\": \"AM\",\n \"cs\": \"dop.\",\n \"cu\": \"\\u0414\\u041f\",\n \"cv\": \"AM\",\n \"cy\": \"yb\",\n \"da\": \"\",\n \"de\": \"\",\n \"dv\": \"\\u0789\\u0786\",\n \"dz\": \"\\u0f66\\u0f94\\u0f0b\\u0f46\\u0f0b\",\n \"ee\": \"\\u014bdi\",\n \"el\": \"\\u03c0\\u03bc\",\n \"en\": \"AM\",\n \"eo\": \"atm\",\n \"es\": \"\",\n \"et\": \"AM\",\n \"eu\": \"AM\",\n \"fa\": \"\\u0642.\\u0638\",\n \"ff\": \"\",\n \"fi\": \"ap.\",\n \"fj\": \"AM\",\n \"fo\": \"um fyr.\",\n \"fr\": \"\",\n \"fy\": \"AM\",\n \"ga\": \"r.n.\",\n \"gd\": \"m\",\n \"gl\": \"a.m.\",\n \"gn\": \"a.m.\",\n \"gu\": \"\\u0aaa\\u0ac2\\u0ab0\\u0acd\\u0ab5\\u00a0\\u0aae\\u0aa7\\u0acd\\u0aaf\\u0abe\\u0ab9\\u0acd\\u0aa8\",\n \"gv\": \"a.m.\",\n \"ha\": \"AM\",\n \"he\": \"AM\",\n \"hi\": \"\\u092a\\u0942\\u0930\\u094d\\u0935\\u093e\\u0939\\u094d\\u0928\",\n \"ho\": \"AM\",\n \"hr\": \"\",\n \"ht\": \"AM\",\n \"hu\": \"de.\",\n \"hy\": \"\",\n \"hz\": \"AM\",\n \"ia\": \"a.m.\",\n \"id\": \"AM\",\n \"ie\": \"AM\",\n \"ig\": \"A.M.\",\n \"ii\": \"\\ua0b5\\ua1aa\\ua20c\\ua210\",\n \"ik\": \"AM\",\n \"io\": \"AM\",\n \"is\": \"f.h.\",\n \"it\": \"\",\n \"iu\": \"AM\",\n \"ja\": \"\\u5348\\u524d\",\n \"jv\": \"\",\n \"ka\": \"AM\",\n \"kg\": \"AM\",\n \"ki\": \"Kiroko\",\n \"kj\": \"AM\",\n \"kk\": \"AM\",\n \"kl\": \"\",\n \"km\": \"\\u1796\\u17d2\\u179a\\u17b9\\u1780\",\n \"kn\": \"\\u0caa\\u0cc2\\u0cb0\\u0ccd\\u0cb5\\u0cbe\\u0cb9\\u0ccd\\u0ca8\",\n \"ko\": \"\\uc624\\uc804\",\n \"kr\": \"AM\",\n \"ks\": \"AM\",\n \"ku\": \"\\u067e.\\u0646\",\n \"kv\": \"AM\",\n \"kw\": \"a.m.\",\n \"ky\": \"\",\n \"la\": \"\",\n \"lb\": \"\",\n \"lg\": \"AM\",\n \"li\": \"AM\",\n \"ln\": \"nt\\u0254\\u0301ng\\u0254\\u0301\",\n \"lo\": \"\\u0e81\\u0ec8\\u0ead\\u0e99\\u0e97\\u0ec8\\u0ebd\\u0e87\",\n \"lt\": \"prie\\u0161piet\",\n \"lu\": \"Dinda\",\n \"lv\": \"priek\\u0161p.\",\n \"mg\": \"AM\",\n \"mh\": \"AM\",\n \"mi\": \"a.m.\",\n \"mk\": \"\\u043f\\u0440\\u0435\\u0442\\u043f\\u043b.\",\n \"ml\": \"AM\",\n \"mn\": \"??\",\n \"mo\": \"AM\",\n \"mr\": \"\\u092e.\\u092a\\u0942.\",\n \"ms\": \"PG\",\n \"mt\": \"AM\",\n \"my\": \"\\u1014\\u1036\\u1014\\u1000\\u103a\",\n \"na\": \"AM\",\n \"nb\": \"a.m.\",\n \"nd\": \"AM\",\n \"ne\": \"\\u092a\\u0942\\u0930\\u094d\\u0935\\u093e\\u0939\\u094d\\u0928\",\n \"ng\": \"AM\",\n \"nl\": \"\",\n \"nn\": \"f.m.\",\n \"no\": \"a.m.\",\n \"nr\": \"AM\",\n \"nv\": \"AM\",\n \"ny\": \"AM\",\n \"oc\": \"AM\",\n \"oj\": \"AM\",\n \"om\": \"WD\",\n \"or\": \"AM\",\n \"os\": \"AM\",\n \"pa\": \"\\u0a38\\u0a35\\u0a47\\u0a30\",\n \"pi\": \"AM\",\n \"pl\": \"AM\",\n \"ps\": \"\\u063a.\\u0645.\",\n \"pt\": \"\",\n \"qu\": \"a.m.\",\n \"rc\": \"AM\",\n \"rm\": \"AM\",\n \"rn\": \"Z.MU.\",\n \"ro\": \"a.m.\",\n \"ru\": \"\",\n \"rw\": \"AM\",\n \"sa\": \"\\u092e\\u0927\\u094d\\u092f\\u093e\\u0928\\u092a\\u0942\\u0930\\u094d\\u0935\",\n \"sc\": \"AM\",\n \"sd\": \"AM\",\n \"se\": \"i.b.\",\n \"sg\": \"ND\",\n \"sh\": \"AM\",\n \"si\": \"\\u0db4\\u0dd9.\\u0dc0.\",\n \"sk\": \"AM\",\n \"sl\": \"dop.\",\n \"sm\": \"AM\",\n \"sn\": \"AM\",\n \"so\": \"sn.\",\n \"sq\": \"e paradites\",\n \"sr\": \"pre podne\",\n \"ss\": \"AM\",\n \"st\": \"AM\",\n \"su\": \"AM\",\n \"sv\": \"\",\n \"sw\": \"AM\",\n \"ta\": \"\\u0b95\\u0bbe\\u0bb2\\u0bc8\",\n \"te\": \"\\u0c2a\\u0c42\\u0c30\\u0c4d\\u0c35\\u0c3e\\u0c39\\u0c4d\\u0c28\",\n \"tg\": \"\",\n \"th\": \"AM\",\n \"ti\": \"\\u1295\\u1309\\u1206 \\u1230\\u12d3\\u1270\",\n \"tk\": \"\",\n \"tl\": \"AM\",\n \"tn\": \"AM\",\n \"to\": 
\"AM\",\n \"tr\": \"\\u00d6\\u00d6\",\n \"ts\": \"AM\",\n \"tt\": \"\",\n \"tw\": \"AM\",\n \"ty\": \"AM\",\n \"ug\": \"\\u0686?\\u0634\\u062a\\u0649\\u0646 \\u0628?\\u0631?\\u0646\",\n \"uk\": \"AM\",\n \"ur\": \"AM\",\n \"uz\": \"TO\",\n \"ve\": \"AM\",\n \"vi\": \"SA\",\n \"vo\": \"AM\",\n \"wa\": \"AM\",\n \"wo\": \"\",\n \"xh\": \"AM\",\n \"yi\": \"\\ua0b5\\ua1aa\\ua20c\\ua210\",\n \"yo\": \"\\u00c0\\u00e1r?`\",\n \"za\": \"AM\",\n \"zh\": \"\\u4e0a\\u5348\",\n \"zu\": \"AM\"\n}\nvar pm = {\n \"C\": \"PM\",\n \"aa\": \"carra\",\n \"ab\": \"PM\",\n \"ae\": \"PM\",\n \"af\": \"nm.\",\n \"ak\": \"EW\",\n \"am\": \"\\u12a8\\u1230\\u12d3\\u1275\",\n \"an\": \"PM\",\n \"ar\": \"\\u0645\",\n \"as\": \"\\u0986\\u09ac\\u09c7\\u09b2\\u09bf\",\n \"av\": \"PM\",\n \"ay\": \"PM\",\n \"az\": \"PM\",\n \"ba\": \"\",\n \"be\": \"\",\n \"bg\": \"\",\n \"bh\": \"PM\",\n \"bi\": \"PM\",\n \"bm\": \"PM\",\n \"bn\": \"PM\",\n \"bo\": \"\\u0f55\\u0fb1\\u0f72\\u0f0b\\u0f51\\u0fb2\\u0f7c\",\n \"br\": \"G.M.\",\n \"bs\": \"popodne\",\n \"ca\": \"p. m.\",\n \"ce\": \"PM\",\n \"ch\": \"PM\",\n \"co\": \"\",\n \"cr\": \"PM\",\n \"cs\": \"odp.\",\n \"cu\": \"\\u041f\\u041f\",\n \"cv\": \"PM\",\n \"cy\": \"yh\",\n \"da\": \"\",\n \"de\": \"\",\n \"dv\": \"\\u0789\\u078a\",\n \"dz\": \"\\u0f55\\u0fb1\\u0f72\\u0f0b\\u0f46\\u0f0b\",\n \"ee\": \"\\u0263etr\\u0254\",\n \"el\": \"\\u03bc\\u03bc\",\n \"en\": \"PM\",\n \"eo\": \"ptm\",\n \"es\": \"\",\n \"et\": \"PM\",\n \"eu\": \"PM\",\n \"fa\": \"\\u0628.\\u0638\",\n \"ff\": \"\",\n \"fi\": \"ip.\",\n \"fj\": \"PM\",\n \"fo\": \"um sein.\",\n \"fr\": \"\",\n \"fy\": \"PM\",\n \"ga\": \"i.n.\",\n \"gd\": \"f\",\n \"gl\": \"p.m.\",\n \"gn\": \"p.m.\",\n \"gu\": \"\\u0a89\\u0aa4\\u0acd\\u0aa4\\u0ab0\\u00a0\\u0aae\\u0aa7\\u0acd\\u0aaf\\u0abe\\u0ab9\\u0acd\\u0aa8\",\n \"gv\": \"p.m.\",\n \"ha\": \"PM\",\n \"he\": \"PM\",\n \"hi\": \"\\u0905\\u092a\\u0930\\u093e\\u0939\\u094d\\u0928\",\n \"ho\": \"PM\",\n \"hr\": \"\",\n \"ht\": \"PM\",\n \"hu\": \"du.\",\n \"hy\": \"\",\n \"hz\": \"PM\",\n \"ia\": \"p.m.\",\n \"id\": \"PM\",\n \"ie\": \"PM\",\n \"ig\": \"P.M.\",\n \"ii\": \"\\ua0b5\\ua1aa\\ua20c\\ua248\",\n \"ik\": \"PM\",\n \"io\": \"PM\",\n \"is\": \"e.h.\",\n \"it\": \"\",\n \"iu\": \"PM\",\n \"ja\": \"\\u5348\\u5f8c\",\n \"jv\": \"\",\n \"ka\": \"PM\",\n \"kg\": \"PM\",\n \"ki\": \"Hwa\\u0129-in\\u0129\",\n \"kj\": \"PM\",\n \"kk\": \"PM\",\n \"kl\": \"\",\n \"km\": \"\\u179b\\u17d2\\u1784\\u17b6\\u1785\",\n \"kn\": \"\\u0c85\\u0caa\\u0cb0\\u0cbe\\u0cb9\\u0ccd\\u0ca8\",\n \"ko\": \"\\uc624\\ud6c4\",\n \"kr\": \"PM\",\n \"ks\": \"PM\",\n \"ku\": \"\\u062f.\\u0646\",\n \"kv\": \"PM\",\n \"kw\": \"p.m.\",\n \"ky\": \"\",\n \"la\": \"\",\n \"lb\": \"\",\n \"lg\": \"PM\",\n \"li\": \"PM\",\n \"ln\": \"mp\\u00f3kwa\",\n \"lo\": \"\\u0eab\\u0ebc\\u0eb1\\u0e87\\u0e97\\u0ec8\\u0ebd\\u0e87\",\n \"lt\": \"popiet\",\n \"lu\": \"Dilolo\",\n \"lv\": \"p\\u0113cp.\",\n \"mg\": \"PM\",\n \"mh\": \"PM\",\n \"mi\": \"p.m.\",\n \"mk\": \"\\u043f\\u043e\\u043f\\u043b.\",\n \"ml\": \"PM\",\n \"mn\": \"?\\u0425\",\n \"mo\": \"PM\",\n \"mr\": \"\\u092e.\\u0928\\u0902.\",\n \"ms\": \"PTG\",\n \"mt\": \"PM\",\n \"my\": \"\\u100a\\u1014\\u1031\",\n \"na\": \"PM\",\n \"nb\": \"p.m.\",\n \"nd\": \"PM\",\n \"ne\": \"\\u0905\\u092a\\u0930\\u093e\\u0939\\u094d\\u0928\",\n \"ng\": \"PM\",\n \"nl\": \"\",\n \"nn\": \"e.m.\",\n \"no\": \"p.m.\",\n \"nr\": \"PM\",\n \"nv\": \"PM\",\n \"ny\": \"PM\",\n \"oc\": \"PM\",\n \"oj\": \"PM\",\n \"om\": \"WB\",\n \"or\": \"PM\",\n \"os\": \"PM\",\n \"pa\": 
\"\\u0a36\\u0a3e\\u0a2e\",\n \"pi\": \"PM\",\n \"pl\": \"PM\",\n \"ps\": \"\\u063a.\\u0648.\",\n \"pt\": \"\",\n \"qu\": \"p.m.\",\n \"rc\": \"PM\",\n \"rm\": \"PM\",\n \"rn\": \"Z.MW.\",\n \"ro\": \"p.m.\",\n \"ru\": \"\",\n \"rw\": \"PM\",\n \"sa\": \"\\u092e\\u0927\\u094d\\u092f\\u093e\\u0928\\u092a\\u091a\\u094d\\u092f\\u093e\\u0924\",\n \"sc\": \"PM\",\n \"sd\": \"PM\",\n \"se\": \"e.b.\",\n \"sg\": \"LK\",\n \"sh\": \"PM\",\n \"si\": \"\\u0db4.\\u0dc0.\",\n \"sk\": \"PM\",\n \"sl\": \"pop.\",\n \"sm\": \"PM\",\n \"sn\": \"PM\",\n \"so\": \"gn.\",\n \"sq\": \"e pasdites\",\n \"sr\": \"po podne\",\n \"ss\": \"PM\",\n \"st\": \"PM\",\n \"su\": \"PM\",\n \"sv\": \"\",\n \"sw\": \"PM\",\n \"ta\": \"\\u0bae\\u0bbe\\u0bb2\\u0bc8\",\n \"te\": \"\\u0c05\\u0c2a\\u0c30\\u0c3e\\u0c39\\u0c4d\\u0c28\",\n \"tg\": \"\",\n \"th\": \"PM\",\n \"ti\": \"\\u12f5\\u1215\\u122d \\u1230\\u12d3\\u1275\",\n \"tk\": \"\",\n \"tl\": \"PM\",\n \"tn\": \"PM\",\n \"to\": \"PM\",\n \"tr\": \"\\u00d6S\",\n \"ts\": \"PM\",\n \"tt\": \"\",\n \"tw\": \"PM\",\n \"ty\": \"PM\",\n \"ug\": \"\\u0686?\\u0634\\u062a\\u0649\\u0646 \\u0643?\\u064a\\u0649\\u0646\",\n \"uk\": \"PM\",\n \"ur\": \"PM\",\n \"uz\": \"TK\",\n \"ve\": \"PM\",\n \"vi\": \"CH\",\n \"vo\": \"PM\",\n \"wa\": \"PM\",\n \"wo\": \"\",\n \"xh\": \"PM\",\n \"yi\": \"\\ua0b5\\ua1aa\\ua20c\\ua248\",\n \"yo\": \"?`s\\u00e1n\",\n \"za\": \"PM\",\n \"zh\": \"\\u4e0b\\u5348\",\n \"zu\": \"PM\"\n}\n\nvar X_format = {\n \"%H:%M:%S\": [\n \"C\",\n \"ab\",\n \"ae\",\n \"af\",\n \"an\",\n \"av\",\n \"ay\",\n \"az\",\n \"ba\",\n \"be\",\n \"bg\",\n \"bh\",\n \"bi\",\n \"bm\",\n \"bo\",\n \"br\",\n \"bs\",\n \"ca\",\n \"ce\",\n \"ch\",\n \"co\",\n \"cr\",\n \"cs\",\n \"cu\",\n \"cv\",\n \"cy\",\n \"da\",\n \"de\",\n \"dv\",\n \"eo\",\n \"es\",\n \"et\",\n \"eu\",\n \"ff\",\n \"fj\",\n \"fo\",\n \"fr\",\n \"fy\",\n \"ga\",\n \"gd\",\n \"gl\",\n \"gn\",\n \"gu\",\n \"gv\",\n \"ha\",\n \"he\",\n \"hi\",\n \"ho\",\n \"hr\",\n \"ht\",\n \"hu\",\n \"hy\",\n \"hz\",\n \"ia\",\n \"ie\",\n \"ig\",\n \"ik\",\n \"io\",\n \"is\",\n \"it\",\n \"ja\",\n \"ka\",\n \"kg\",\n \"ki\",\n \"kj\",\n \"kk\",\n \"kl\",\n \"km\",\n \"kn\",\n \"kv\",\n \"kw\",\n \"ky\",\n \"la\",\n \"lb\",\n \"lg\",\n \"li\",\n \"ln\",\n \"lo\",\n \"lt\",\n \"lu\",\n \"lv\",\n \"mg\",\n \"mh\",\n \"mk\",\n \"mn\",\n \"mo\",\n \"mr\",\n \"mt\",\n \"my\",\n \"na\",\n \"nb\",\n \"nd\",\n \"ng\",\n \"nl\",\n \"nn\",\n \"no\",\n \"nr\",\n \"nv\",\n \"ny\",\n \"oj\",\n \"or\",\n \"os\",\n \"pi\",\n \"pl\",\n \"ps\",\n \"pt\",\n \"rc\",\n \"rm\",\n \"rn\",\n \"ro\",\n \"ru\",\n \"rw\",\n \"sa\",\n \"sc\",\n \"se\",\n \"sg\",\n \"sh\",\n \"sk\",\n \"sl\",\n \"sm\",\n \"sn\",\n \"sr\",\n \"ss\",\n \"st\",\n \"su\",\n \"sv\",\n \"sw\",\n \"ta\",\n \"te\",\n \"tg\",\n \"th\",\n \"tk\",\n \"tl\",\n \"tn\",\n \"tr\",\n \"ts\",\n \"tt\",\n \"tw\",\n \"ty\",\n \"ug\",\n \"uk\",\n \"uz\",\n \"ve\",\n \"vo\",\n \"wa\",\n \"wo\",\n \"xh\",\n \"yo\",\n \"za\",\n \"zh\",\n \"zu\"\n ],\n \"%i:%M:%S %p\": [\n \"aa\",\n \"ak\",\n \"am\",\n \"bn\",\n \"el\",\n \"en\",\n \"iu\",\n \"kr\",\n \"ks\",\n \"mi\",\n \"ml\",\n \"ms\",\n \"ne\",\n \"om\",\n \"sd\",\n \"so\",\n \"sq\",\n \"ti\",\n \"to\",\n \"ur\",\n \"vi\"\n ],\n \"%I:%M:%S %p\": [\n \"ar\",\n \"fa\",\n \"ku\",\n \"qu\"\n ],\n \"%p %i:%M:%S\": [\n \"as\",\n \"ii\",\n \"ko\",\n \"yi\"\n ],\n \"\\u0f46\\u0f74\\u0f0b\\u0f5a\\u0f7c\\u0f51\\u0f0b%i:%M:%S %p\": [\n \"dz\"\n ],\n \"%p ga %i:%M:%S\": [\n \"ee\"\n ],\n \"%H.%M.%S\": [\n \"fi\",\n \"id\",\n \"jv\",\n \"oc\",\n \"si\"\n 
],\n \"%p %I:%M:%S\": [\n \"pa\"\n ]\n}\nvar x_format = {\n \"%m/%d/%y\": [\n \"C\"\n ],\n \"%d/%m/%Y\": [\n \"aa\",\n \"am\",\n \"bm\",\n \"bn\",\n \"ca\",\n \"co\",\n \"cy\",\n \"el\",\n \"es\",\n \"ff\",\n \"fr\",\n \"ga\",\n \"gd\",\n \"gl\",\n \"gn\",\n \"gv\",\n \"ha\",\n \"he\",\n \"id\",\n \"ig\",\n \"it\",\n \"iu\",\n \"jv\",\n \"ki\",\n \"kr\",\n \"kw\",\n \"la\",\n \"lg\",\n \"ln\",\n \"lo\",\n \"lu\",\n \"mi\",\n \"ml\",\n \"ms\",\n \"mt\",\n \"nd\",\n \"oc\",\n \"om\",\n \"pt\",\n \"qu\",\n \"rn\",\n \"sd\",\n \"sg\",\n \"so\",\n \"sw\",\n \"ti\",\n \"to\",\n \"uk\",\n \"ur\",\n \"uz\",\n \"vi\",\n \"wo\",\n \"yo\"\n ],\n \"%m/%d/%Y\": [\n \"ab\",\n \"ae\",\n \"an\",\n \"av\",\n \"ay\",\n \"bh\",\n \"bi\",\n \"ch\",\n \"cr\",\n \"cv\",\n \"ee\",\n \"en\",\n \"fj\",\n \"ho\",\n \"ht\",\n \"hz\",\n \"ie\",\n \"ik\",\n \"io\",\n \"kg\",\n \"kj\",\n \"ks\",\n \"kv\",\n \"li\",\n \"mh\",\n \"mo\",\n \"na\",\n \"ne\",\n \"ng\",\n \"nv\",\n \"ny\",\n \"oj\",\n \"pi\",\n \"rc\",\n \"sc\",\n \"sh\",\n \"sm\",\n \"su\",\n \"tl\",\n \"tw\",\n \"ty\",\n \"wa\",\n \"za\",\n \"zu\"\n ],\n \"%Y-%m-%d\": [\n \"af\",\n \"br\",\n \"ce\",\n \"dz\",\n \"eo\",\n \"ko\",\n \"lt\",\n \"mg\",\n \"nr\",\n \"rw\",\n \"se\",\n \"si\",\n \"sn\",\n \"ss\",\n \"st\",\n \"sv\",\n \"tn\",\n \"ts\",\n \"ug\",\n \"ve\",\n \"vo\",\n \"xh\"\n ],\n \"%Y/%m/%d\": [\n \"ak\",\n \"bo\",\n \"eu\",\n \"ia\",\n \"ii\",\n \"ja\",\n \"ku\",\n \"yi\",\n \"zh\"\n ],\n \"null\": [\n \"ar\",\n \"fa\",\n \"ps\",\n \"th\"\n ],\n \"%d-%m-%Y\": [\n \"as\",\n \"da\",\n \"fy\",\n \"hi\",\n \"kl\",\n \"mr\",\n \"my\",\n \"nl\",\n \"rm\",\n \"sa\",\n \"ta\"\n ],\n \"%d.%m.%Y\": [\n \"az\",\n \"cs\",\n \"de\",\n \"et\",\n \"fi\",\n \"fo\",\n \"hy\",\n \"is\",\n \"ka\",\n \"kk\",\n \"lv\",\n \"mk\",\n \"nb\",\n \"nn\",\n \"no\",\n \"os\",\n \"pl\",\n \"ro\",\n \"ru\",\n \"sq\",\n \"tg\",\n \"tr\",\n \"tt\"\n ],\n \"%d.%m.%y\": [\n \"ba\",\n \"be\",\n \"lb\"\n ],\n \"%d.%m.%Y \\u0433.\": [\n \"bg\"\n ],\n \"%d.%m.%Y.\": [\n \"bs\",\n \"hr\",\n \"sr\"\n ],\n \"%Y.%m.%d\": [\n \"cu\",\n \"mn\"\n ],\n \"%d/%m/%y\": [\n \"dv\",\n \"km\"\n ],\n \"%d-%m-%y\": [\n \"gu\",\n \"kn\",\n \"or\",\n \"pa\",\n \"te\"\n ],\n \"%Y. %m. %d.\": [\n \"hu\"\n ],\n \"%d-%b %y\": [\n \"ky\"\n ],\n \"%d. %m. %Y\": [\n \"sk\",\n \"sl\"\n ],\n \"%d.%m.%y \\u00fd.\": [\n \"tk\"\n ]\n}\n\n\nvar $module=(function($B){\n var _b_ = $B.builtins\n return {\n CHAR_MAX: 127,\n LC_ALL: 6,\n LC_COLLATE: 3,\n LC_CTYPE: 0,\n LC_MESSAGES: 5,\n LC_MONETARY: 4,\n LC_NUMERIC: 1,\n LC_TIME: 2,\n Error: _b_.ValueError,\n\n _date_format: function(spec, hour){\n var t,\n locale = __BRYTHON__.locale.substr(0, 2)\n\n if(spec == \"p\"){\n var res = hours < 12 ? 
am[locale] : pm[locale]\n if(res === undefined){\n throw _b_.ValueError.$factory(\"no format \" + spec + \" for locale \" +\n locale)\n }\n return res\n }\n else if(spec == \"x\"){\n t = x_format\n }else if(spec == \"X\"){\n t = X_format\n }else{\n throw _b_.ValueError.$factory(\"invalid format\", spec)\n }\n for(var key in t){\n if(t[key].indexOf(locale) > -1){\n return key\n }\n }\n throw _b_.ValueError.$factory(\"no format \" + spec + \" for locale \" +\n locale)\n },\n\n localeconv: function(){\n var conv = {'grouping': [127],\n 'currency_symbol': '',\n 'n_sign_posn': 127,\n 'p_cs_precedes': 127,\n 'n_cs_precedes': 127,\n 'mon_grouping': [],\n 'n_sep_by_space': 127,\n 'decimal_point': '.',\n 'negative_sign': '',\n 'positive_sign': '',\n 'p_sep_by_space': 127,\n 'int_curr_symbol': '',\n 'p_sign_posn': 127,\n 'thousands_sep': '',\n 'mon_thousands_sep': '',\n 'frac_digits': 127,\n 'mon_decimal_point': '',\n 'int_frac_digits': 127\n }\n var res = $B.empty_dict()\n for(var key in conv){\n res.$string_dict[key] = [conv, res.$order++]\n }\n return res\n },\n\n setlocale : function(){\n var $ = $B.args(\"setlocale\", 2, {category: null, locale: null},\n [\"category\", \"locale\"], arguments, {locale: _b_.None},\n null, null)\n /// XXX category is currently ignored\n if($.locale == \"\"){\n // use browser language setting, if it is set\n var LANG = ($B.language || \"\").substr(0, 2)\n if(am.hasOwnProperty(LANG)){\n $B.locale = LANG\n return LANG\n }else{\n console.log(\"Unknown locale: \" + LANG)\n }\n }else if($.locale === _b_.None){\n // return current locale\n return $B.locale\n }else{\n // Only use 2 first characters\n try{$.locale.substr(0, 2)}\n catch(err){\n throw $module.Error.$factory(\"Invalid locale: \" + $.locale)\n }\n if(am.hasOwnProperty($.locale.substr(0, 2))){\n $B.locale = $.locale\n return $.locale\n }else{\n throw $module.Error.$factory(\"Unknown locale: \" + $.locale)\n }\n }\n }\n }\n})(__BRYTHON__)\n"], "_multiprocessing": [".js", "// multiprocessing\nvar $module = (function($B){\n\nvar _b_ = $B.builtins\nvar $s=[]\nfor(var $b in _b_) $s.push('var ' + $b +'=_b_[\"'+$b+'\"]')\neval($s.join(';'))\n\n//for(var $py_builtin in _b_){eval(\"var \"+$py_builtin+\"=_b_[$py_builtin]\")}\n\nvar Process = {\n __class__:_b_.type,\n __mro__: [_b_.object],\n $infos:{\n __name__:'Process'\n },\n $is_class: true\n}\n\nvar $convert_args=function(args) {\n var _list=[]\n for(var i=0, _len_i = args.length; i < _len_i; i++) {\n var _a=args[i]\n if(isinstance(_a, str)){_list.push(\"'\"+_a+\"'\")} else {_list.push(_a)}\n }\n\n return _list.join(',')\n}\n\nProcess.is_alive = function(self){return self.$alive}\n\nProcess.join = function(self, timeout){\n // need to block until process is complete\n // could probably use a addEventListener to execute all existing code\n // after this join statement\n\n self.$worker.addEventListener('message', function (e) {\n var data=e.data\n if (data.stdout != '') { // output stdout from process\n $B.stdout.write(data.stdout)\n }\n }, false);\n}\n\nProcess.run = function(self){\n //fix me\n}\n\nProcess.start = function(self){\n self.$worker.postMessage({target: self.$target,\n args: $convert_args(self.$args),\n // kwargs: self.$kwargs\n })\n self.$worker.addEventListener('error', function(e) { throw e})\n self.$alive=true\n}\n\nProcess.terminate = function(self){\n self.$worker.terminate()\n self.$alive=false\n}\n\n// variables\n//name\n//daemon\n//pid\n//exitcode\n\nProcess. 
$factory = function(){\n //arguments group=None, target=None, name=None, args=(), kwargs=()\n\n var $ns=$B.args('Process',0,{},[],arguments,{},null,'kw')\n var kw=$ns['kw']\n\n var target=_b_.dict.get($ns['kw'],'target',None)\n var args=_b_.dict.get($ns['kw'],'args',tuple.$factory())\n\n var worker = new Worker('/src/web_workers/multiprocessing.js')\n\n var res = {\n __class__:Process,\n $worker: worker,\n name: $ns['name'] || None,\n $target: target+'',\n $args: args,\n //$kwargs: $ns['kw'],\n $alive: false\n }\n return res\n}\n\n$B.set_func_names(Process, \"multiprocessing\")\n\nvar Pool = $B.make_class(\"Pool\")\n\nPool.__enter__ = function(self){}\nPool.__exit__ = function(self){}\n\nPool.__str__ = Pool.toString = Pool.__repr__=function(self){\n return ''\n}\n\nPool.map = function(){\n\n var $ns=$B.args('Pool.map', 3,\n {self:null, func:null, fargs:null}, ['self', 'func', 'fargs'],\n arguments,{},'args','kw')\n var func=$ns['func']\n var fargs=$ns['fargs']\n\n var _results=[]\n\n fargs=iter(fargs)\n\n var _pos=0\n console.log(self.$processes)\n _workers=[]\n for(var i=0; i < self.$processes; i++) {\n _workers[i] = new Worker('/src/web_workers/multiprocessing.js')\n var arg\n\n try{arg=getattr(fargs, '__next__')()}\n catch(err) {\n if (err.__class__ !== _b_.StopIteration) throw err\n }\n console.log(arg)\n _workers[i].finished=false\n _workers[i].postMessage({target: func+'', pos: _pos,\n args: $convert_args([arg])})\n _pos++\n\n _workers[i].addEventListener('message', function(e) {\n _results[e.data.pos]=e.data.result\n if (_results.length == args.length) return _results\n\n try {\n arg=getattr(fargs, '__next__')()\n e.currentTarget.postMessage({target: func+'', pos: _pos,\n args: $convert_args([arg])})\n _pos++\n } catch(err) {\n if (err.__class__ !== _b_.StopIteration) throw err\n this.finished=true\n }\n }, false);\n }\n}\n\nPool.apply_async = function(){\n\n var $ns=$B.$MakeArgs('apply_async', 3,\n {self:null, func:null, fargs:null}, ['self', 'func', 'fargs'],\n arguments,{},'args','kw')\n var func=$ns['func']\n var fargs=$ns['fargs']\n\n fargs=iter(fargs)\n\n async_result = {}\n async_result.get=function(timeout){\n console.log(results)\n console.log(fargs)\n return this.results}\n async_result.results=[]\n\n var _pos=0\n\n _workers=[]\n for(var i=0; i < self.$processes; i++) {\n _workers[i] = new Worker('/src/web_workers/multiprocessing.js')\n var arg\n\n try{arg=getattr(fargs, '__next__')()}\n catch(err) {\n if (err.__class__ !== _b_.StopIteration) throw err\n }\n //console.log(arg)\n //_workers[i].finished=false\n _workers[i].postMessage({target: func+'', pos: _pos,\n args: $convert_args([arg])})\n _pos++\n\n _workers[i].addEventListener('message', function(e) {\n async_result.results[e.data.pos]=e.data.result\n //if (_results.length == args.length) return _results\n\n try {\n arg=getattr(fargs, '__next__')()\n e.currentTarget.postMessage({target: func+'', pos: _pos,\n args: $convert_args([arg])})\n _pos++\n } catch(err) {\n if (err.__class__ !== _b_.StopIteration) throw err\n this.finished=true\n }\n }, false);\n }\n\n console.log(\"return\", async_result)\n return async_result\n}\n\nPool.$factory = function(){\n console.log(\"pool\")\n console.log(arguments)\n var $ns=$B.args('Pool',1,\n {processes:null},['processes'],arguments,{},'args','kw')\n //var kw=$ns['kw']\n\n var processes=$ns['processes']\n\n if (processes == None) {\n // look to see if we have stored cpu_count in local storage\n // maybe we should create a brython config file with settings,etc..??\n\n // if not 
there use a tool such as Core Estimator to calculate number of cpu's\n // http://eligrey.com/blog/post/cpu-core-estimation-with-javascript\n }\n\n console.log(processes)\n var res = {\n __class__:Pool,\n $processes:processes\n }\n return res\n}\n\n$B.set_func_names(Pool, \"multiprocessing\")\n\nreturn {Process:Process, Pool:Pool}\n\n})(__BRYTHON__)\n"], "_posixsubprocess": [".js", "var $module=(function($B){\n\n return {\n cloexec_pipe: function() {} // fixme\n }\n})(__BRYTHON__)\n"], "_profile": [".js", "// Private interface to the profiling instrumentation implemented in py_utils.js.\n// Uses local a copy of the eval function from py_builtin_functions.js\n\nvar $module=(function($B) {\n eval($B.InjectBuiltins());\n return {\n brython:$B,\n data:$B.$profile_data,\n start:$B.$profile.start,\n stop:$B.$profile.stop,\n pause:$B.$profile.pause,\n status:$B.$profile.status,\n clear:$B.$profile.clear,\n elapsed:$B.$profile.elapsed,\n run:function(src,_globals,_locals,nruns) {\n var current_frame = $B.frames_stack[$B.frames_stack.length-1]\n if(current_frame!==undefined){\n var current_locals_id = current_frame[0].replace(/\\./,'_'),\n current_globals_id = current_frame[2].replace(/\\./,'_')\n }\n\n var is_exec = true,\n leave = false\n\n // code will be run in a specific block\n var globals_id = '$profile_'+$B.UUID(),\n locals_id\n\n if(_locals===_globals){\n locals_id = globals_id\n }else{\n locals_id = '$profile_'+$B.UUID()\n }\n // Initialise the object for block namespaces\n eval('var $locals_'+globals_id+' = {}\\nvar $locals_'+locals_id+' = {}')\n\n // Initialise block globals\n\n // A _globals dictionary is provided, set or reuse its attribute\n // globals_id\n _globals.globals_id = _globals.globals_id || globals_id\n globals_id = _globals.globals_id\n\n if(_locals === _globals || _locals === undefined){\n locals_id = globals_id\n parent_scope = $B.builtins_scope\n }else{\n // The parent block of locals must be set to globals\n parent_scope = {\n id: globals_id,\n parent_block: $B.builtins_scope,\n binding: {}\n }\n for(var attr in _globals.$string_dict){\n parent_scope.binding[attr] = true\n }\n }\n\n // Initialise block globals\n if(_globals.$jsobj){var items = _globals.$jsobj}\n else{var items = _globals.$string_dict}\n for(var item in items){\n item1 = to_alias(item)\n try{\n eval('$locals_' + globals_id + '[\"' + item1 +\n '\"] = items[item]')\n }catch(err){\n console.log(err)\n console.log('error setting', item)\n break\n }\n }\n\n // Initialise block locals\n var items = _b_.dict.items(_locals), item\n if(_locals.$jsobj){var items = _locals.$jsobj}\n else{var items = _locals.$string_dict}\n for(var item in items){\n item1 = to_alias(item)\n try{\n eval('$locals_' + locals_id + '[\"' + item[0] + '\"] = item[1]')\n }catch(err){\n console.log(err)\n console.log('error setting', item)\n break\n }\n }\n //var nb_modules = Object.keys(__BRYTHON__.modules).length\n //console.log('before exec', nb_modules)\n\n console.log(\"call py2js\", src, globals_id, locals_id, parent_scope)\n var root = $B.py2js(src, globals_id, locals_id, parent_scope),\n js, gns, lns\n\n try{\n\n var js = root.to_js()\n\n var i,res,gns;\n for(i=0;i 255){return false}\n return unicode_iscased(cp)\n }\n\n function unicode_tolower(cp){\n var letter = String.fromCodePoint(cp),\n lower = letter.toLowerCase()\n return lower.charCodeAt(0)\n }\n\n function ascii_tolower(cp){\n return unicode_tolower(cp)\n }\n\nreturn {\n unicode_iscased: unicode_iscased,\n ascii_iscased: ascii_iscased,\n unicode_tolower: unicode_tolower,\n 
ascii_tolower: ascii_tolower\n}\n\n}\n\n)(__BRYTHON__)"], "_string": [".js", "var $module=(function($B){\n\nvar _b_ = $B.builtins\n\nfunction parts(format_string){\n var result = [],\n _parts = $B.split_format(format_string) // defined in py_string.js\n for(var i = 0; i < _parts.length; i+= 2){\n result.push({pre: _parts[i], fmt: _parts[i + 1]})\n }\n return result\n}\n\nfunction Tuple(){\n var args = []\n for(var i=0, len=arguments.length; i < len; i++){\n args.push(arguments[i])\n }\n return _b_.tuple.$factory(args)\n}\n\nreturn{\n\n formatter_field_name_split: function(fieldname){\n // Split the argument as a field name\n var parsed = $B.parse_format(fieldname),\n first = parsed.name,\n rest = []\n if(first.match(/\\d+/)){first = parseInt(first)}\n parsed.name_ext.forEach(function(ext){\n if(ext.startsWith(\"[\")){\n var item = ext.substr(1, ext.length - 2)\n if(item.match(/\\d+/)){\n rest.push(Tuple(false, parseInt(item)))\n }else{\n rest.push(Tuple(false, item))\n }\n }else{\n rest.push(Tuple(true, ext.substr(1)))\n }\n })\n return Tuple(first, _b_.iter(rest))\n },\n formatter_parser: function(format_string){\n // Parse the argument as a format string\n\n if(! _b_.isinstance(format_string, _b_.str)){\n throw _b_.ValueError.$factory(\"Invalid format string type: \" +\n $B.class_name(format_string))\n }\n\n var result = []\n parts(format_string).forEach(function(item){\n var pre = item.pre === undefined ? \"\" : item.pre,\n fmt = item.fmt\n if(fmt === undefined){\n result.push(Tuple(pre, _b_.None, _b_.None, _b_.None))\n }else if(fmt.string == ''){\n result.push(Tuple(pre, '', '', _b_.None))\n }else{\n result.push(Tuple(pre,\n fmt.raw_name + fmt.name_ext.join(\"\"),\n fmt.raw_spec,\n fmt.conv || _b_.None))\n }\n })\n return result\n }\n}\n})(__BRYTHON__)"], "_strptime": [".js", "var _b_ = __BRYTHON__.builtins\n\nvar $module = (function($B){\n return {\n _strptime_datetime: function(cls, s, fmt){\n var pos_s = 0,\n pos_fmt = 0,\n dt = {}\n function error(){\n throw Error(\"no match \" + pos_s + \" \" + s.charAt(pos_s) + \" \"+\n pos_fmt + \" \" + fmt.charAt(pos_fmt))\n }\n\n var locale = __BRYTHON__.locale,\n shortdays = [],\n longdays = [],\n conv_func = locale == \"C\" ?\n function(d){return d.toDateString()} :\n function(d, options){\n return d.toLocaleDateString(locale, options)\n }\n\n for(var day = 16; day < 23; day++){\n var d = new Date(Date.UTC(2012, 11, day, 3, 0, 0))\n shortdays.push(conv_func(d, {weekday: 'short'}))\n longdays.push(conv_func(d, {weekday: 'long'}))\n }\n\n var shortmonths = [],\n longmonths = []\n\n for(var month = 0; month < 12; month++){\n var d = new Date(Date.UTC(2012, month, 1, 3, 0, 0))\n shortmonths.push(conv_func(d, {month: 'short'}))\n longmonths.push(conv_func(d, {month: 'long'}))\n }\n\n var shortdays_re = new RegExp(shortdays.join(\"|\").replace(\".\", \"\\\\.\")),\n longdays_re = new RegExp(longdays.join(\"|\")),\n shortmonths_re = new RegExp(shortmonths.join(\"|\").replace(\".\", \"\\\\.\")),\n longmonths_re = new RegExp(longmonths.join(\"|\"))\n\n var regexps = {\n d: [\"day\", new RegExp(\"0[1-9]|[123][0-9]\")],\n f: [\"microsecond\", new RegExp(\"(\\\\d{1,6})\")],\n H: [\"hour\", new RegExp(\"[01][0-9]|2[0-3]|\\\\d\")],\n I: [\"hour\", new RegExp(\"0[0-9]|1[0-2]\")],\n m: [\"month\", new RegExp(\"0[1-9]|1[012]\")],\n M: [\"minute\", new RegExp(\"[0-5][0-9]\")],\n S: [\"second\", new RegExp(\"([1-5]\\\\d)|(0?\\\\d)\")],\n y: [\"year\", new RegExp(\"0{0,2}\\\\d{2}\")],\n Y: [\"year\", new RegExp(\"\\\\d{4}\")],\n z: [\"tzinfo\", new 
RegExp(\"Z\")]\n }\n\n while(pos_fmt < fmt.length){\n var car = fmt.charAt(pos_fmt)\n if(car == \"%\"){\n var spec = fmt.charAt(pos_fmt + 1),\n regexp = regexps[spec]\n if(regexp !== undefined){\n var re = regexp[1],\n attr = regexp[0],\n res = re.exec(s.substr(pos_s))\n if(res === null){\n error()\n }else{\n if(dt[attr] !== undefined){\n throw Error(attr + \" is defined more than once\")\n }else{\n dt[attr] = parseInt(res[0])\n if(attr == \"microsecond\"){\n while(dt[attr] < 100000){\n dt[attr] *= 10\n }\n }else if(attr == \"tzinfo\"){\n // Only value supported for the moment : Z\n // (UTC)\n var dt_module = $B.imported[cls.__module__]\n dt.tzinfo = dt_module.timezone.utc\n }\n pos_fmt += 2\n pos_s += res[0].length\n }\n }\n }else if(spec == \"a\" || spec == \"A\"){\n // Locale's abbreviated (a) or full (A) weekday name\n var attr = \"weekday\",\n re = spec == \"a\" ? shortdays_re : longdays_re,\n t = spec == \"a\" ? shortdays : longdays\n res = re.exec(s.substr(pos_s))\n if(res === null){\n error()\n }else{\n var match = res[0],\n ix = t.indexOf(match)\n }\n if(dt.weekday !== undefined){\n throw Error(attr + \" is defined more than once\")\n }else{\n dt.weekday = ix\n }\n pos_fmt += 2\n pos_s += match.length\n }else if(spec == \"b\" || spec == \"B\"){\n // Locales's abbreviated (b) or full (B) month\n var attr = \"month\",\n re = spec == \"b\" ? shortmonths_re : longmonths_re,\n t = spec == \"b\" ? shortmonths : longmonths,\n res = re.exec(s.substr(pos_s))\n if(res === null){\n error()\n }else{\n var match = res[0],\n ix = t.indexOf(match)\n }\n if(dt.month !== undefined){\n throw Error(attr + \" is defined more than once\")\n }else{\n dt.month = ix + 1\n }\n pos_fmt += 2\n pos_s += match.length\n }else if(spec == \"c\"){\n // Locale's appropriate date and time representation\n var fmt1 = fmt.substr(0, pos_fmt - 1) + _locale_c_format() +\n fmt.substr(pos_fmt + 2)\n fmt = fmt1\n }else if(spec == \"%\"){\n if(s.charAt(pos_s) == \"%\"){\n pos_fmt++\n pos_s++\n }else{\n error()\n }\n }else{\n pos_fmt++\n }\n }else{\n if(car == s.charAt(pos_s)){\n pos_fmt++\n pos_s++\n }else{\n error()\n }\n }\n }\n return $B.$call(cls)(dt.year, dt.month, dt.day,\n dt.hour || 0, dt.minute || 0, dt.second || 0,\n dt.microsecond || 0, dt.tzinfo || _b_.None)\n }\n }\n})(__BRYTHON__)\n"], "_svg": [".js", "// creation of an HTML element\nvar $module = (function($B){\n\nvar _b_ = $B.builtins\nvar TagSum = $B.TagSum // defined in py_dom.js\n\nvar $s=[]\nfor(var $b in _b_) $s.push('var ' + $b +' = _b_[\"' + $b + '\"]')\neval($s.join(';'))\n\nvar $svgNS = \"http://www.w3.org/2000/svg\"\nvar $xlinkNS = \"http://www.w3.org/1999/xlink\"\n\nfunction makeTagDict(tagName){\n // return the dictionary for the class associated with tagName\n var dict = $B.make_class(tagName)\n\n dict.__init__ = function(){\n var $ns = $B.args('__init__', 1, {self: null}, ['self'],\n arguments, {}, 'args', 'kw'),\n self = $ns['self'],\n args = $ns['args']\n if(args.length == 1){\n var first = args[0]\n if(isinstance(first, [str, int, float])){\n self.appendChild(document.createTextNode(str.$factory(first)))\n }else if(first.__class__ === TagSum){\n for(var i = 0, len = first.children.length; i < len; i++){\n self.appendChild(first.children[i].elt)\n }\n }else{ // argument is another DOMNode instance\n try{self.appendChild(first.elt)}\n catch(err){throw ValueError.$factory('wrong element ' + first)}\n }\n }\n\n // attributes\n var items = _b_.list.$factory(_b_.dict.items($ns['kw']))\n for(var i = 0, len = items.length; i < len; i++){\n // 
keyword arguments\n var arg = items[i][0],\n value = items[i][1]\n if(arg.toLowerCase().substr(0,2) == \"on\"){\n // Event binding passed as argument \"onclick\", \"onfocus\"...\n // Better use method bind of DOMNode objects\n var js = '$B.DOMNode.bind(self,\"' +\n arg.toLowerCase().substr(2)\n eval(js+'\",function(){'+value+'})')\n }else if(arg.toLowerCase() == \"style\"){\n $B.DOMNode.set_style(self,value)\n }else if(arg.toLowerCase().indexOf(\"href\") !== -1){ // xlink:href\n self.setAttributeNS( \"http://www.w3.org/1999/xlink\",\n \"href\",value)\n }else{\n if(value !== false){\n // option.selected=false sets it to true :-)\n try{\n arg = arg.replace('_', '-')\n self.setAttributeNS(null, arg, value)\n }catch(err){\n throw ValueError.$factory(\"can't set attribute \" + arg)\n }\n }\n }\n }\n }\n\n dict.__mro__ = [$B.DOMNode, $B.builtins.object]\n\n dict.__new__ = function(cls){\n var res = $B.DOMNode.$factory(document.createElementNS($svgNS, tagName))\n res.__class__ = cls\n return res\n }\n\n dict.$factory = function(){\n var res = $B.DOMNode.$factory(\n document.createElementNS($svgNS, tagName))\n res.__class__ = dict\n // apply __init__\n dict.__init__(res, ...arguments)\n return res\n }\n\n $B.set_func_names(dict, \"browser.svg\")\n\n return dict\n}\n\n\n// SVG\nvar $svg_tags = ['a',\n'altGlyph',\n'altGlyphDef',\n'altGlyphItem',\n'animate',\n'animateColor',\n'animateMotion',\n'animateTransform',\n'circle',\n'clipPath',\n'color_profile', // instead of color-profile\n'cursor',\n'defs',\n'desc',\n'ellipse',\n'feBlend',\n'foreignObject', //patch to enable foreign objects\n'g',\n'image',\n'line',\n'linearGradient',\n'marker',\n'mask',\n'path',\n'pattern',\n'polygon',\n'polyline',\n'radialGradient',\n'rect',\n'set',\n'stop',\n'svg',\n'text',\n'tref',\n'tspan',\n'use']\n\n// create classes\nvar obj = new Object()\nvar dicts = {}\nfor(var i = 0, len = $svg_tags.length; i < len; i++){\n var tag = $svg_tags[i]\n obj[tag] = makeTagDict(tag)\n}\n\nreturn obj\n})(__BRYTHON__)\n"], "_warnings": [".js", "var $module = (function($B){\n\n_b_ = $B.builtins\n\nreturn {\n __doc__: \"_warnings provides basic warning filtering support.\\n \" +\n \"It is a helper module to speed up interpreter start-up.\",\n\n default_action: \"default\",\n\n filters: [\n ['ignore', _b_.None, _b_.DeprecationWarning, _b_.None, 0],\n ['ignore', _b_.None, _b_.PendingDeprecationWarning, _b_.None, 0],\n ['ignore', _b_.None, _b_.ImportWarning, _b_.None, 0],\n ['ignore', _b_.None, _b_.BytesWarning, _b_.None, 0]].map(\n function(x){return _b_.tuple.$factory(x)}),\n\n once_registry: $B.empty_dict(),\n\n warn: function(){},\n\n warn_explicit: function(){}\n\n}\n\n})(__BRYTHON__)\n"], "_webcomponent": [".js", "// module for Web Components\nvar $module = (function($B){\n\nvar _b_ = $B.builtins\n\nfunction define(tag_name, cls){\n var $ = $B.args(\"define\", 2, {tag_name: null, cls: null},\n [\"tag_name\", \"cls\"], arguments, {}, null, null),\n tag_name = $.tag_name,\n cls = $.cls\n if(typeof tag_name != \"string\"){\n throw _b_.TypeError.$factory(\"first argument of define() \" +\n \"must be a string, not '\" + $B.class_name(tag_name) + \"'\")\n }else if(tag_name.indexOf(\"-\") == -1){\n throw _b_.ValueError.$factory(\"custom tag name must \" +\n \"contain a hyphen (-)\")\n }\n if(!_b_.isinstance(cls, _b_.type)){\n throw _b_.TypeError.$factory(\"second argument of define() \" +\n \"must be a class, not '\" + $B.class_name(tag_name) + \"'\")\n }\n\n // Create the Javascript class used for the component. 
It must have\n // the same name as the Python class\n var src = String.raw`var WebComponent = class extends HTMLElement {\n constructor(){\n // Always call super first in constructor\n super()\n if(cls.__init__){\n try{\n var _self = $B.DOMNode.$factory(this)\n _self.__class__ = cls\n $B.$call(cls.__init__)(_self)\n if(WebComponent.initialized){\n var nb_attrs = _self.attributes.length\n for(var i = 0; i < nb_attrs; i++){\n var item = _self.attributes.item(i)\n throw _b_.TypeError.$factory(\"Custom element must not \" +\n \"have attributes, found: \" + item.name + '=\"' +\n item.value + '\"')\n }\n }\n }catch(err){\n $B.handle_error(err)\n }\n }\n }\n static get observedAttributes(){\n try{\n var obs_attr = $B.$getattr(cls, \"observedAttributes\")\n return $B.$call(obs_attr)(cls)\n }catch(err){\n if(! $B.is_exc(err, [_b_.AttributeError])){\n throw err\n }\n return []\n }\n }\n }\n `\n var name = cls.$infos.__name__\n eval(src.replace(/WebComponent/g, name))\n var webcomp = eval(name) // JS class for component\n webcomp.$cls = cls\n\n // Override __getattribute__ to handle DOMNode attributes such as\n // attachShadow\n cls.__getattribute__ = function(self, attr){\n try{\n return $B.DOMNode.__getattribute__(self, attr)\n }catch(err){\n if(err.__class__ === _b_.AttributeError){\n var ga = $B.$getattr(cls, \"__getattribute__\")\n return ga(self, attr)\n }else{\n throw err\n }\n }\n }\n\n var mro = [cls].concat(cls.__mro__)\n for(var i = 0, len = mro.length - 1; i < len; i++){\n var pcls = mro[i]\n for(var key in pcls){\n if(webcomp.prototype[key] === undefined &&\n typeof pcls[key] == \"function\"){\n webcomp.prototype[key] = (function(attr, klass){\n return function(){\n return $B.pyobj2jsobj(klass[attr]).call(null,\n $B.DOMNode.$factory(this), ...arguments)\n }\n })(key, pcls)\n }\n }\n }\n\n // define WebComp as the class to use for the specified tag name\n customElements.define(tag_name, webcomp)\n webcomp.initialized = true\n}\n\nfunction get(name){\n var ce = customElements.get(name)\n if(ce && ce.$cls){return ce.$cls}\n return _b_.None\n}\n\nreturn {\n define: define,\n get: get\n}\n\n})(__BRYTHON__)"], "_webworker": [".js", "// Web Worker implementation\n\nvar $module = (function($B){\n\nvar _b_ = $B.builtins\n\nvar brython_scripts = ['brython', 'brython_stdlib']\n\nvar wclass = $B.make_class(\"Worker\",\n function(worker){\n var res = worker\n res.send = res.postMessage\n return res\n }\n)\nwclass.__mro__ = [$B.JSObj, _b_.object]\n\n$B.set_func_names(wclass, \"browser.worker\")\n\nvar _Worker = $B.make_class(\"Worker\", function(id, onmessage, onerror){\n var $ = $B.args(\"__init__\", 3, {id: null, onmessage: null, onerror: null},\n ['id', 'onmessage', 'onerror'], arguments,\n {onmessage: _b_.None, onerror: _b_.None}, null, null),\n id = $.id,\n src = $B.webworkers[id]\n if(src === undefined){\n throw _b_.KeyError.$factory(id)\n }\n var script_id = \"worker\" + $B.UUID(),\n js = __BRYTHON__.imported.javascript.py2js(src,\n script_id),\n header = 'var $locals_' + script_id +' = {}\\n';\n brython_scripts.forEach(function(script){\n var url = $B.brython_path + script + \".js?\" +\n (new Date()).getTime()\n header += 'importScripts(\"' + url + '\")\\n'\n })\n // restore brython_path\n header += '__BRYTHON__.brython_path = \"' + $B.brython_path +\n '\"\\n'\n // restore path for imports (cf. 
issue #1305)\n header += '__BRYTHON__.path = \"' + $B.path +'\".split(\",\")\\n'\n // Call brython() to initialize internal Brython values\n header += 'brython(1)\\n'\n js = header + js\n var blob = new Blob([js], {type: \"application/js\"}),\n url = URL.createObjectURL(blob),\n w = new Worker(url),\n res = wclass.$factory(w)\n return res\n})\n\nreturn {\n Worker: _Worker\n}\n\n})(__BRYTHON__)\n"], "_zlib_utils": [".js", "\nfunction rfind(buf, seq){\n var buflen = buf.length,\n len = seq.length\n for(var i = buflen - len; i >= 0; i--){\n var chunk = buf.slice(i, i + len),\n found = true\n for(var j = 0; j < len; j++){\n if(chunk[j] != seq[j]){\n found = false\n break\n }\n }\n if(found){return i}\n }\n return -1\n}\n\n\nvar c;\nvar crcTable = [];\nfor(var n =0; n < 256; n++){\n c = n;\n for(var k =0; k < 8; k++){\n c = ((c&1) ? (0xEDB88320 ^ (c >>> 1)) : (c >>> 1));\n }\n crcTable[n] = c;\n}\n\nvar $module = (function($B){\n\n return {\n crc32: function(str) {\n var crc = 0 ^ (-1);\n \n for (var i = 0; i < str.length; i++ ) {\n crc = (crc >>> 8) ^ crcTable[(crc ^ str.charCodeAt(i)) & 0xFF];\n }\n \n return (crc ^ (-1)) >>> 0;\n },\n\n lz_generator: function(text, size, min_len){\n /*\n Returns a list of items based on the LZ algorithm, using the\n specified window size and a minimum match length.\n The items are a tuple (length, distance) if a match has been\n found, and a byte otherwise.\n */\n // 'text' is an instance of Python 'bytes' class, the actual\n // bytes are in text.source\n text = text.source\n if(min_len === undefined){\n min_len = 3\n }\n var pos = 0, // position in text\n items = [] // returned items\n while(pos < text.length){\n sequence = text.slice(pos, pos + min_len)\n if(sequence.length < 3){\n for(var i = pos; i < text.length; i++){\n items.push(text[i])\n }\n break\n }\n // Search the sequence in the 'size' previous bytes\n buf = text.slice(pos - size, pos)\n buf_pos = rfind(buf, sequence)\n if(buf_pos > -1){\n // Match of length 3 found; search a longer one\n var len = 1\n while(len < 259 &&\n buf_pos + len < buf.length &&\n pos + len < text.length &&\n text[pos + len] == buf[buf_pos + len]){\n len += 1\n }\n match = text.slice(pos, pos + len)\n // \"Lazy matching\": search longer match starting at next\n // position\n longer_match = false\n if(pos + len < text.length - 2){\n match2 = text.slice(pos + 1, pos + len + 2)\n longer_buf_pos = rfind(buf, match2)\n if(longer_buf_pos > -1){\n // found longer match : emit current byte as\n // literal and move 1 byte forward\n longer_match = true\n char = text[pos]\n items.push(char)\n pos += 1\n }\n }\n if(! longer_match){\n distance = buf.length - buf_pos\n items.push($B.fast_tuple([len, distance]))\n if(pos + len == text.length){\n break\n }else{\n pos += len\n items.push(text[pos])\n pos += 1\n }\n }\n }else{\n char = text[pos]\n items.push(char)\n pos += 1\n }\n }\n return items\n }\n }\n})(__BRYTHON__)"], "crypto_js": [".py", "", [], 1], "crypto_js.rollups": [".py", "", [], 1], "crypto_js.rollups.md5": [".js", "/*\nCryptoJS v3.1.2\ncode.google.com/p/crypto-js\n(c) 2009-2013 by Jeff Mott. 
All rights reserved.\ncode.google.com/p/crypto-js/wiki/License\n*/\nvar CryptoJS=CryptoJS||function(s,p){var m={},l=m.lib={},n=function(){},r=l.Base={extend:function(b){n.prototype=this;var h=new n;b&&h.mixIn(b);h.hasOwnProperty(\"init\")||(h.init=function(){h.$super.init.apply(this,arguments)});h.init.prototype=h;h.$super=this;return h},create:function(){var b=this.extend();b.init.apply(b,arguments);return b},init:function(){},mixIn:function(b){for(var h in b)b.hasOwnProperty(h)&&(this[h]=b[h]);b.hasOwnProperty(\"toString\")&&(this.toString=b.toString)},clone:function(){return this.init.prototype.extend(this)}},\nq=l.WordArray=r.extend({init:function(b,h){b=this.words=b||[];this.sigBytes=h!=p?h:4*b.length},toString:function(b){return(b||t).stringify(this)},concat:function(b){var h=this.words,a=b.words,j=this.sigBytes;b=b.sigBytes;this.clamp();if(j%4)for(var g=0;g>>2]|=(a[g>>>2]>>>24-8*(g%4)&255)<<24-8*((j+g)%4);else if(65535>>2]=a[g>>>2];else h.push.apply(h,a);this.sigBytes+=b;return this},clamp:function(){var b=this.words,h=this.sigBytes;b[h>>>2]&=4294967295<<\n32-8*(h%4);b.length=s.ceil(h/4)},clone:function(){var b=r.clone.call(this);b.words=this.words.slice(0);return b},random:function(b){for(var h=[],a=0;a>>2]>>>24-8*(j%4)&255;g.push((k>>>4).toString(16));g.push((k&15).toString(16))}return g.join(\"\")},parse:function(b){for(var a=b.length,g=[],j=0;j>>3]|=parseInt(b.substr(j,\n2),16)<<24-4*(j%8);return new q.init(g,a/2)}},a=v.Latin1={stringify:function(b){var a=b.words;b=b.sigBytes;for(var g=[],j=0;j>>2]>>>24-8*(j%4)&255));return g.join(\"\")},parse:function(b){for(var a=b.length,g=[],j=0;j>>2]|=(b.charCodeAt(j)&255)<<24-8*(j%4);return new q.init(g,a)}},u=v.Utf8={stringify:function(b){try{return decodeURIComponent(escape(a.stringify(b)))}catch(g){throw Error(\"Malformed UTF-8 data\");}},parse:function(b){return a.parse(unescape(encodeURIComponent(b)))}},\ng=l.BufferedBlockAlgorithm=r.extend({reset:function(){this._data=new q.init;this._nDataBytes=0},_append:function(b){\"string\"==typeof b&&(b=u.parse(b));this._data.concat(b);this._nDataBytes+=b.sigBytes},_process:function(b){var a=this._data,g=a.words,j=a.sigBytes,k=this.blockSize,m=j/(4*k),m=b?s.ceil(m):s.max((m|0)-this._minBufferSize,0);b=m*k;j=s.min(4*b,j);if(b){for(var l=0;l>>32-j)+k}function m(a,k,b,h,l,j,m){a=a+(k&h|b&~h)+l+m;return(a<>>32-j)+k}function l(a,k,b,h,l,j,m){a=a+(k^b^h)+l+m;return(a<>>32-j)+k}function n(a,k,b,h,l,j,m){a=a+(b^(k|~h))+l+m;return(a<>>32-j)+k}for(var r=CryptoJS,q=r.lib,v=q.WordArray,t=q.Hasher,q=r.algo,a=[],u=0;64>u;u++)a[u]=4294967296*s.abs(s.sin(u+1))|0;q=q.MD5=t.extend({_doReset:function(){this._hash=new v.init([1732584193,4023233417,2562383102,271733878])},\n_doProcessBlock:function(g,k){for(var b=0;16>b;b++){var h=k+b,w=g[h];g[h]=(w<<8|w>>>24)&16711935|(w<<24|w>>>8)&4278255360}var 
b=this._hash.words,h=g[k+0],w=g[k+1],j=g[k+2],q=g[k+3],r=g[k+4],s=g[k+5],t=g[k+6],u=g[k+7],v=g[k+8],x=g[k+9],y=g[k+10],z=g[k+11],A=g[k+12],B=g[k+13],C=g[k+14],D=g[k+15],c=b[0],d=b[1],e=b[2],f=b[3],c=p(c,d,e,f,h,7,a[0]),f=p(f,c,d,e,w,12,a[1]),e=p(e,f,c,d,j,17,a[2]),d=p(d,e,f,c,q,22,a[3]),c=p(c,d,e,f,r,7,a[4]),f=p(f,c,d,e,s,12,a[5]),e=p(e,f,c,d,t,17,a[6]),d=p(d,e,f,c,u,22,a[7]),\nc=p(c,d,e,f,v,7,a[8]),f=p(f,c,d,e,x,12,a[9]),e=p(e,f,c,d,y,17,a[10]),d=p(d,e,f,c,z,22,a[11]),c=p(c,d,e,f,A,7,a[12]),f=p(f,c,d,e,B,12,a[13]),e=p(e,f,c,d,C,17,a[14]),d=p(d,e,f,c,D,22,a[15]),c=m(c,d,e,f,w,5,a[16]),f=m(f,c,d,e,t,9,a[17]),e=m(e,f,c,d,z,14,a[18]),d=m(d,e,f,c,h,20,a[19]),c=m(c,d,e,f,s,5,a[20]),f=m(f,c,d,e,y,9,a[21]),e=m(e,f,c,d,D,14,a[22]),d=m(d,e,f,c,r,20,a[23]),c=m(c,d,e,f,x,5,a[24]),f=m(f,c,d,e,C,9,a[25]),e=m(e,f,c,d,q,14,a[26]),d=m(d,e,f,c,v,20,a[27]),c=m(c,d,e,f,B,5,a[28]),f=m(f,c,\nd,e,j,9,a[29]),e=m(e,f,c,d,u,14,a[30]),d=m(d,e,f,c,A,20,a[31]),c=l(c,d,e,f,s,4,a[32]),f=l(f,c,d,e,v,11,a[33]),e=l(e,f,c,d,z,16,a[34]),d=l(d,e,f,c,C,23,a[35]),c=l(c,d,e,f,w,4,a[36]),f=l(f,c,d,e,r,11,a[37]),e=l(e,f,c,d,u,16,a[38]),d=l(d,e,f,c,y,23,a[39]),c=l(c,d,e,f,B,4,a[40]),f=l(f,c,d,e,h,11,a[41]),e=l(e,f,c,d,q,16,a[42]),d=l(d,e,f,c,t,23,a[43]),c=l(c,d,e,f,x,4,a[44]),f=l(f,c,d,e,A,11,a[45]),e=l(e,f,c,d,D,16,a[46]),d=l(d,e,f,c,j,23,a[47]),c=n(c,d,e,f,h,6,a[48]),f=n(f,c,d,e,u,10,a[49]),e=n(e,f,c,d,\nC,15,a[50]),d=n(d,e,f,c,s,21,a[51]),c=n(c,d,e,f,A,6,a[52]),f=n(f,c,d,e,q,10,a[53]),e=n(e,f,c,d,y,15,a[54]),d=n(d,e,f,c,w,21,a[55]),c=n(c,d,e,f,v,6,a[56]),f=n(f,c,d,e,D,10,a[57]),e=n(e,f,c,d,t,15,a[58]),d=n(d,e,f,c,B,21,a[59]),c=n(c,d,e,f,r,6,a[60]),f=n(f,c,d,e,z,10,a[61]),e=n(e,f,c,d,j,15,a[62]),d=n(d,e,f,c,x,21,a[63]);b[0]=b[0]+c|0;b[1]=b[1]+d|0;b[2]=b[2]+e|0;b[3]=b[3]+f|0},_doFinalize:function(){var a=this._data,k=a.words,b=8*this._nDataBytes,h=8*a.sigBytes;k[h>>>5]|=128<<24-h%32;var l=s.floor(b/\n4294967296);k[(h+64>>>9<<4)+15]=(l<<8|l>>>24)&16711935|(l<<24|l>>>8)&4278255360;k[(h+64>>>9<<4)+14]=(b<<8|b>>>24)&16711935|(b<<24|b>>>8)&4278255360;a.sigBytes=4*(k.length+1);this._process();a=this._hash;k=a.words;for(b=0;4>b;b++)h=k[b],k[b]=(h<<8|h>>>24)&16711935|(h<<24|h>>>8)&4278255360;return a},clone:function(){var a=t.clone.call(this);a._hash=this._hash.clone();return a}});r.MD5=t._createHelper(q);r.HmacMD5=t._createHmacHelper(q)})(Math);\n"], "crypto_js.rollups.sha1": [".js", "/*\nCryptoJS v3.1.2\ncode.google.com/p/crypto-js\n(c) 2009-2013 by Jeff Mott. 
All rights reserved.\ncode.google.com/p/crypto-js/wiki/License\n*/\nvar CryptoJS=CryptoJS||function(e,m){var p={},j=p.lib={},l=function(){},f=j.Base={extend:function(a){l.prototype=this;var c=new l;a&&c.mixIn(a);c.hasOwnProperty(\"init\")||(c.init=function(){c.$super.init.apply(this,arguments)});c.init.prototype=c;c.$super=this;return c},create:function(){var a=this.extend();a.init.apply(a,arguments);return a},init:function(){},mixIn:function(a){for(var c in a)a.hasOwnProperty(c)&&(this[c]=a[c]);a.hasOwnProperty(\"toString\")&&(this.toString=a.toString)},clone:function(){return this.init.prototype.extend(this)}},\nn=j.WordArray=f.extend({init:function(a,c){a=this.words=a||[];this.sigBytes=c!=m?c:4*a.length},toString:function(a){return(a||h).stringify(this)},concat:function(a){var c=this.words,q=a.words,d=this.sigBytes;a=a.sigBytes;this.clamp();if(d%4)for(var b=0;b>>2]|=(q[b>>>2]>>>24-8*(b%4)&255)<<24-8*((d+b)%4);else if(65535>>2]=q[b>>>2];else c.push.apply(c,q);this.sigBytes+=a;return this},clamp:function(){var a=this.words,c=this.sigBytes;a[c>>>2]&=4294967295<<\n32-8*(c%4);a.length=e.ceil(c/4)},clone:function(){var a=f.clone.call(this);a.words=this.words.slice(0);return a},random:function(a){for(var c=[],b=0;b>>2]>>>24-8*(d%4)&255;b.push((f>>>4).toString(16));b.push((f&15).toString(16))}return b.join(\"\")},parse:function(a){for(var c=a.length,b=[],d=0;d>>3]|=parseInt(a.substr(d,\n2),16)<<24-4*(d%8);return new n.init(b,c/2)}},g=b.Latin1={stringify:function(a){var c=a.words;a=a.sigBytes;for(var b=[],d=0;d>>2]>>>24-8*(d%4)&255));return b.join(\"\")},parse:function(a){for(var c=a.length,b=[],d=0;d>>2]|=(a.charCodeAt(d)&255)<<24-8*(d%4);return new n.init(b,c)}},r=b.Utf8={stringify:function(a){try{return decodeURIComponent(escape(g.stringify(a)))}catch(c){throw Error(\"Malformed UTF-8 data\");}},parse:function(a){return g.parse(unescape(encodeURIComponent(a)))}},\nk=j.BufferedBlockAlgorithm=f.extend({reset:function(){this._data=new n.init;this._nDataBytes=0},_append:function(a){\"string\"==typeof a&&(a=r.parse(a));this._data.concat(a);this._nDataBytes+=a.sigBytes},_process:function(a){var c=this._data,b=c.words,d=c.sigBytes,f=this.blockSize,h=d/(4*f),h=a?e.ceil(h):e.max((h|0)-this._minBufferSize,0);a=h*f;d=e.min(4*a,d);if(a){for(var g=0;ga;a++){if(16>a)l[a]=f[n+a]|0;else{var c=l[a-3]^l[a-8]^l[a-14]^l[a-16];l[a]=c<<1|c>>>31}c=(h<<5|h>>>27)+j+l[a];c=20>a?c+((g&e|~g&k)+1518500249):40>a?c+((g^e^k)+1859775393):60>a?c+((g&e|g&k|e&k)-1894007588):c+((g^e^\nk)-899497514);j=k;k=e;e=g<<30|g>>>2;g=h;h=c}b[0]=b[0]+h|0;b[1]=b[1]+g|0;b[2]=b[2]+e|0;b[3]=b[3]+k|0;b[4]=b[4]+j|0},_doFinalize:function(){var f=this._data,e=f.words,b=8*this._nDataBytes,h=8*f.sigBytes;e[h>>>5]|=128<<24-h%32;e[(h+64>>>9<<4)+14]=Math.floor(b/4294967296);e[(h+64>>>9<<4)+15]=b;f.sigBytes=4*e.length;this._process();return this._hash},clone:function(){var e=j.clone.call(this);e._hash=this._hash.clone();return e}});e.SHA1=j._createHelper(m);e.HmacSHA1=j._createHmacHelper(m)})();\n"], "crypto_js.rollups.sha224": [".js", "/*\nCryptoJS v3.1.2\ncode.google.com/p/crypto-js\n(c) 2009-2013 by Jeff Mott. 
All rights reserved.\ncode.google.com/p/crypto-js/wiki/License\n*/\nvar CryptoJS=CryptoJS||function(g,l){var f={},k=f.lib={},h=function(){},m=k.Base={extend:function(a){h.prototype=this;var c=new h;a&&c.mixIn(a);c.hasOwnProperty(\"init\")||(c.init=function(){c.$super.init.apply(this,arguments)});c.init.prototype=c;c.$super=this;return c},create:function(){var a=this.extend();a.init.apply(a,arguments);return a},init:function(){},mixIn:function(a){for(var c in a)a.hasOwnProperty(c)&&(this[c]=a[c]);a.hasOwnProperty(\"toString\")&&(this.toString=a.toString)},clone:function(){return this.init.prototype.extend(this)}},\nq=k.WordArray=m.extend({init:function(a,c){a=this.words=a||[];this.sigBytes=c!=l?c:4*a.length},toString:function(a){return(a||s).stringify(this)},concat:function(a){var c=this.words,d=a.words,b=this.sigBytes;a=a.sigBytes;this.clamp();if(b%4)for(var e=0;e>>2]|=(d[e>>>2]>>>24-8*(e%4)&255)<<24-8*((b+e)%4);else if(65535>>2]=d[e>>>2];else c.push.apply(c,d);this.sigBytes+=a;return this},clamp:function(){var a=this.words,c=this.sigBytes;a[c>>>2]&=4294967295<<\n32-8*(c%4);a.length=g.ceil(c/4)},clone:function(){var a=m.clone.call(this);a.words=this.words.slice(0);return a},random:function(a){for(var c=[],d=0;d>>2]>>>24-8*(b%4)&255;d.push((e>>>4).toString(16));d.push((e&15).toString(16))}return d.join(\"\")},parse:function(a){for(var c=a.length,d=[],b=0;b>>3]|=parseInt(a.substr(b,\n2),16)<<24-4*(b%8);return new q.init(d,c/2)}},n=t.Latin1={stringify:function(a){var c=a.words;a=a.sigBytes;for(var d=[],b=0;b>>2]>>>24-8*(b%4)&255));return d.join(\"\")},parse:function(a){for(var c=a.length,d=[],b=0;b>>2]|=(a.charCodeAt(b)&255)<<24-8*(b%4);return new q.init(d,c)}},j=t.Utf8={stringify:function(a){try{return decodeURIComponent(escape(n.stringify(a)))}catch(c){throw Error(\"Malformed UTF-8 data\");}},parse:function(a){return n.parse(unescape(encodeURIComponent(a)))}},\nw=k.BufferedBlockAlgorithm=m.extend({reset:function(){this._data=new q.init;this._nDataBytes=0},_append:function(a){\"string\"==typeof a&&(a=j.parse(a));this._data.concat(a);this._nDataBytes+=a.sigBytes},_process:function(a){var c=this._data,d=c.words,b=c.sigBytes,e=this.blockSize,f=b/(4*e),f=a?g.ceil(f):g.max((f|0)-this._minBufferSize,0);a=f*e;b=g.min(4*a,b);if(a){for(var u=0;un;){var j;a:{j=s;for(var w=g.sqrt(j),v=2;v<=w;v++)if(!(j%v)){j=!1;break a}j=!0}j&&(8>n&&(m[n]=t(g.pow(s,0.5))),q[n]=t(g.pow(s,1/3)),n++);s++}var a=[],f=f.SHA256=h.extend({_doReset:function(){this._hash=new k.init(m.slice(0))},_doProcessBlock:function(c,d){for(var b=this._hash.words,e=b[0],f=b[1],g=b[2],k=b[3],h=b[4],l=b[5],m=b[6],n=b[7],p=0;64>p;p++){if(16>p)a[p]=\nc[d+p]|0;else{var j=a[p-15],r=a[p-2];a[p]=((j<<25|j>>>7)^(j<<14|j>>>18)^j>>>3)+a[p-7]+((r<<15|r>>>17)^(r<<13|r>>>19)^r>>>10)+a[p-16]}j=n+((h<<26|h>>>6)^(h<<21|h>>>11)^(h<<7|h>>>25))+(h&l^~h&m)+q[p]+a[p];r=((e<<30|e>>>2)^(e<<19|e>>>13)^(e<<10|e>>>22))+(e&f^e&g^f&g);n=m;m=l;l=h;h=k+j|0;k=g;g=f;f=e;e=j+r|0}b[0]=b[0]+e|0;b[1]=b[1]+f|0;b[2]=b[2]+g|0;b[3]=b[3]+k|0;b[4]=b[4]+h|0;b[5]=b[5]+l|0;b[6]=b[6]+m|0;b[7]=b[7]+n|0},_doFinalize:function(){var a=this._data,d=a.words,b=8*this._nDataBytes,e=8*a.sigBytes;\nd[e>>>5]|=128<<24-e%32;d[(e+64>>>9<<4)+14]=g.floor(b/4294967296);d[(e+64>>>9<<4)+15]=b;a.sigBytes=4*d.length;this._process();return this._hash},clone:function(){var a=h.clone.call(this);a._hash=this._hash.clone();return a}});l.SHA256=h._createHelper(f);l.HmacSHA256=h._createHmacHelper(f)})(Math);\n(function(){var 
g=CryptoJS,l=g.lib.WordArray,f=g.algo,k=f.SHA256,f=f.SHA224=k.extend({_doReset:function(){this._hash=new l.init([3238371032,914150663,812702999,4144912697,4290775857,1750603025,1694076839,3204075428])},_doFinalize:function(){var f=k._doFinalize.call(this);f.sigBytes-=4;return f}});g.SHA224=k._createHelper(f);g.HmacSHA224=k._createHmacHelper(f)})();\n"], "crypto_js.rollups.sha256": [".js", "/*\nCryptoJS v3.1.2\ncode.google.com/p/crypto-js\n(c) 2009-2013 by Jeff Mott. All rights reserved.\ncode.google.com/p/crypto-js/wiki/License\n*/\nvar CryptoJS=CryptoJS||function(h,s){var f={},t=f.lib={},g=function(){},j=t.Base={extend:function(a){g.prototype=this;var c=new g;a&&c.mixIn(a);c.hasOwnProperty(\"init\")||(c.init=function(){c.$super.init.apply(this,arguments)});c.init.prototype=c;c.$super=this;return c},create:function(){var a=this.extend();a.init.apply(a,arguments);return a},init:function(){},mixIn:function(a){for(var c in a)a.hasOwnProperty(c)&&(this[c]=a[c]);a.hasOwnProperty(\"toString\")&&(this.toString=a.toString)},clone:function(){return this.init.prototype.extend(this)}},\nq=t.WordArray=j.extend({init:function(a,c){a=this.words=a||[];this.sigBytes=c!=s?c:4*a.length},toString:function(a){return(a||u).stringify(this)},concat:function(a){var c=this.words,d=a.words,b=this.sigBytes;a=a.sigBytes;this.clamp();if(b%4)for(var e=0;e>>2]|=(d[e>>>2]>>>24-8*(e%4)&255)<<24-8*((b+e)%4);else if(65535>>2]=d[e>>>2];else c.push.apply(c,d);this.sigBytes+=a;return this},clamp:function(){var a=this.words,c=this.sigBytes;a[c>>>2]&=4294967295<<\n32-8*(c%4);a.length=h.ceil(c/4)},clone:function(){var a=j.clone.call(this);a.words=this.words.slice(0);return a},random:function(a){for(var c=[],d=0;d>>2]>>>24-8*(b%4)&255;d.push((e>>>4).toString(16));d.push((e&15).toString(16))}return d.join(\"\")},parse:function(a){for(var c=a.length,d=[],b=0;b>>3]|=parseInt(a.substr(b,\n2),16)<<24-4*(b%8);return new q.init(d,c/2)}},k=v.Latin1={stringify:function(a){var c=a.words;a=a.sigBytes;for(var d=[],b=0;b>>2]>>>24-8*(b%4)&255));return d.join(\"\")},parse:function(a){for(var c=a.length,d=[],b=0;b>>2]|=(a.charCodeAt(b)&255)<<24-8*(b%4);return new q.init(d,c)}},l=v.Utf8={stringify:function(a){try{return decodeURIComponent(escape(k.stringify(a)))}catch(c){throw Error(\"Malformed UTF-8 data\");}},parse:function(a){return k.parse(unescape(encodeURIComponent(a)))}},\nx=t.BufferedBlockAlgorithm=j.extend({reset:function(){this._data=new q.init;this._nDataBytes=0},_append:function(a){\"string\"==typeof a&&(a=l.parse(a));this._data.concat(a);this._nDataBytes+=a.sigBytes},_process:function(a){var c=this._data,d=c.words,b=c.sigBytes,e=this.blockSize,f=b/(4*e),f=a?h.ceil(f):h.max((f|0)-this._minBufferSize,0);a=f*e;b=h.min(4*a,b);if(a){for(var m=0;mk;){var l;a:{l=u;for(var x=h.sqrt(l),w=2;w<=x;w++)if(!(l%w)){l=!1;break a}l=!0}l&&(8>k&&(j[k]=v(h.pow(u,0.5))),q[k]=v(h.pow(u,1/3)),k++);u++}var a=[],f=f.SHA256=g.extend({_doReset:function(){this._hash=new t.init(j.slice(0))},_doProcessBlock:function(c,d){for(var b=this._hash.words,e=b[0],f=b[1],m=b[2],h=b[3],p=b[4],j=b[5],k=b[6],l=b[7],n=0;64>n;n++){if(16>n)a[n]=\nc[d+n]|0;else{var 
r=a[n-15],g=a[n-2];a[n]=((r<<25|r>>>7)^(r<<14|r>>>18)^r>>>3)+a[n-7]+((g<<15|g>>>17)^(g<<13|g>>>19)^g>>>10)+a[n-16]}r=l+((p<<26|p>>>6)^(p<<21|p>>>11)^(p<<7|p>>>25))+(p&j^~p&k)+q[n]+a[n];g=((e<<30|e>>>2)^(e<<19|e>>>13)^(e<<10|e>>>22))+(e&f^e&m^f&m);l=k;k=j;j=p;p=h+r|0;h=m;m=f;f=e;e=r+g|0}b[0]=b[0]+e|0;b[1]=b[1]+f|0;b[2]=b[2]+m|0;b[3]=b[3]+h|0;b[4]=b[4]+p|0;b[5]=b[5]+j|0;b[6]=b[6]+k|0;b[7]=b[7]+l|0},_doFinalize:function(){var a=this._data,d=a.words,b=8*this._nDataBytes,e=8*a.sigBytes;\nd[e>>>5]|=128<<24-e%32;d[(e+64>>>9<<4)+14]=h.floor(b/4294967296);d[(e+64>>>9<<4)+15]=b;a.sigBytes=4*d.length;this._process();return this._hash},clone:function(){var a=g.clone.call(this);a._hash=this._hash.clone();return a}});s.SHA256=g._createHelper(f);s.HmacSHA256=g._createHmacHelper(f)})(Math);\n"], "crypto_js.rollups.sha3": [".js", "/*\nCryptoJS v3.1.2\ncode.google.com/p/crypto-js\n(c) 2009-2013 by Jeff Mott. All rights reserved.\ncode.google.com/p/crypto-js/wiki/License\n*/\nvar CryptoJS=CryptoJS||function(v,p){var d={},u=d.lib={},r=function(){},f=u.Base={extend:function(a){r.prototype=this;var b=new r;a&&b.mixIn(a);b.hasOwnProperty(\"init\")||(b.init=function(){b.$super.init.apply(this,arguments)});b.init.prototype=b;b.$super=this;return b},create:function(){var a=this.extend();a.init.apply(a,arguments);return a},init:function(){},mixIn:function(a){for(var b in a)a.hasOwnProperty(b)&&(this[b]=a[b]);a.hasOwnProperty(\"toString\")&&(this.toString=a.toString)},clone:function(){return this.init.prototype.extend(this)}},\ns=u.WordArray=f.extend({init:function(a,b){a=this.words=a||[];this.sigBytes=b!=p?b:4*a.length},toString:function(a){return(a||y).stringify(this)},concat:function(a){var b=this.words,c=a.words,j=this.sigBytes;a=a.sigBytes;this.clamp();if(j%4)for(var n=0;n>>2]|=(c[n>>>2]>>>24-8*(n%4)&255)<<24-8*((j+n)%4);else if(65535>>2]=c[n>>>2];else b.push.apply(b,c);this.sigBytes+=a;return this},clamp:function(){var a=this.words,b=this.sigBytes;a[b>>>2]&=4294967295<<\n32-8*(b%4);a.length=v.ceil(b/4)},clone:function(){var a=f.clone.call(this);a.words=this.words.slice(0);return a},random:function(a){for(var b=[],c=0;c>>2]>>>24-8*(j%4)&255;c.push((n>>>4).toString(16));c.push((n&15).toString(16))}return c.join(\"\")},parse:function(a){for(var b=a.length,c=[],j=0;j>>3]|=parseInt(a.substr(j,\n2),16)<<24-4*(j%8);return new s.init(c,b/2)}},e=x.Latin1={stringify:function(a){var b=a.words;a=a.sigBytes;for(var c=[],j=0;j>>2]>>>24-8*(j%4)&255));return c.join(\"\")},parse:function(a){for(var b=a.length,c=[],j=0;j>>2]|=(a.charCodeAt(j)&255)<<24-8*(j%4);return new s.init(c,b)}},q=x.Utf8={stringify:function(a){try{return decodeURIComponent(escape(e.stringify(a)))}catch(b){throw Error(\"Malformed UTF-8 data\");}},parse:function(a){return e.parse(unescape(encodeURIComponent(a)))}},\nt=u.BufferedBlockAlgorithm=f.extend({reset:function(){this._data=new s.init;this._nDataBytes=0},_append:function(a){\"string\"==typeof a&&(a=q.parse(a));this._data.concat(a);this._nDataBytes+=a.sigBytes},_process:function(a){var b=this._data,c=b.words,j=b.sigBytes,n=this.blockSize,e=j/(4*n),e=a?v.ceil(e):v.max((e|0)-this._minBufferSize,0);a=e*n;j=v.min(4*a,j);if(a){for(var f=0;ft;t++){s[e+5*q]=(t+1)*(t+2)/2%64;var w=(2*e+3*q)%5,e=q%5,q=w}for(e=0;5>e;e++)for(q=0;5>q;q++)x[e+5*q]=q+5*((2*e+3*q)%5);e=1;for(q=0;24>q;q++){for(var a=w=t=0;7>a;a++){if(e&1){var b=(1<b?w^=1<e;e++)c[e]=f.create();d=d.SHA3=r.extend({cfg:r.cfg.extend({outputLength:512}),_doReset:function(){for(var a=this._state=\n[],b=0;25>b;b++)a[b]=new 
f.init;this.blockSize=(1600-2*this.cfg.outputLength)/32},_doProcessBlock:function(a,b){for(var e=this._state,f=this.blockSize/2,h=0;h>>24)&16711935|(l<<24|l>>>8)&4278255360,m=(m<<8|m>>>24)&16711935|(m<<24|m>>>8)&4278255360,g=e[h];g.high^=m;g.low^=l}for(f=0;24>f;f++){for(h=0;5>h;h++){for(var d=l=0,k=0;5>k;k++)g=e[h+5*k],l^=g.high,d^=g.low;g=c[h];g.high=l;g.low=d}for(h=0;5>h;h++){g=c[(h+4)%5];l=c[(h+1)%5];m=l.high;k=l.low;l=g.high^\n(m<<1|k>>>31);d=g.low^(k<<1|m>>>31);for(k=0;5>k;k++)g=e[h+5*k],g.high^=l,g.low^=d}for(m=1;25>m;m++)g=e[m],h=g.high,g=g.low,k=s[m],32>k?(l=h<>>32-k,d=g<>>32-k):(l=g<>>64-k,d=h<>>64-k),g=c[x[m]],g.high=l,g.low=d;g=c[0];h=e[0];g.high=h.high;g.low=h.low;for(h=0;5>h;h++)for(k=0;5>k;k++)m=h+5*k,g=e[m],l=c[m],m=c[(h+1)%5+5*k],d=c[(h+2)%5+5*k],g.high=l.high^~m.high&d.high,g.low=l.low^~m.low&d.low;g=e[0];h=y[f];g.high^=h.high;g.low^=h.low}},_doFinalize:function(){var a=this._data,\nb=a.words,c=8*a.sigBytes,e=32*this.blockSize;b[c>>>5]|=1<<24-c%32;b[(v.ceil((c+1)/e)*e>>>5)-1]|=128;a.sigBytes=4*b.length;this._process();for(var a=this._state,b=this.cfg.outputLength/8,c=b/8,e=[],h=0;h>>24)&16711935|(f<<24|f>>>8)&4278255360,d=(d<<8|d>>>24)&16711935|(d<<24|d>>>8)&4278255360;e.push(d);e.push(f)}return new u.init(e,b)},clone:function(){for(var a=r.clone.call(this),b=a._state=this._state.slice(0),c=0;25>c;c++)b[c]=b[c].clone();return a}});\np.SHA3=r._createHelper(d);p.HmacSHA3=r._createHmacHelper(d)})(Math);\n"], "crypto_js.rollups.sha384": [".js", "/*\nCryptoJS v3.1.2\ncode.google.com/p/crypto-js\n(c) 2009-2013 by Jeff Mott. All rights reserved.\ncode.google.com/p/crypto-js/wiki/License\n*/\nvar CryptoJS=CryptoJS||function(a,c){var d={},j=d.lib={},f=function(){},m=j.Base={extend:function(a){f.prototype=this;var b=new f;a&&b.mixIn(a);b.hasOwnProperty(\"init\")||(b.init=function(){b.$super.init.apply(this,arguments)});b.init.prototype=b;b.$super=this;return b},create:function(){var a=this.extend();a.init.apply(a,arguments);return a},init:function(){},mixIn:function(a){for(var b in a)a.hasOwnProperty(b)&&(this[b]=a[b]);a.hasOwnProperty(\"toString\")&&(this.toString=a.toString)},clone:function(){return this.init.prototype.extend(this)}},\nB=j.WordArray=m.extend({init:function(a,b){a=this.words=a||[];this.sigBytes=b!=c?b:4*a.length},toString:function(a){return(a||y).stringify(this)},concat:function(a){var b=this.words,g=a.words,e=this.sigBytes;a=a.sigBytes;this.clamp();if(e%4)for(var k=0;k>>2]|=(g[k>>>2]>>>24-8*(k%4)&255)<<24-8*((e+k)%4);else if(65535>>2]=g[k>>>2];else b.push.apply(b,g);this.sigBytes+=a;return this},clamp:function(){var n=this.words,b=this.sigBytes;n[b>>>2]&=4294967295<<\n32-8*(b%4);n.length=a.ceil(b/4)},clone:function(){var a=m.clone.call(this);a.words=this.words.slice(0);return a},random:function(n){for(var b=[],g=0;g>>2]>>>24-8*(e%4)&255;g.push((k>>>4).toString(16));g.push((k&15).toString(16))}return g.join(\"\")},parse:function(a){for(var b=a.length,g=[],e=0;e>>3]|=parseInt(a.substr(e,\n2),16)<<24-4*(e%8);return new B.init(g,b/2)}},F=v.Latin1={stringify:function(a){var b=a.words;a=a.sigBytes;for(var g=[],e=0;e>>2]>>>24-8*(e%4)&255));return g.join(\"\")},parse:function(a){for(var b=a.length,g=[],e=0;e>>2]|=(a.charCodeAt(e)&255)<<24-8*(e%4);return new B.init(g,b)}},ha=v.Utf8={stringify:function(a){try{return decodeURIComponent(escape(F.stringify(a)))}catch(b){throw Error(\"Malformed UTF-8 data\");}},parse:function(a){return F.parse(unescape(encodeURIComponent(a)))}},\nZ=j.BufferedBlockAlgorithm=m.extend({reset:function(){this._data=new 
B.init;this._nDataBytes=0},_append:function(a){\"string\"==typeof a&&(a=ha.parse(a));this._data.concat(a);this._nDataBytes+=a.sigBytes},_process:function(n){var b=this._data,g=b.words,e=b.sigBytes,k=this.blockSize,m=e/(4*k),m=n?a.ceil(m):a.max((m|0)-this._minBufferSize,0);n=m*k;e=a.min(4*n,e);if(n){for(var c=0;cy;y++)v[y]=a();j=j.SHA512=d.extend({_doReset:function(){this._hash=new m.init([new f.init(1779033703,4089235720),new f.init(3144134277,2227873595),new f.init(1013904242,4271175723),new f.init(2773480762,1595750129),new f.init(1359893119,2917565137),new f.init(2600822924,725511199),new f.init(528734635,4215389547),new f.init(1541459225,327033209)])},_doProcessBlock:function(a,c){for(var d=this._hash.words,\nf=d[0],j=d[1],b=d[2],g=d[3],e=d[4],k=d[5],m=d[6],d=d[7],y=f.high,M=f.low,$=j.high,N=j.low,aa=b.high,O=b.low,ba=g.high,P=g.low,ca=e.high,Q=e.low,da=k.high,R=k.low,ea=m.high,S=m.low,fa=d.high,T=d.low,s=y,p=M,G=$,D=N,H=aa,E=O,W=ba,I=P,t=ca,q=Q,U=da,J=R,V=ea,K=S,X=fa,L=T,u=0;80>u;u++){var z=v[u];if(16>u)var r=z.high=a[c+2*u]|0,h=z.low=a[c+2*u+1]|0;else{var r=v[u-15],h=r.high,w=r.low,r=(h>>>1|w<<31)^(h>>>8|w<<24)^h>>>7,w=(w>>>1|h<<31)^(w>>>8|h<<24)^(w>>>7|h<<25),C=v[u-2],h=C.high,l=C.low,C=(h>>>19|l<<\n13)^(h<<3|l>>>29)^h>>>6,l=(l>>>19|h<<13)^(l<<3|h>>>29)^(l>>>6|h<<26),h=v[u-7],Y=h.high,A=v[u-16],x=A.high,A=A.low,h=w+h.low,r=r+Y+(h>>>0>>0?1:0),h=h+l,r=r+C+(h>>>0>>0?1:0),h=h+A,r=r+x+(h>>>0>>0?1:0);z.high=r;z.low=h}var Y=t&U^~t&V,A=q&J^~q&K,z=s&G^s&H^G&H,ja=p&D^p&E^D&E,w=(s>>>28|p<<4)^(s<<30|p>>>2)^(s<<25|p>>>7),C=(p>>>28|s<<4)^(p<<30|s>>>2)^(p<<25|s>>>7),l=B[u],ka=l.high,ga=l.low,l=L+((q>>>14|t<<18)^(q>>>18|t<<14)^(q<<23|t>>>9)),x=X+((t>>>14|q<<18)^(t>>>18|q<<14)^(t<<23|q>>>9))+(l>>>0<\nL>>>0?1:0),l=l+A,x=x+Y+(l>>>0>>0?1:0),l=l+ga,x=x+ka+(l>>>0>>0?1:0),l=l+h,x=x+r+(l>>>0>>0?1:0),h=C+ja,z=w+z+(h>>>0>>0?1:0),X=V,L=K,V=U,K=J,U=t,J=q,q=I+l|0,t=W+x+(q>>>0>>0?1:0)|0,W=H,I=E,H=G,E=D,G=s,D=p,p=l+h|0,s=x+z+(p>>>0>>0?1:0)|0}M=f.low=M+p;f.high=y+s+(M>>>0

>>0?1:0);N=j.low=N+D;j.high=$+G+(N>>>0>>0?1:0);O=b.low=O+E;b.high=aa+H+(O>>>0>>0?1:0);P=g.low=P+I;g.high=ba+W+(P>>>0>>0?1:0);Q=e.low=Q+q;e.high=ca+t+(Q>>>0>>0?1:0);R=k.low=R+J;k.high=da+U+(R>>>0>>0?1:0);\nS=m.low=S+K;m.high=ea+V+(S>>>0>>0?1:0);T=d.low=T+L;d.high=fa+X+(T>>>0>>0?1:0)},_doFinalize:function(){var a=this._data,c=a.words,d=8*this._nDataBytes,f=8*a.sigBytes;c[f>>>5]|=128<<24-f%32;c[(f+128>>>10<<5)+30]=Math.floor(d/4294967296);c[(f+128>>>10<<5)+31]=d;a.sigBytes=4*c.length;this._process();return this._hash.toX32()},clone:function(){var a=d.clone.call(this);a._hash=this._hash.clone();return a},blockSize:32});c.SHA512=d._createHelper(j);c.HmacSHA512=d._createHmacHelper(j)})();\n(function(){var a=CryptoJS,c=a.x64,d=c.Word,j=c.WordArray,c=a.algo,f=c.SHA512,c=c.SHA384=f.extend({_doReset:function(){this._hash=new j.init([new d.init(3418070365,3238371032),new d.init(1654270250,914150663),new d.init(2438529370,812702999),new d.init(355462360,4144912697),new d.init(1731405415,4290775857),new d.init(2394180231,1750603025),new d.init(3675008525,1694076839),new d.init(1203062813,3204075428)])},_doFinalize:function(){var a=f._doFinalize.call(this);a.sigBytes-=16;return a}});a.SHA384=\nf._createHelper(c);a.HmacSHA384=f._createHmacHelper(c)})();\n"], "crypto_js.rollups.sha512": [".js", "/*\nCryptoJS v3.1.2\ncode.google.com/p/crypto-js\n(c) 2009-2013 by Jeff Mott. All rights reserved.\ncode.google.com/p/crypto-js/wiki/License\n*/\nvar CryptoJS=CryptoJS||function(a,m){var r={},f=r.lib={},g=function(){},l=f.Base={extend:function(a){g.prototype=this;var b=new g;a&&b.mixIn(a);b.hasOwnProperty(\"init\")||(b.init=function(){b.$super.init.apply(this,arguments)});b.init.prototype=b;b.$super=this;return b},create:function(){var a=this.extend();a.init.apply(a,arguments);return a},init:function(){},mixIn:function(a){for(var b in a)a.hasOwnProperty(b)&&(this[b]=a[b]);a.hasOwnProperty(\"toString\")&&(this.toString=a.toString)},clone:function(){return this.init.prototype.extend(this)}},\np=f.WordArray=l.extend({init:function(a,b){a=this.words=a||[];this.sigBytes=b!=m?b:4*a.length},toString:function(a){return(a||q).stringify(this)},concat:function(a){var b=this.words,d=a.words,c=this.sigBytes;a=a.sigBytes;this.clamp();if(c%4)for(var j=0;j>>2]|=(d[j>>>2]>>>24-8*(j%4)&255)<<24-8*((c+j)%4);else if(65535>>2]=d[j>>>2];else b.push.apply(b,d);this.sigBytes+=a;return this},clamp:function(){var n=this.words,b=this.sigBytes;n[b>>>2]&=4294967295<<\n32-8*(b%4);n.length=a.ceil(b/4)},clone:function(){var a=l.clone.call(this);a.words=this.words.slice(0);return a},random:function(n){for(var b=[],d=0;d>>2]>>>24-8*(c%4)&255;d.push((j>>>4).toString(16));d.push((j&15).toString(16))}return d.join(\"\")},parse:function(a){for(var b=a.length,d=[],c=0;c>>3]|=parseInt(a.substr(c,\n2),16)<<24-4*(c%8);return new p.init(d,b/2)}},G=y.Latin1={stringify:function(a){var b=a.words;a=a.sigBytes;for(var d=[],c=0;c>>2]>>>24-8*(c%4)&255));return d.join(\"\")},parse:function(a){for(var b=a.length,d=[],c=0;c>>2]|=(a.charCodeAt(c)&255)<<24-8*(c%4);return new p.init(d,b)}},fa=y.Utf8={stringify:function(a){try{return decodeURIComponent(escape(G.stringify(a)))}catch(b){throw Error(\"Malformed UTF-8 data\");}},parse:function(a){return G.parse(unescape(encodeURIComponent(a)))}},\nh=f.BufferedBlockAlgorithm=l.extend({reset:function(){this._data=new p.init;this._nDataBytes=0},_append:function(a){\"string\"==typeof a&&(a=fa.parse(a));this._data.concat(a);this._nDataBytes+=a.sigBytes},_process:function(n){var 
b=this._data,d=b.words,c=b.sigBytes,j=this.blockSize,l=c/(4*j),l=n?a.ceil(l):a.max((l|0)-this._minBufferSize,0);n=l*j;c=a.min(4*n,c);if(n){for(var h=0;hq;q++)y[q]=a();f=f.SHA512=r.extend({_doReset:function(){this._hash=new l.init([new g.init(1779033703,4089235720),new g.init(3144134277,2227873595),new g.init(1013904242,4271175723),new g.init(2773480762,1595750129),new g.init(1359893119,2917565137),new g.init(2600822924,725511199),new g.init(528734635,4215389547),new g.init(1541459225,327033209)])},_doProcessBlock:function(a,f){for(var h=this._hash.words,\ng=h[0],n=h[1],b=h[2],d=h[3],c=h[4],j=h[5],l=h[6],h=h[7],q=g.high,m=g.low,r=n.high,N=n.low,Z=b.high,O=b.low,$=d.high,P=d.low,aa=c.high,Q=c.low,ba=j.high,R=j.low,ca=l.high,S=l.low,da=h.high,T=h.low,v=q,s=m,H=r,E=N,I=Z,F=O,W=$,J=P,w=aa,t=Q,U=ba,K=R,V=ca,L=S,X=da,M=T,x=0;80>x;x++){var B=y[x];if(16>x)var u=B.high=a[f+2*x]|0,e=B.low=a[f+2*x+1]|0;else{var u=y[x-15],e=u.high,z=u.low,u=(e>>>1|z<<31)^(e>>>8|z<<24)^e>>>7,z=(z>>>1|e<<31)^(z>>>8|e<<24)^(z>>>7|e<<25),D=y[x-2],e=D.high,k=D.low,D=(e>>>19|k<<13)^\n(e<<3|k>>>29)^e>>>6,k=(k>>>19|e<<13)^(k<<3|e>>>29)^(k>>>6|e<<26),e=y[x-7],Y=e.high,C=y[x-16],A=C.high,C=C.low,e=z+e.low,u=u+Y+(e>>>0>>0?1:0),e=e+k,u=u+D+(e>>>0>>0?1:0),e=e+C,u=u+A+(e>>>0>>0?1:0);B.high=u;B.low=e}var Y=w&U^~w&V,C=t&K^~t&L,B=v&H^v&I^H&I,ha=s&E^s&F^E&F,z=(v>>>28|s<<4)^(v<<30|s>>>2)^(v<<25|s>>>7),D=(s>>>28|v<<4)^(s<<30|v>>>2)^(s<<25|v>>>7),k=p[x],ia=k.high,ea=k.low,k=M+((t>>>14|w<<18)^(t>>>18|w<<14)^(t<<23|w>>>9)),A=X+((w>>>14|t<<18)^(w>>>18|t<<14)^(w<<23|t>>>9))+(k>>>0>>\n0?1:0),k=k+C,A=A+Y+(k>>>0>>0?1:0),k=k+ea,A=A+ia+(k>>>0>>0?1:0),k=k+e,A=A+u+(k>>>0>>0?1:0),e=D+ha,B=z+B+(e>>>0>>0?1:0),X=V,M=L,V=U,L=K,U=w,K=t,t=J+k|0,w=W+A+(t>>>0>>0?1:0)|0,W=I,J=F,I=H,F=E,H=v,E=s,s=k+e|0,v=A+B+(s>>>0>>0?1:0)|0}m=g.low=m+s;g.high=q+v+(m>>>0>>0?1:0);N=n.low=N+E;n.high=r+H+(N>>>0>>0?1:0);O=b.low=O+F;b.high=Z+I+(O>>>0>>0?1:0);P=d.low=P+J;d.high=$+W+(P>>>0>>0?1:0);Q=c.low=Q+t;c.high=aa+w+(Q>>>0>>0?1:0);R=j.low=R+K;j.high=ba+U+(R>>>0>>0?1:0);S=l.low=\nS+L;l.high=ca+V+(S>>>0>>0?1:0);T=h.low=T+M;h.high=da+X+(T>>>0>>0?1:0)},_doFinalize:function(){var a=this._data,f=a.words,h=8*this._nDataBytes,g=8*a.sigBytes;f[g>>>5]|=128<<24-g%32;f[(g+128>>>10<<5)+30]=Math.floor(h/4294967296);f[(g+128>>>10<<5)+31]=h;a.sigBytes=4*f.length;this._process();return this._hash.toX32()},clone:function(){var a=r.clone.call(this);a._hash=this._hash.clone();return a},blockSize:32});m.SHA512=r._createHelper(f);m.HmacSHA512=r._createHmacHelper(f)})();\n"], "abc": [".py", "\n\n\n\"\"\"Abstract Base Classes (ABCs) according to PEP 3119.\"\"\"\n\n\ndef abstractmethod(funcobj):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n funcobj.__isabstractmethod__=True\n return funcobj\n \n \nclass abstractclassmethod(classmethod):\n ''\n\n\n \n \n __isabstractmethod__=True\n \n def __init__(self,callable):\n callable.__isabstractmethod__=True\n super().__init__(callable)\n \n \nclass abstractstaticmethod(staticmethod):\n ''\n\n\n \n \n __isabstractmethod__=True\n \n def __init__(self,callable):\n callable.__isabstractmethod__=True\n super().__init__(callable)\n \n \nclass abstractproperty(property):\n ''\n\n\n \n \n __isabstractmethod__=True\n \n \ntry :\n from _abc import (get_cache_token,_abc_init,_abc_register,\n _abc_instancecheck,_abc_subclasscheck,_get_dump,\n _reset_registry,_reset_caches)\nexcept ImportError:\n from _py_abc import ABCMeta,get_cache_token\n ABCMeta.__module__='abc'\nelse :\n class ABCMeta(type):\n ''\n\n\n\n\n\n\n\n\n\n\n \n def __new__(mcls,name,bases,namespace,**kwargs):\n 
cls=super().__new__(mcls,name,bases,namespace,**kwargs)\n _abc_init(cls)\n return cls\n \n def register(cls,subclass):\n ''\n\n\n \n return _abc_register(cls,subclass)\n \n def __instancecheck__(cls,instance):\n ''\n return _abc_instancecheck(cls,instance)\n \n def __subclasscheck__(cls,subclass):\n ''\n return _abc_subclasscheck(cls,subclass)\n \n def _dump_registry(cls,file=None ):\n ''\n print(f\"Class: {cls.__module__}.{cls.__qualname__}\",file=file)\n print(f\"Inv. counter: {get_cache_token()}\",file=file)\n (_abc_registry,_abc_cache,_abc_negative_cache,\n _abc_negative_cache_version)=_get_dump(cls)\n print(f\"_abc_registry: {_abc_registry!r}\",file=file)\n print(f\"_abc_cache: {_abc_cache!r}\",file=file)\n print(f\"_abc_negative_cache: {_abc_negative_cache!r}\",file=file)\n print(f\"_abc_negative_cache_version: {_abc_negative_cache_version!r}\",\n file=file)\n \n def _abc_registry_clear(cls):\n ''\n _reset_registry(cls)\n \n def _abc_caches_clear(cls):\n ''\n _reset_caches(cls)\n \n \nclass ABC(metaclass=ABCMeta):\n ''\n\n \n __slots__=()\n", ["_abc", "_py_abc"]], "antigravity": [".py", "\nimport webbrowser\nimport hashlib\n\nwebbrowser.open(\"https://xkcd.com/353/\")\n\ndef geohash(latitude,longitude,datedow):\n ''\n\n\n\n\n \n \n h=hashlib.md5(datedow,usedforsecurity=False ).hexdigest()\n p,q=[('%f'%float.fromhex('0.'+x))for x in (h[:16],h[16:32])]\n print('%d%s %d%s'%(latitude,p[1:],longitude,q[1:]))\n", ["hashlib", "webbrowser"]], "argparse": [".py", "\n\n\n\"\"\"Command-line parsing library\n\nThis module is an optparse-inspired command-line parsing library that:\n\n - handles both optional and positional arguments\n - produces highly informative usage messages\n - supports parsers that dispatch to sub-parsers\n\nThe following is a simple usage example that sums integers from the\ncommand-line and writes the result to a file::\n\n parser = argparse.ArgumentParser(\n description='sum the integers at the command line')\n parser.add_argument(\n 'integers', metavar='int', nargs='+', type=int,\n help='an integer to be summed')\n parser.add_argument(\n '--log', default=sys.stdout, type=argparse.FileType('w'),\n help='the file where the sum should be written')\n args = parser.parse_args()\n args.log.write('%s' % sum(args.integers))\n args.log.close()\n\nThe module contains the following public classes:\n\n - ArgumentParser -- The main entry point for command-line parsing. As the\n example above shows, the add_argument() method is used to populate\n the parser with actions for optional and positional arguments. Then\n the parse_args() method is invoked to convert the args at the\n command-line into an object with attributes.\n\n - ArgumentError -- The exception raised by ArgumentParser objects when\n there are errors with the parser's actions. Errors raised while\n parsing the command-line are caught by ArgumentParser and emitted\n as command-line messages.\n\n - FileType -- A factory for defining types of files to be created. As the\n example above shows, instances of FileType are typically passed as\n the type= argument of add_argument() calls.\n\n - Action -- The base class for parser actions. Typically actions are\n selected by passing strings like 'store_true' or 'append_const' to\n the action= argument of add_argument(). 
However, for greater\n customization of ArgumentParser actions, subclasses of Action may\n be defined and passed as the action= argument.\n\n - HelpFormatter, RawDescriptionHelpFormatter, RawTextHelpFormatter,\n ArgumentDefaultsHelpFormatter -- Formatter classes which\n may be passed as the formatter_class= argument to the\n ArgumentParser constructor. HelpFormatter is the default,\n RawDescriptionHelpFormatter and RawTextHelpFormatter tell the parser\n not to change the formatting for help text, and\n ArgumentDefaultsHelpFormatter adds information about argument defaults\n to the help.\n\nAll other classes in this module are considered implementation details.\n(Also note that HelpFormatter and RawDescriptionHelpFormatter are only\nconsidered public as object names -- the API of the formatter objects is\nstill considered an implementation detail.)\n\"\"\"\n\n__version__='1.1'\n__all__=[\n'ArgumentParser',\n'ArgumentError',\n'ArgumentTypeError',\n'BooleanOptionalAction',\n'FileType',\n'HelpFormatter',\n'ArgumentDefaultsHelpFormatter',\n'RawDescriptionHelpFormatter',\n'RawTextHelpFormatter',\n'MetavarTypeHelpFormatter',\n'Namespace',\n'Action',\n'ONE_OR_MORE',\n'OPTIONAL',\n'PARSER',\n'REMAINDER',\n'SUPPRESS',\n'ZERO_OR_MORE',\n]\n\n\nimport os as _os\nimport re as _re\nimport sys as _sys\n\nfrom gettext import gettext as _,ngettext\n\nSUPPRESS='==SUPPRESS=='\n\nOPTIONAL='?'\nZERO_OR_MORE='*'\nONE_OR_MORE='+'\nPARSER='A...'\nREMAINDER='...'\n_UNRECOGNIZED_ARGS_ATTR='_unrecognized_args'\n\n\n\n\n\nclass _AttributeHolder(object):\n ''\n\n\n\n\n\n \n \n def __repr__(self):\n type_name=type(self).__name__\n arg_strings=[]\n star_args={}\n for arg in self._get_args():\n arg_strings.append(repr(arg))\n for name,value in self._get_kwargs():\n if name.isidentifier():\n arg_strings.append('%s=%r'%(name,value))\n else :\n star_args[name]=value\n if star_args:\n arg_strings.append('**%s'%repr(star_args))\n return '%s(%s)'%(type_name,', '.join(arg_strings))\n \n def _get_kwargs(self):\n return list(self.__dict__.items())\n \n def _get_args(self):\n return []\n \n \ndef _copy_items(items):\n if items is None :\n return []\n \n \n \n if type(items)is list:\n return items[:]\n import copy\n return copy.copy(items)\n \n \n \n \n \n \nclass HelpFormatter(object):\n ''\n\n\n\n \n \n def __init__(self,\n prog,\n indent_increment=2,\n max_help_position=24,\n width=None ):\n \n \n if width is None :\n import shutil\n width=shutil.get_terminal_size().columns\n width -=2\n \n self._prog=prog\n self._indent_increment=indent_increment\n self._max_help_position=min(max_help_position,\n max(width -20,indent_increment *2))\n self._width=width\n \n self._current_indent=0\n self._level=0\n self._action_max_length=0\n \n self._root_section=self._Section(self,None )\n self._current_section=self._root_section\n \n self._whitespace_matcher=_re.compile(r'\\s+',_re.ASCII)\n self._long_break_matcher=_re.compile(r'\\n\\n\\n+')\n \n \n \n \n def _indent(self):\n self._current_indent +=self._indent_increment\n self._level +=1\n \n def _dedent(self):\n self._current_indent -=self._indent_increment\n assert self._current_indent >=0,'Indent decreased below 0.'\n self._level -=1\n \n class _Section(object):\n \n def __init__(self,formatter,parent,heading=None ):\n self.formatter=formatter\n self.parent=parent\n self.heading=heading\n self.items=[]\n \n def format_help(self):\n \n if self.parent is not None :\n self.formatter._indent()\n join=self.formatter._join_parts\n item_help=join([func(*args)for func,args in self.items])\n if 
self.parent is not None :\n self.formatter._dedent()\n \n \n if not item_help:\n return ''\n \n \n if self.heading is not SUPPRESS and self.heading is not None :\n current_indent=self.formatter._current_indent\n heading='%*s%s:\\n'%(current_indent,'',self.heading)\n else :\n heading=''\n \n \n return join(['\\n',heading,item_help,'\\n'])\n \n def _add_item(self,func,args):\n self._current_section.items.append((func,args))\n \n \n \n \n def start_section(self,heading):\n self._indent()\n section=self._Section(self,self._current_section,heading)\n self._add_item(section.format_help,[])\n self._current_section=section\n \n def end_section(self):\n self._current_section=self._current_section.parent\n self._dedent()\n \n def add_text(self,text):\n if text is not SUPPRESS and text is not None :\n self._add_item(self._format_text,[text])\n \n def add_usage(self,usage,actions,groups,prefix=None ):\n if usage is not SUPPRESS:\n args=usage,actions,groups,prefix\n self._add_item(self._format_usage,args)\n \n def add_argument(self,action):\n if action.help is not SUPPRESS:\n \n \n get_invocation=self._format_action_invocation\n invocations=[get_invocation(action)]\n for subaction in self._iter_indented_subactions(action):\n invocations.append(get_invocation(subaction))\n \n \n invocation_length=max(map(len,invocations))\n action_length=invocation_length+self._current_indent\n self._action_max_length=max(self._action_max_length,\n action_length)\n \n \n self._add_item(self._format_action,[action])\n \n def add_arguments(self,actions):\n for action in actions:\n self.add_argument(action)\n \n \n \n \n def format_help(self):\n help=self._root_section.format_help()\n if help:\n help=self._long_break_matcher.sub('\\n\\n',help)\n help=help.strip('\\n')+'\\n'\n return help\n \n def _join_parts(self,part_strings):\n return ''.join([part\n for part in part_strings\n if part and part is not SUPPRESS])\n \n def _format_usage(self,usage,actions,groups,prefix):\n if prefix is None :\n prefix=_('usage: ')\n \n \n if usage is not None :\n usage=usage %dict(prog=self._prog)\n \n \n elif usage is None and not actions:\n usage='%(prog)s'%dict(prog=self._prog)\n \n \n elif usage is None :\n prog='%(prog)s'%dict(prog=self._prog)\n \n \n optionals=[]\n positionals=[]\n for action in actions:\n if action.option_strings:\n optionals.append(action)\n else :\n positionals.append(action)\n \n \n format=self._format_actions_usage\n action_usage=format(optionals+positionals,groups)\n usage=' '.join([s for s in [prog,action_usage]if s])\n \n \n text_width=self._width -self._current_indent\n if len(prefix)+len(usage)>text_width:\n \n \n part_regexp=(\n r'\\(.*?\\)+(?=\\s|$)|'\n r'\\[.*?\\]+(?=\\s|$)|'\n r'\\S+'\n )\n opt_usage=format(optionals,groups)\n pos_usage=format(positionals,groups)\n opt_parts=_re.findall(part_regexp,opt_usage)\n pos_parts=_re.findall(part_regexp,pos_usage)\n assert ' '.join(opt_parts)==opt_usage\n assert ' '.join(pos_parts)==pos_usage\n \n \n def get_lines(parts,indent,prefix=None ):\n lines=[]\n line=[]\n if prefix is not None :\n line_len=len(prefix)-1\n else :\n line_len=len(indent)-1\n for part in parts:\n if line_len+1+len(part)>text_width and line:\n lines.append(indent+' '.join(line))\n line=[]\n line_len=len(indent)-1\n line.append(part)\n line_len +=len(part)+1\n if line:\n lines.append(indent+' '.join(line))\n if prefix is not None :\n lines[0]=lines[0][len(indent):]\n return lines\n \n \n if len(prefix)+len(prog)<=0.75 *text_width:\n indent=' '*(len(prefix)+len(prog)+1)\n if opt_parts:\n 
lines=get_lines([prog]+opt_parts,indent,prefix)\n lines.extend(get_lines(pos_parts,indent))\n elif pos_parts:\n lines=get_lines([prog]+pos_parts,indent,prefix)\n else :\n lines=[prog]\n \n \n else :\n indent=' '*len(prefix)\n parts=opt_parts+pos_parts\n lines=get_lines(parts,indent)\n if len(lines)>1:\n lines=[]\n lines.extend(get_lines(opt_parts,indent))\n lines.extend(get_lines(pos_parts,indent))\n lines=[prog]+lines\n \n \n usage='\\n'.join(lines)\n \n \n return '%s%s\\n\\n'%(prefix,usage)\n \n def _format_actions_usage(self,actions,groups):\n \n group_actions=set()\n inserts={}\n for group in groups:\n try :\n start=actions.index(group._group_actions[0])\n except ValueError:\n continue\n else :\n end=start+len(group._group_actions)\n if actions[start:end]==group._group_actions:\n for action in group._group_actions:\n group_actions.add(action)\n if not group.required:\n if start in inserts:\n inserts[start]+=' ['\n else :\n inserts[start]='['\n if end in inserts:\n inserts[end]+=']'\n else :\n inserts[end]=']'\n else :\n if start in inserts:\n inserts[start]+=' ('\n else :\n inserts[start]='('\n if end in inserts:\n inserts[end]+=')'\n else :\n inserts[end]=')'\n for i in range(start+1,end):\n inserts[i]='|'\n \n \n parts=[]\n for i,action in enumerate(actions):\n \n \n \n if action.help is SUPPRESS:\n parts.append(None )\n if inserts.get(i)=='|':\n inserts.pop(i)\n elif inserts.get(i+1)=='|':\n inserts.pop(i+1)\n \n \n elif not action.option_strings:\n default=self._get_default_metavar_for_positional(action)\n part=self._format_args(action,default)\n \n \n if action in group_actions:\n if part[0]=='['and part[-1]==']':\n part=part[1:-1]\n \n \n parts.append(part)\n \n \n else :\n option_string=action.option_strings[0]\n \n \n \n if action.nargs ==0:\n part=action.format_usage()\n \n \n \n else :\n default=self._get_default_metavar_for_optional(action)\n args_string=self._format_args(action,default)\n part='%s %s'%(option_string,args_string)\n \n \n if not action.required and action not in group_actions:\n part='[%s]'%part\n \n \n parts.append(part)\n \n \n for i in sorted(inserts,reverse=True ):\n parts[i:i]=[inserts[i]]\n \n \n text=' '.join([item for item in parts if item is not None ])\n \n \n open=r'[\\[(]'\n close=r'[\\])]'\n text=_re.sub(r'(%s) '%open,r'\\1',text)\n text=_re.sub(r' (%s)'%close,r'\\1',text)\n text=_re.sub(r'%s *%s'%(open,close),r'',text)\n text=_re.sub(r'\\(([^|]*)\\)',r'\\1',text)\n text=text.strip()\n \n \n return text\n \n def _format_text(self,text):\n if '%(prog)'in text:\n text=text %dict(prog=self._prog)\n text_width=max(self._width -self._current_indent,11)\n indent=' '*self._current_indent\n return self._fill_text(text,text_width,indent)+'\\n\\n'\n \n def _format_action(self,action):\n \n help_position=min(self._action_max_length+2,\n self._max_help_position)\n help_width=max(self._width -help_position,11)\n action_width=help_position -self._current_indent -2\n action_header=self._format_action_invocation(action)\n \n \n if not action.help:\n tup=self._current_indent,'',action_header\n action_header='%*s%s\\n'%tup\n \n \n elif len(action_header)<=action_width:\n tup=self._current_indent,'',action_width,action_header\n action_header='%*s%-*s '%tup\n indent_first=0\n \n \n else :\n tup=self._current_indent,'',action_header\n action_header='%*s%s\\n'%tup\n indent_first=help_position\n \n \n parts=[action_header]\n \n \n if action.help:\n help_text=self._expand_help(action)\n help_lines=self._split_lines(help_text,help_width)\n 
parts.append('%*s%s\\n'%(indent_first,'',help_lines[0]))\n for line in help_lines[1:]:\n parts.append('%*s%s\\n'%(help_position,'',line))\n \n \n elif not action_header.endswith('\\n'):\n parts.append('\\n')\n \n \n for subaction in self._iter_indented_subactions(action):\n parts.append(self._format_action(subaction))\n \n \n return self._join_parts(parts)\n \n def _format_action_invocation(self,action):\n if not action.option_strings:\n default=self._get_default_metavar_for_positional(action)\n metavar,=self._metavar_formatter(action,default)(1)\n return metavar\n \n else :\n parts=[]\n \n \n \n if action.nargs ==0:\n parts.extend(action.option_strings)\n \n \n \n else :\n default=self._get_default_metavar_for_optional(action)\n args_string=self._format_args(action,default)\n for option_string in action.option_strings:\n parts.append('%s %s'%(option_string,args_string))\n \n return ', '.join(parts)\n \n def _metavar_formatter(self,action,default_metavar):\n if action.metavar is not None :\n result=action.metavar\n elif action.choices is not None :\n choice_strs=[str(choice)for choice in action.choices]\n result='{%s}'%','.join(choice_strs)\n else :\n result=default_metavar\n \n def format(tuple_size):\n if isinstance(result,tuple):\n return result\n else :\n return (result,)*tuple_size\n return format\n \n def _format_args(self,action,default_metavar):\n get_metavar=self._metavar_formatter(action,default_metavar)\n if action.nargs is None :\n result='%s'%get_metavar(1)\n elif action.nargs ==OPTIONAL:\n result='[%s]'%get_metavar(1)\n elif action.nargs ==ZERO_OR_MORE:\n metavar=get_metavar(1)\n if len(metavar)==2:\n result='[%s [%s ...]]'%metavar\n else :\n result='[%s ...]'%metavar\n elif action.nargs ==ONE_OR_MORE:\n result='%s [%s ...]'%get_metavar(2)\n elif action.nargs ==REMAINDER:\n result='...'\n elif action.nargs ==PARSER:\n result='%s ...'%get_metavar(1)\n elif action.nargs ==SUPPRESS:\n result=''\n else :\n try :\n formats=['%s'for _ in range(action.nargs)]\n except TypeError:\n raise ValueError(\"invalid nargs value\")from None\n result=' '.join(formats)%get_metavar(action.nargs)\n return result\n \n def _expand_help(self,action):\n params=dict(vars(action),prog=self._prog)\n for name in list(params):\n if params[name]is SUPPRESS:\n del params[name]\n for name in list(params):\n if hasattr(params[name],'__name__'):\n params[name]=params[name].__name__\n if params.get('choices')is not None :\n choices_str=', '.join([str(c)for c in params['choices']])\n params['choices']=choices_str\n return self._get_help_string(action)%params\n \n def _iter_indented_subactions(self,action):\n try :\n get_subactions=action._get_subactions\n except AttributeError:\n pass\n else :\n self._indent()\n yield from get_subactions()\n self._dedent()\n \n def _split_lines(self,text,width):\n text=self._whitespace_matcher.sub(' ',text).strip()\n \n \n import textwrap\n return textwrap.wrap(text,width)\n \n def _fill_text(self,text,width,indent):\n text=self._whitespace_matcher.sub(' ',text).strip()\n import textwrap\n return textwrap.fill(text,width,\n initial_indent=indent,\n subsequent_indent=indent)\n \n def _get_help_string(self,action):\n return action.help\n \n def _get_default_metavar_for_optional(self,action):\n return action.dest.upper()\n \n def _get_default_metavar_for_positional(self,action):\n return action.dest\n \n \nclass RawDescriptionHelpFormatter(HelpFormatter):\n ''\n\n\n\n \n \n def _fill_text(self,text,width,indent):\n return ''.join(indent+line for line in 
text.splitlines(keepends=True ))\n \n \nclass RawTextHelpFormatter(RawDescriptionHelpFormatter):\n ''\n\n\n\n \n \n def _split_lines(self,text,width):\n return text.splitlines()\n \n \nclass ArgumentDefaultsHelpFormatter(HelpFormatter):\n ''\n\n\n\n \n \n def _get_help_string(self,action):\n help=action.help\n if '%(default)'not in action.help:\n if action.default is not SUPPRESS:\n defaulting_nargs=[OPTIONAL,ZERO_OR_MORE]\n if action.option_strings or action.nargs in defaulting_nargs:\n help +=' (default: %(default)s)'\n return help\n \n \nclass MetavarTypeHelpFormatter(HelpFormatter):\n ''\n\n\n\n\n \n \n def _get_default_metavar_for_optional(self,action):\n return action.type.__name__\n \n def _get_default_metavar_for_positional(self,action):\n return action.type.__name__\n \n \n \n \n \n \n \ndef _get_action_name(argument):\n if argument is None :\n return None\n elif argument.option_strings:\n return '/'.join(argument.option_strings)\n elif argument.metavar not in (None ,SUPPRESS):\n return argument.metavar\n elif argument.dest not in (None ,SUPPRESS):\n return argument.dest\n else :\n return None\n \n \nclass ArgumentError(Exception):\n ''\n\n\n\n \n \n def __init__(self,argument,message):\n self.argument_name=_get_action_name(argument)\n self.message=message\n \n def __str__(self):\n if self.argument_name is None :\n format='%(message)s'\n else :\n format='argument %(argument_name)s: %(message)s'\n return format %dict(message=self.message,\n argument_name=self.argument_name)\n \n \nclass ArgumentTypeError(Exception):\n ''\n pass\n \n \n \n \n \n \nclass Action(_AttributeHolder):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def __init__(self,\n option_strings,\n dest,\n nargs=None ,\n const=None ,\n default=None ,\n type=None ,\n choices=None ,\n required=False ,\n help=None ,\n metavar=None ):\n self.option_strings=option_strings\n self.dest=dest\n self.nargs=nargs\n self.const=const\n self.default=default\n self.type=type\n self.choices=choices\n self.required=required\n self.help=help\n self.metavar=metavar\n \n def _get_kwargs(self):\n names=[\n 'option_strings',\n 'dest',\n 'nargs',\n 'const',\n 'default',\n 'type',\n 'choices',\n 'help',\n 'metavar',\n ]\n return [(name,getattr(self,name))for name in names]\n \n def format_usage(self):\n return self.option_strings[0]\n \n def __call__(self,parser,namespace,values,option_string=None ):\n raise NotImplementedError(_('.__call__() not defined'))\n \nclass BooleanOptionalAction(Action):\n def __init__(self,\n option_strings,\n dest,\n default=None ,\n type=None ,\n choices=None ,\n required=False ,\n help=None ,\n metavar=None ):\n \n _option_strings=[]\n for option_string in option_strings:\n _option_strings.append(option_string)\n \n if option_string.startswith('--'):\n option_string='--no-'+option_string[2:]\n _option_strings.append(option_string)\n \n if help is not None and default is not None :\n help +=f\" (default: {default})\"\n \n super().__init__(\n option_strings=_option_strings,\n dest=dest,\n nargs=0,\n default=default,\n type=type,\n choices=choices,\n required=required,\n help=help,\n metavar=metavar)\n \n def __call__(self,parser,namespace,values,option_string=None ):\n if option_string in self.option_strings:\n setattr(namespace,self.dest,not option_string.startswith('--no-'))\n \n def format_usage(self):\n return ' | '.join(self.option_strings)\n \n \nclass _StoreAction(Action):\n\n def __init__(self,\n option_strings,\n dest,\n nargs=None ,\n 
const=None ,\n default=None ,\n type=None ,\n choices=None ,\n required=False ,\n help=None ,\n metavar=None ):\n if nargs ==0:\n raise ValueError('nargs for store actions must be != 0; if you '\n 'have nothing to store, actions such as store '\n 'true or store const may be more appropriate')\n if const is not None and nargs !=OPTIONAL:\n raise ValueError('nargs must be %r to supply const'%OPTIONAL)\n super(_StoreAction,self).__init__(\n option_strings=option_strings,\n dest=dest,\n nargs=nargs,\n const=const,\n default=default,\n type=type,\n choices=choices,\n required=required,\n help=help,\n metavar=metavar)\n \n def __call__(self,parser,namespace,values,option_string=None ):\n setattr(namespace,self.dest,values)\n \n \nclass _StoreConstAction(Action):\n\n def __init__(self,\n option_strings,\n dest,\n const,\n default=None ,\n required=False ,\n help=None ,\n metavar=None ):\n super(_StoreConstAction,self).__init__(\n option_strings=option_strings,\n dest=dest,\n nargs=0,\n const=const,\n default=default,\n required=required,\n help=help)\n \n def __call__(self,parser,namespace,values,option_string=None ):\n setattr(namespace,self.dest,self.const)\n \n \nclass _StoreTrueAction(_StoreConstAction):\n\n def __init__(self,\n option_strings,\n dest,\n default=False ,\n required=False ,\n help=None ):\n super(_StoreTrueAction,self).__init__(\n option_strings=option_strings,\n dest=dest,\n const=True ,\n default=default,\n required=required,\n help=help)\n \n \nclass _StoreFalseAction(_StoreConstAction):\n\n def __init__(self,\n option_strings,\n dest,\n default=True ,\n required=False ,\n help=None ):\n super(_StoreFalseAction,self).__init__(\n option_strings=option_strings,\n dest=dest,\n const=False ,\n default=default,\n required=required,\n help=help)\n \n \nclass _AppendAction(Action):\n\n def __init__(self,\n option_strings,\n dest,\n nargs=None ,\n const=None ,\n default=None ,\n type=None ,\n choices=None ,\n required=False ,\n help=None ,\n metavar=None ):\n if nargs ==0:\n raise ValueError('nargs for append actions must be != 0; if arg '\n 'strings are not supplying the value to append, '\n 'the append const action may be more appropriate')\n if const is not None and nargs !=OPTIONAL:\n raise ValueError('nargs must be %r to supply const'%OPTIONAL)\n super(_AppendAction,self).__init__(\n option_strings=option_strings,\n dest=dest,\n nargs=nargs,\n const=const,\n default=default,\n type=type,\n choices=choices,\n required=required,\n help=help,\n metavar=metavar)\n \n def __call__(self,parser,namespace,values,option_string=None ):\n items=getattr(namespace,self.dest,None )\n items=_copy_items(items)\n items.append(values)\n setattr(namespace,self.dest,items)\n \n \nclass _AppendConstAction(Action):\n\n def __init__(self,\n option_strings,\n dest,\n const,\n default=None ,\n required=False ,\n help=None ,\n metavar=None ):\n super(_AppendConstAction,self).__init__(\n option_strings=option_strings,\n dest=dest,\n nargs=0,\n const=const,\n default=default,\n required=required,\n help=help,\n metavar=metavar)\n \n def __call__(self,parser,namespace,values,option_string=None ):\n items=getattr(namespace,self.dest,None )\n items=_copy_items(items)\n items.append(self.const)\n setattr(namespace,self.dest,items)\n \n \nclass _CountAction(Action):\n\n def __init__(self,\n option_strings,\n dest,\n default=None ,\n required=False ,\n help=None ):\n super(_CountAction,self).__init__(\n option_strings=option_strings,\n dest=dest,\n nargs=0,\n default=default,\n required=required,\n help=help)\n 
\n def __call__(self,parser,namespace,values,option_string=None ):\n count=getattr(namespace,self.dest,None )\n if count is None :\n count=0\n setattr(namespace,self.dest,count+1)\n \n \nclass _HelpAction(Action):\n\n def __init__(self,\n option_strings,\n dest=SUPPRESS,\n default=SUPPRESS,\n help=None ):\n super(_HelpAction,self).__init__(\n option_strings=option_strings,\n dest=dest,\n default=default,\n nargs=0,\n help=help)\n \n def __call__(self,parser,namespace,values,option_string=None ):\n parser.print_help()\n parser.exit()\n \n \nclass _VersionAction(Action):\n\n def __init__(self,\n option_strings,\n version=None ,\n dest=SUPPRESS,\n default=SUPPRESS,\n help=\"show program's version number and exit\"):\n super(_VersionAction,self).__init__(\n option_strings=option_strings,\n dest=dest,\n default=default,\n nargs=0,\n help=help)\n self.version=version\n \n def __call__(self,parser,namespace,values,option_string=None ):\n version=self.version\n if version is None :\n version=parser.version\n formatter=parser._get_formatter()\n formatter.add_text(version)\n parser._print_message(formatter.format_help(),_sys.stdout)\n parser.exit()\n \n \nclass _SubParsersAction(Action):\n\n class _ChoicesPseudoAction(Action):\n \n def __init__(self,name,aliases,help):\n metavar=dest=name\n if aliases:\n metavar +=' (%s)'%', '.join(aliases)\n sup=super(_SubParsersAction._ChoicesPseudoAction,self)\n sup.__init__(option_strings=[],dest=dest,help=help,\n metavar=metavar)\n \n def __init__(self,\n option_strings,\n prog,\n parser_class,\n dest=SUPPRESS,\n required=False ,\n help=None ,\n metavar=None ):\n \n self._prog_prefix=prog\n self._parser_class=parser_class\n self._name_parser_map={}\n self._choices_actions=[]\n \n super(_SubParsersAction,self).__init__(\n option_strings=option_strings,\n dest=dest,\n nargs=PARSER,\n choices=self._name_parser_map,\n required=required,\n help=help,\n metavar=metavar)\n \n def add_parser(self,name,**kwargs):\n \n if kwargs.get('prog')is None :\n kwargs['prog']='%s %s'%(self._prog_prefix,name)\n \n aliases=kwargs.pop('aliases',())\n \n \n if 'help'in kwargs:\n help=kwargs.pop('help')\n choice_action=self._ChoicesPseudoAction(name,aliases,help)\n self._choices_actions.append(choice_action)\n \n \n parser=self._parser_class(**kwargs)\n self._name_parser_map[name]=parser\n \n \n for alias in aliases:\n self._name_parser_map[alias]=parser\n \n return parser\n \n def _get_subactions(self):\n return self._choices_actions\n \n def __call__(self,parser,namespace,values,option_string=None ):\n parser_name=values[0]\n arg_strings=values[1:]\n \n \n if self.dest is not SUPPRESS:\n setattr(namespace,self.dest,parser_name)\n \n \n try :\n parser=self._name_parser_map[parser_name]\n except KeyError:\n args={'parser_name':parser_name,\n 'choices':', '.join(self._name_parser_map)}\n msg=_('unknown parser %(parser_name)r (choices: %(choices)s)')%args\n raise ArgumentError(self,msg)\n \n \n \n \n \n \n \n \n subnamespace,arg_strings=parser.parse_known_args(arg_strings,None )\n for key,value in vars(subnamespace).items():\n setattr(namespace,key,value)\n \n if arg_strings:\n vars(namespace).setdefault(_UNRECOGNIZED_ARGS_ATTR,[])\n getattr(namespace,_UNRECOGNIZED_ARGS_ATTR).extend(arg_strings)\n \nclass _ExtendAction(_AppendAction):\n def __call__(self,parser,namespace,values,option_string=None ):\n items=getattr(namespace,self.dest,None )\n items=_copy_items(items)\n items.extend(values)\n setattr(namespace,self.dest,items)\n \n \n \n \n \nclass FileType(object):\n 
''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def __init__(self,mode='r',bufsize=-1,encoding=None ,errors=None ):\n self._mode=mode\n self._bufsize=bufsize\n self._encoding=encoding\n self._errors=errors\n \n def __call__(self,string):\n \n if string =='-':\n if 'r'in self._mode:\n return _sys.stdin\n elif 'w'in self._mode:\n return _sys.stdout\n else :\n msg=_('argument \"-\" with mode %r')%self._mode\n raise ValueError(msg)\n \n \n try :\n return open(string,self._mode,self._bufsize,self._encoding,\n self._errors)\n except OSError as e:\n args={'filename':string,'error':e}\n message=_(\"can't open '%(filename)s': %(error)s\")\n raise ArgumentTypeError(message %args)\n \n def __repr__(self):\n args=self._mode,self._bufsize\n kwargs=[('encoding',self._encoding),('errors',self._errors)]\n args_str=', '.join([repr(arg)for arg in args if arg !=-1]+\n ['%s=%r'%(kw,arg)for kw,arg in kwargs\n if arg is not None ])\n return '%s(%s)'%(type(self).__name__,args_str)\n \n \n \n \n \nclass Namespace(_AttributeHolder):\n ''\n\n\n\n \n \n def __init__(self,**kwargs):\n for name in kwargs:\n setattr(self,name,kwargs[name])\n \n def __eq__(self,other):\n if not isinstance(other,Namespace):\n return NotImplemented\n return vars(self)==vars(other)\n \n def __contains__(self,key):\n return key in self.__dict__\n \n \nclass _ActionsContainer(object):\n\n def __init__(self,\n description,\n prefix_chars,\n argument_default,\n conflict_handler):\n super(_ActionsContainer,self).__init__()\n \n self.description=description\n self.argument_default=argument_default\n self.prefix_chars=prefix_chars\n self.conflict_handler=conflict_handler\n \n \n self._registries={}\n \n \n self.register('action',None ,_StoreAction)\n self.register('action','store',_StoreAction)\n self.register('action','store_const',_StoreConstAction)\n self.register('action','store_true',_StoreTrueAction)\n self.register('action','store_false',_StoreFalseAction)\n self.register('action','append',_AppendAction)\n self.register('action','append_const',_AppendConstAction)\n self.register('action','count',_CountAction)\n self.register('action','help',_HelpAction)\n self.register('action','version',_VersionAction)\n self.register('action','parsers',_SubParsersAction)\n self.register('action','extend',_ExtendAction)\n \n \n self._get_handler()\n \n \n self._actions=[]\n self._option_string_actions={}\n \n \n self._action_groups=[]\n self._mutually_exclusive_groups=[]\n \n \n self._defaults={}\n \n \n self._negative_number_matcher=_re.compile(r'^-\\d+$|^-\\d*\\.\\d+$')\n \n \n \n self._has_negative_number_optionals=[]\n \n \n \n \n def register(self,registry_name,value,object):\n registry=self._registries.setdefault(registry_name,{})\n registry[value]=object\n \n def _registry_get(self,registry_name,value,default=None ):\n return self._registries[registry_name].get(value,default)\n \n \n \n \n def set_defaults(self,**kwargs):\n self._defaults.update(kwargs)\n \n \n \n for action in self._actions:\n if action.dest in kwargs:\n action.default=kwargs[action.dest]\n \n def get_default(self,dest):\n for action in self._actions:\n if action.dest ==dest and action.default is not None :\n return action.default\n return self._defaults.get(dest,None )\n \n \n \n \n \n def add_argument(self,*args,**kwargs):\n ''\n\n\n \n \n \n \n \n chars=self.prefix_chars\n if not args or len(args)==1 and args[0][0]not in chars:\n if args and 'dest'in kwargs:\n raise ValueError('dest supplied twice for positional argument')\n kwargs=self._get_positional_kwargs(*args,**kwargs)\n \n \n else 
:\n kwargs=self._get_optional_kwargs(*args,**kwargs)\n \n \n if 'default'not in kwargs:\n dest=kwargs['dest']\n if dest in self._defaults:\n kwargs['default']=self._defaults[dest]\n elif self.argument_default is not None :\n kwargs['default']=self.argument_default\n \n \n action_class=self._pop_action_class(kwargs)\n if not callable(action_class):\n raise ValueError('unknown action \"%s\"'%(action_class,))\n action=action_class(**kwargs)\n \n \n type_func=self._registry_get('type',action.type,action.type)\n if not callable(type_func):\n raise ValueError('%r is not callable'%(type_func,))\n \n if type_func is FileType:\n raise ValueError('%r is a FileType class object, instance of it'\n ' must be passed'%(type_func,))\n \n \n if hasattr(self,\"_get_formatter\"):\n try :\n self._get_formatter()._format_args(action,None )\n except TypeError:\n raise ValueError(\"length of metavar tuple does not match nargs\")\n \n return self._add_action(action)\n \n def add_argument_group(self,*args,**kwargs):\n group=_ArgumentGroup(self,*args,**kwargs)\n self._action_groups.append(group)\n return group\n \n def add_mutually_exclusive_group(self,**kwargs):\n group=_MutuallyExclusiveGroup(self,**kwargs)\n self._mutually_exclusive_groups.append(group)\n return group\n \n def _add_action(self,action):\n \n self._check_conflict(action)\n \n \n self._actions.append(action)\n action.container=self\n \n \n for option_string in action.option_strings:\n self._option_string_actions[option_string]=action\n \n \n for option_string in action.option_strings:\n if self._negative_number_matcher.match(option_string):\n if not self._has_negative_number_optionals:\n self._has_negative_number_optionals.append(True )\n \n \n return action\n \n def _remove_action(self,action):\n self._actions.remove(action)\n \n def _add_container_actions(self,container):\n \n title_group_map={}\n for group in self._action_groups:\n if group.title in title_group_map:\n msg=_('cannot merge actions - two groups are named %r')\n raise ValueError(msg %(group.title))\n title_group_map[group.title]=group\n \n \n group_map={}\n for group in container._action_groups:\n \n \n \n if group.title not in title_group_map:\n title_group_map[group.title]=self.add_argument_group(\n title=group.title,\n description=group.description,\n conflict_handler=group.conflict_handler)\n \n \n for action in group._group_actions:\n group_map[action]=title_group_map[group.title]\n \n \n \n \n for group in container._mutually_exclusive_groups:\n mutex_group=self.add_mutually_exclusive_group(\n required=group.required)\n \n \n for action in group._group_actions:\n group_map[action]=mutex_group\n \n \n for action in container._actions:\n group_map.get(action,self)._add_action(action)\n \n def _get_positional_kwargs(self,dest,**kwargs):\n \n if 'required'in kwargs:\n msg=_(\"'required' is an invalid argument for positionals\")\n raise TypeError(msg)\n \n \n \n if kwargs.get('nargs')not in [OPTIONAL,ZERO_OR_MORE]:\n kwargs['required']=True\n if kwargs.get('nargs')==ZERO_OR_MORE and 'default'not in kwargs:\n kwargs['required']=True\n \n \n return dict(kwargs,dest=dest,option_strings=[])\n \n def _get_optional_kwargs(self,*args,**kwargs):\n \n option_strings=[]\n long_option_strings=[]\n for option_string in args:\n \n if not option_string[0]in self.prefix_chars:\n args={'option':option_string,\n 'prefix_chars':self.prefix_chars}\n msg=_('invalid option string %(option)r: '\n 'must start with a character %(prefix_chars)r')\n raise ValueError(msg %args)\n \n \n 
option_strings.append(option_string)\n if len(option_string)>1 and option_string[1]in self.prefix_chars:\n long_option_strings.append(option_string)\n \n \n dest=kwargs.pop('dest',None )\n if dest is None :\n if long_option_strings:\n dest_option_string=long_option_strings[0]\n else :\n dest_option_string=option_strings[0]\n dest=dest_option_string.lstrip(self.prefix_chars)\n if not dest:\n msg=_('dest= is required for options like %r')\n raise ValueError(msg %option_string)\n dest=dest.replace('-','_')\n \n \n return dict(kwargs,dest=dest,option_strings=option_strings)\n \n def _pop_action_class(self,kwargs,default=None ):\n action=kwargs.pop('action',default)\n return self._registry_get('action',action,action)\n \n def _get_handler(self):\n \n handler_func_name='_handle_conflict_%s'%self.conflict_handler\n try :\n return getattr(self,handler_func_name)\n except AttributeError:\n msg=_('invalid conflict_resolution value: %r')\n raise ValueError(msg %self.conflict_handler)\n \n def _check_conflict(self,action):\n \n \n confl_optionals=[]\n for option_string in action.option_strings:\n if option_string in self._option_string_actions:\n confl_optional=self._option_string_actions[option_string]\n confl_optionals.append((option_string,confl_optional))\n \n \n if confl_optionals:\n conflict_handler=self._get_handler()\n conflict_handler(action,confl_optionals)\n \n def _handle_conflict_error(self,action,conflicting_actions):\n message=ngettext('conflicting option string: %s',\n 'conflicting option strings: %s',\n len(conflicting_actions))\n conflict_string=', '.join([option_string\n for option_string,action\n in conflicting_actions])\n raise ArgumentError(action,message %conflict_string)\n \n def _handle_conflict_resolve(self,action,conflicting_actions):\n \n \n for option_string,action in conflicting_actions:\n \n \n action.option_strings.remove(option_string)\n self._option_string_actions.pop(option_string,None )\n \n \n \n if not action.option_strings:\n action.container._remove_action(action)\n \n \nclass _ArgumentGroup(_ActionsContainer):\n\n def __init__(self,container,title=None ,description=None ,**kwargs):\n \n update=kwargs.setdefault\n update('conflict_handler',container.conflict_handler)\n update('prefix_chars',container.prefix_chars)\n update('argument_default',container.argument_default)\n super_init=super(_ArgumentGroup,self).__init__\n super_init(description=description,**kwargs)\n \n \n self.title=title\n self._group_actions=[]\n \n \n self._registries=container._registries\n self._actions=container._actions\n self._option_string_actions=container._option_string_actions\n self._defaults=container._defaults\n self._has_negative_number_optionals=\\\n container._has_negative_number_optionals\n self._mutually_exclusive_groups=container._mutually_exclusive_groups\n \n def _add_action(self,action):\n action=super(_ArgumentGroup,self)._add_action(action)\n self._group_actions.append(action)\n return action\n \n def _remove_action(self,action):\n super(_ArgumentGroup,self)._remove_action(action)\n self._group_actions.remove(action)\n \n \nclass _MutuallyExclusiveGroup(_ArgumentGroup):\n\n def __init__(self,container,required=False ):\n super(_MutuallyExclusiveGroup,self).__init__(container)\n self.required=required\n self._container=container\n \n def _add_action(self,action):\n if action.required:\n msg=_('mutually exclusive arguments must be optional')\n raise ValueError(msg)\n action=self._container._add_action(action)\n self._group_actions.append(action)\n return action\n \n def 
_remove_action(self,action):\n self._container._remove_action(action)\n self._group_actions.remove(action)\n \n \nclass ArgumentParser(_AttributeHolder,_ActionsContainer):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def __init__(self,\n prog=None ,\n usage=None ,\n description=None ,\n epilog=None ,\n parents=[],\n formatter_class=HelpFormatter,\n prefix_chars='-',\n fromfile_prefix_chars=None ,\n argument_default=None ,\n conflict_handler='error',\n add_help=True ,\n allow_abbrev=True ,\n exit_on_error=True ):\n \n superinit=super(ArgumentParser,self).__init__\n superinit(description=description,\n prefix_chars=prefix_chars,\n argument_default=argument_default,\n conflict_handler=conflict_handler)\n \n \n if prog is None :\n prog=_os.path.basename(_sys.argv[0])\n \n self.prog=prog\n self.usage=usage\n self.epilog=epilog\n self.formatter_class=formatter_class\n self.fromfile_prefix_chars=fromfile_prefix_chars\n self.add_help=add_help\n self.allow_abbrev=allow_abbrev\n self.exit_on_error=exit_on_error\n \n add_group=self.add_argument_group\n self._positionals=add_group(_('positional arguments'))\n self._optionals=add_group(_('optional arguments'))\n self._subparsers=None\n \n \n def identity(string):\n return string\n self.register('type',None ,identity)\n \n \n \n default_prefix='-'if '-'in prefix_chars else prefix_chars[0]\n if self.add_help:\n self.add_argument(\n default_prefix+'h',default_prefix *2+'help',\n action='help',default=SUPPRESS,\n help=_('show this help message and exit'))\n \n \n for parent in parents:\n self._add_container_actions(parent)\n try :\n defaults=parent._defaults\n except AttributeError:\n pass\n else :\n self._defaults.update(defaults)\n \n \n \n \n def _get_kwargs(self):\n names=[\n 'prog',\n 'usage',\n 'description',\n 'formatter_class',\n 'conflict_handler',\n 'add_help',\n ]\n return [(name,getattr(self,name))for name in names]\n \n \n \n \n def add_subparsers(self,**kwargs):\n if self._subparsers is not None :\n self.error(_('cannot have multiple subparser arguments'))\n \n \n kwargs.setdefault('parser_class',type(self))\n \n if 'title'in kwargs or 'description'in kwargs:\n title=_(kwargs.pop('title','subcommands'))\n description=_(kwargs.pop('description',None ))\n self._subparsers=self.add_argument_group(title,description)\n else :\n self._subparsers=self._positionals\n \n \n \n if kwargs.get('prog')is None :\n formatter=self._get_formatter()\n positionals=self._get_positional_actions()\n groups=self._mutually_exclusive_groups\n formatter.add_usage(self.usage,positionals,groups,'')\n kwargs['prog']=formatter.format_help().strip()\n \n \n parsers_class=self._pop_action_class(kwargs,'parsers')\n action=parsers_class(option_strings=[],**kwargs)\n self._subparsers._add_action(action)\n \n \n return action\n \n def _add_action(self,action):\n if action.option_strings:\n self._optionals._add_action(action)\n else :\n self._positionals._add_action(action)\n return action\n \n def _get_optional_actions(self):\n return [action\n for action in self._actions\n if action.option_strings]\n \n def _get_positional_actions(self):\n return [action\n for action in self._actions\n if not action.option_strings]\n \n \n \n \n def parse_args(self,args=None ,namespace=None ):\n args,argv=self.parse_known_args(args,namespace)\n if argv:\n msg=_('unrecognized arguments: %s')\n self.error(msg %' '.join(argv))\n return args\n \n def parse_known_args(self,args=None ,namespace=None ):\n if args is None :\n \n args=_sys.argv[1:]\n else :\n \n args=list(args)\n \n \n if namespace 
is None :\n namespace=Namespace()\n \n \n for action in self._actions:\n if action.dest is not SUPPRESS:\n if not hasattr(namespace,action.dest):\n if action.default is not SUPPRESS:\n setattr(namespace,action.dest,action.default)\n \n \n for dest in self._defaults:\n if not hasattr(namespace,dest):\n setattr(namespace,dest,self._defaults[dest])\n \n \n if self.exit_on_error:\n try :\n namespace,args=self._parse_known_args(args,namespace)\n except ArgumentError:\n err=_sys.exc_info()[1]\n self.error(str(err))\n else :\n namespace,args=self._parse_known_args(args,namespace)\n \n if hasattr(namespace,_UNRECOGNIZED_ARGS_ATTR):\n args.extend(getattr(namespace,_UNRECOGNIZED_ARGS_ATTR))\n delattr(namespace,_UNRECOGNIZED_ARGS_ATTR)\n return namespace,args\n \n def _parse_known_args(self,arg_strings,namespace):\n \n if self.fromfile_prefix_chars is not None :\n arg_strings=self._read_args_from_files(arg_strings)\n \n \n \n action_conflicts={}\n for mutex_group in self._mutually_exclusive_groups:\n group_actions=mutex_group._group_actions\n for i,mutex_action in enumerate(mutex_group._group_actions):\n conflicts=action_conflicts.setdefault(mutex_action,[])\n conflicts.extend(group_actions[:i])\n conflicts.extend(group_actions[i+1:])\n \n \n \n \n option_string_indices={}\n arg_string_pattern_parts=[]\n arg_strings_iter=iter(arg_strings)\n for i,arg_string in enumerate(arg_strings_iter):\n \n \n if arg_string =='--':\n arg_string_pattern_parts.append('-')\n for arg_string in arg_strings_iter:\n arg_string_pattern_parts.append('A')\n \n \n \n else :\n option_tuple=self._parse_optional(arg_string)\n if option_tuple is None :\n pattern='A'\n else :\n option_string_indices[i]=option_tuple\n pattern='O'\n arg_string_pattern_parts.append(pattern)\n \n \n arg_strings_pattern=''.join(arg_string_pattern_parts)\n \n \n seen_actions=set()\n seen_non_default_actions=set()\n \n def take_action(action,argument_strings,option_string=None ):\n seen_actions.add(action)\n argument_values=self._get_values(action,argument_strings)\n \n \n \n \n if argument_values is not action.default:\n seen_non_default_actions.add(action)\n for conflict_action in action_conflicts.get(action,[]):\n if conflict_action in seen_non_default_actions:\n msg=_('not allowed with argument %s')\n action_name=_get_action_name(conflict_action)\n raise ArgumentError(action,msg %action_name)\n \n \n \n if argument_values is not SUPPRESS:\n action(self,namespace,argument_values,option_string)\n \n \n def consume_optional(start_index):\n \n \n option_tuple=option_string_indices[start_index]\n action,option_string,explicit_arg=option_tuple\n \n \n \n match_argument=self._match_argument\n action_tuples=[]\n while True :\n \n \n if action is None :\n extras.append(arg_strings[start_index])\n return start_index+1\n \n \n \n if explicit_arg is not None :\n arg_count=match_argument(action,'A')\n \n \n \n \n chars=self.prefix_chars\n if arg_count ==0 and option_string[1]not in chars:\n action_tuples.append((action,[],option_string))\n char=option_string[0]\n option_string=char+explicit_arg[0]\n new_explicit_arg=explicit_arg[1:]or None\n optionals_map=self._option_string_actions\n if option_string in optionals_map:\n action=optionals_map[option_string]\n explicit_arg=new_explicit_arg\n else :\n msg=_('ignored explicit argument %r')\n raise ArgumentError(action,msg %explicit_arg)\n \n \n \n elif arg_count ==1:\n stop=start_index+1\n args=[explicit_arg]\n action_tuples.append((action,args,option_string))\n break\n \n \n \n else :\n msg=_('ignored explicit 
argument %r')\n raise ArgumentError(action,msg %explicit_arg)\n \n \n \n \n else :\n start=start_index+1\n selected_patterns=arg_strings_pattern[start:]\n arg_count=match_argument(action,selected_patterns)\n stop=start+arg_count\n args=arg_strings[start:stop]\n action_tuples.append((action,args,option_string))\n break\n \n \n \n assert action_tuples\n for action,args,option_string in action_tuples:\n take_action(action,args,option_string)\n return stop\n \n \n \n positionals=self._get_positional_actions()\n \n \n def consume_positionals(start_index):\n \n match_partial=self._match_arguments_partial\n selected_pattern=arg_strings_pattern[start_index:]\n arg_counts=match_partial(positionals,selected_pattern)\n \n \n \n for action,arg_count in zip(positionals,arg_counts):\n args=arg_strings[start_index:start_index+arg_count]\n start_index +=arg_count\n take_action(action,args)\n \n \n \n positionals[:]=positionals[len(arg_counts):]\n return start_index\n \n \n \n extras=[]\n start_index=0\n if option_string_indices:\n max_option_string_index=max(option_string_indices)\n else :\n max_option_string_index=-1\n while start_index <=max_option_string_index:\n \n \n next_option_string_index=min([\n index\n for index in option_string_indices\n if index >=start_index])\n if start_index !=next_option_string_index:\n positionals_end_index=consume_positionals(start_index)\n \n \n \n if positionals_end_index >start_index:\n start_index=positionals_end_index\n continue\n else :\n start_index=positionals_end_index\n \n \n \n if start_index not in option_string_indices:\n strings=arg_strings[start_index:next_option_string_index]\n extras.extend(strings)\n start_index=next_option_string_index\n \n \n start_index=consume_optional(start_index)\n \n \n stop_index=consume_positionals(start_index)\n \n \n extras.extend(arg_strings[stop_index:])\n \n \n \n required_actions=[]\n for action in self._actions:\n if action not in seen_actions:\n if action.required:\n required_actions.append(_get_action_name(action))\n else :\n \n \n \n \n if (action.default is not None and\n isinstance(action.default,str)and\n hasattr(namespace,action.dest)and\n action.default is getattr(namespace,action.dest)):\n setattr(namespace,action.dest,\n self._get_value(action,action.default))\n \n if required_actions:\n self.error(_('the following arguments are required: %s')%\n ', '.join(required_actions))\n \n \n for group in self._mutually_exclusive_groups:\n if group.required:\n for action in group._group_actions:\n if action in seen_non_default_actions:\n break\n \n \n else :\n names=[_get_action_name(action)\n for action in group._group_actions\n if action.help is not SUPPRESS]\n msg=_('one of the arguments %s is required')\n self.error(msg %' '.join(names))\n \n \n return namespace,extras\n \n def _read_args_from_files(self,arg_strings):\n \n new_arg_strings=[]\n for arg_string in arg_strings:\n \n \n if not arg_string or arg_string[0]not in self.fromfile_prefix_chars:\n new_arg_strings.append(arg_string)\n \n \n else :\n try :\n with open(arg_string[1:])as args_file:\n arg_strings=[]\n for arg_line in args_file.read().splitlines():\n for arg in self.convert_arg_line_to_args(arg_line):\n arg_strings.append(arg)\n arg_strings=self._read_args_from_files(arg_strings)\n new_arg_strings.extend(arg_strings)\n except OSError:\n err=_sys.exc_info()[1]\n self.error(str(err))\n \n \n return new_arg_strings\n \n def convert_arg_line_to_args(self,arg_line):\n return [arg_line]\n \n def _match_argument(self,action,arg_strings_pattern):\n \n 
nargs_pattern=self._get_nargs_pattern(action)\n match=_re.match(nargs_pattern,arg_strings_pattern)\n \n \n if match is None :\n nargs_errors={\n None :_('expected one argument'),\n OPTIONAL:_('expected at most one argument'),\n ONE_OR_MORE:_('expected at least one argument'),\n }\n msg=nargs_errors.get(action.nargs)\n if msg is None :\n msg=ngettext('expected %s argument',\n 'expected %s arguments',\n action.nargs)%action.nargs\n raise ArgumentError(action,msg)\n \n \n return len(match.group(1))\n \n def _match_arguments_partial(self,actions,arg_strings_pattern):\n \n \n result=[]\n for i in range(len(actions),0,-1):\n actions_slice=actions[:i]\n pattern=''.join([self._get_nargs_pattern(action)\n for action in actions_slice])\n match=_re.match(pattern,arg_strings_pattern)\n if match is not None :\n result.extend([len(string)for string in match.groups()])\n break\n \n \n return result\n \n def _parse_optional(self,arg_string):\n \n if not arg_string:\n return None\n \n \n if not arg_string[0]in self.prefix_chars:\n return None\n \n \n if arg_string in self._option_string_actions:\n action=self._option_string_actions[arg_string]\n return action,arg_string,None\n \n \n if len(arg_string)==1:\n return None\n \n \n if '='in arg_string:\n option_string,explicit_arg=arg_string.split('=',1)\n if option_string in self._option_string_actions:\n action=self._option_string_actions[option_string]\n return action,option_string,explicit_arg\n \n \n \n option_tuples=self._get_option_tuples(arg_string)\n \n \n if len(option_tuples)>1:\n options=', '.join([option_string\n for action,option_string,explicit_arg in option_tuples])\n args={'option':arg_string,'matches':options}\n msg=_('ambiguous option: %(option)s could match %(matches)s')\n self.error(msg %args)\n \n \n \n elif len(option_tuples)==1:\n option_tuple,=option_tuples\n return option_tuple\n \n \n \n \n if self._negative_number_matcher.match(arg_string):\n if not self._has_negative_number_optionals:\n return None\n \n \n if ' 'in arg_string:\n return None\n \n \n \n return None ,arg_string,None\n \n def _get_option_tuples(self,option_string):\n result=[]\n \n \n \n chars=self.prefix_chars\n if option_string[0]in chars and option_string[1]in chars:\n if self.allow_abbrev:\n if '='in option_string:\n option_prefix,explicit_arg=option_string.split('=',1)\n else :\n option_prefix=option_string\n explicit_arg=None\n for option_string in self._option_string_actions:\n if option_string.startswith(option_prefix):\n action=self._option_string_actions[option_string]\n tup=action,option_string,explicit_arg\n result.append(tup)\n \n \n \n \n elif option_string[0]in chars and option_string[1]not in chars:\n option_prefix=option_string\n explicit_arg=None\n short_option_prefix=option_string[:2]\n short_explicit_arg=option_string[2:]\n \n for option_string in self._option_string_actions:\n if option_string ==short_option_prefix:\n action=self._option_string_actions[option_string]\n tup=action,option_string,short_explicit_arg\n result.append(tup)\n elif option_string.startswith(option_prefix):\n action=self._option_string_actions[option_string]\n tup=action,option_string,explicit_arg\n result.append(tup)\n \n \n else :\n self.error(_('unexpected option string: %s')%option_string)\n \n \n return result\n \n def _get_nargs_pattern(self,action):\n \n \n nargs=action.nargs\n \n \n if nargs is None :\n nargs_pattern='(-*A-*)'\n \n \n elif nargs ==OPTIONAL:\n nargs_pattern='(-*A?-*)'\n \n \n elif nargs ==ZERO_OR_MORE:\n nargs_pattern='(-*[A-]*)'\n \n \n elif nargs 
==ONE_OR_MORE:\n nargs_pattern='(-*A[A-]*)'\n \n \n elif nargs ==REMAINDER:\n nargs_pattern='([-AO]*)'\n \n \n elif nargs ==PARSER:\n nargs_pattern='(-*A[-AO]*)'\n \n \n elif nargs ==SUPPRESS:\n nargs_pattern='(-*-*)'\n \n \n else :\n nargs_pattern='(-*%s-*)'%'-*'.join('A'*nargs)\n \n \n if action.option_strings:\n nargs_pattern=nargs_pattern.replace('-*','')\n nargs_pattern=nargs_pattern.replace('-','')\n \n \n return nargs_pattern\n \n \n \n \n \n def parse_intermixed_args(self,args=None ,namespace=None ):\n args,argv=self.parse_known_intermixed_args(args,namespace)\n if argv:\n msg=_('unrecognized arguments: %s')\n self.error(msg %' '.join(argv))\n return args\n \n def parse_known_intermixed_args(self,args=None ,namespace=None ):\n \n \n \n \n \n \n \n \n \n \n \n \n positionals=self._get_positional_actions()\n a=[action for action in positionals\n if action.nargs in [PARSER,REMAINDER]]\n if a:\n raise TypeError('parse_intermixed_args: positional arg'\n ' with nargs=%s'%a[0].nargs)\n \n if [action.dest for group in self._mutually_exclusive_groups\n for action in group._group_actions if action in positionals]:\n raise TypeError('parse_intermixed_args: positional in'\n ' mutuallyExclusiveGroup')\n \n try :\n save_usage=self.usage\n try :\n if self.usage is None :\n \n self.usage=self.format_usage()[7:]\n for action in positionals:\n \n action.save_nargs=action.nargs\n \n action.nargs=SUPPRESS\n action.save_default=action.default\n action.default=SUPPRESS\n namespace,remaining_args=self.parse_known_args(args,\n namespace)\n for action in positionals:\n \n if (hasattr(namespace,action.dest)\n and getattr(namespace,action.dest)==[]):\n from warnings import warn\n warn('Do not expect %s in %s'%(action.dest,namespace))\n delattr(namespace,action.dest)\n finally :\n \n for action in positionals:\n action.nargs=action.save_nargs\n action.default=action.save_default\n optionals=self._get_optional_actions()\n try :\n \n \n for action in optionals:\n action.save_required=action.required\n action.required=False\n for group in self._mutually_exclusive_groups:\n group.save_required=group.required\n group.required=False\n namespace,extras=self.parse_known_args(remaining_args,\n namespace)\n finally :\n \n for action in optionals:\n action.required=action.save_required\n for group in self._mutually_exclusive_groups:\n group.required=group.save_required\n finally :\n self.usage=save_usage\n return namespace,extras\n \n \n \n \n def _get_values(self,action,arg_strings):\n \n if action.nargs not in [PARSER,REMAINDER]:\n try :\n arg_strings.remove('--')\n except ValueError:\n pass\n \n \n if not arg_strings and action.nargs ==OPTIONAL:\n if action.option_strings:\n value=action.const\n else :\n value=action.default\n if isinstance(value,str):\n value=self._get_value(action,value)\n self._check_value(action,value)\n \n \n \n elif (not arg_strings and action.nargs ==ZERO_OR_MORE and\n not action.option_strings):\n if action.default is not None :\n value=action.default\n else :\n value=arg_strings\n self._check_value(action,value)\n \n \n elif len(arg_strings)==1 and action.nargs in [None ,OPTIONAL]:\n arg_string,=arg_strings\n value=self._get_value(action,arg_string)\n self._check_value(action,value)\n \n \n elif action.nargs ==REMAINDER:\n value=[self._get_value(action,v)for v in arg_strings]\n \n \n elif action.nargs ==PARSER:\n value=[self._get_value(action,v)for v in arg_strings]\n self._check_value(action,value[0])\n \n \n elif action.nargs ==SUPPRESS:\n value=SUPPRESS\n \n \n else :\n 
value=[self._get_value(action,v)for v in arg_strings]\n for v in value:\n self._check_value(action,v)\n \n \n return value\n \n def _get_value(self,action,arg_string):\n type_func=self._registry_get('type',action.type,action.type)\n if not callable(type_func):\n msg=_('%r is not callable')\n raise ArgumentError(action,msg %type_func)\n \n \n try :\n result=type_func(arg_string)\n \n \n except ArgumentTypeError:\n name=getattr(action.type,'__name__',repr(action.type))\n msg=str(_sys.exc_info()[1])\n raise ArgumentError(action,msg)\n \n \n except (TypeError,ValueError):\n name=getattr(action.type,'__name__',repr(action.type))\n args={'type':name,'value':arg_string}\n msg=_('invalid %(type)s value: %(value)r')\n raise ArgumentError(action,msg %args)\n \n \n return result\n \n def _check_value(self,action,value):\n \n if action.choices is not None and value not in action.choices:\n args={'value':value,\n 'choices':', '.join(map(repr,action.choices))}\n msg=_('invalid choice: %(value)r (choose from %(choices)s)')\n raise ArgumentError(action,msg %args)\n \n \n \n \n def format_usage(self):\n formatter=self._get_formatter()\n formatter.add_usage(self.usage,self._actions,\n self._mutually_exclusive_groups)\n return formatter.format_help()\n \n def format_help(self):\n formatter=self._get_formatter()\n \n \n formatter.add_usage(self.usage,self._actions,\n self._mutually_exclusive_groups)\n \n \n formatter.add_text(self.description)\n \n \n for action_group in self._action_groups:\n formatter.start_section(action_group.title)\n formatter.add_text(action_group.description)\n formatter.add_arguments(action_group._group_actions)\n formatter.end_section()\n \n \n formatter.add_text(self.epilog)\n \n \n return formatter.format_help()\n \n def _get_formatter(self):\n return self.formatter_class(prog=self.prog)\n \n \n \n \n def print_usage(self,file=None ):\n if file is None :\n file=_sys.stdout\n self._print_message(self.format_usage(),file)\n \n def print_help(self,file=None ):\n if file is None :\n file=_sys.stdout\n self._print_message(self.format_help(),file)\n \n def _print_message(self,message,file=None ):\n if message:\n if file is None :\n file=_sys.stderr\n file.write(message)\n \n \n \n \n def exit(self,status=0,message=None ):\n if message:\n self._print_message(message,_sys.stderr)\n _sys.exit(status)\n \n def error(self,message):\n ''\n\n\n\n\n\n\n \n self.print_usage(_sys.stderr)\n args={'prog':self.prog,'message':message}\n self.exit(2,_('%(prog)s: error: %(message)s\\n')%args)\n", ["copy", "gettext", "os", "re", "shutil", "sys", "textwrap", "warnings"]], "atexit": [".py", "''\n\n\n\n\n\nclass __loader__(object):\n pass\n \ndef _clear(*args,**kw):\n ''\n \n pass\n \ndef _run_exitfuncs(*args,**kw):\n ''\n \n pass\n \ndef register(*args,**kw):\n ''\n\n\n\n\n\n\n \n pass\n \ndef unregister(*args,**kw):\n ''\n\n\n\n \n pass\n", []], "base64": [".py", "#! 
/usr/bin/env python3\n\n\"\"\"Base16, Base32, Base64 (RFC 3548), Base85 and Ascii85 data encodings\"\"\"\n\n\n\n\n\nimport re\nimport struct\nimport binascii\n\n\n__all__=[\n\n'encode','decode','encodebytes','decodebytes',\n\n'b64encode','b64decode','b32encode','b32decode',\n'b16encode','b16decode',\n\n'b85encode','b85decode','a85encode','a85decode',\n\n'standard_b64encode','standard_b64decode',\n\n\n\n\n'urlsafe_b64encode','urlsafe_b64decode',\n]\n\n\nbytes_types=(bytes,bytearray)\n\ndef _bytes_from_decode_data(s):\n if isinstance(s,str):\n try :\n return s.encode('ascii')\n except UnicodeEncodeError:\n raise ValueError('string argument should contain only ASCII characters')\n if isinstance(s,bytes_types):\n return s\n try :\n return memoryview(s).tobytes()\n except TypeError:\n raise TypeError(\"argument should be a bytes-like object or ASCII \"\n \"string, not %r\"%s.__class__.__name__)from None\n \n \n \n \ndef b64encode(s,altchars=None ):\n ''\n\n\n\n\n \n encoded=binascii.b2a_base64(s,newline=False )\n if altchars is not None :\n assert len(altchars)==2,repr(altchars)\n return encoded.translate(bytes.maketrans(b'+/',altchars))\n return encoded\n \n \ndef b64decode(s,altchars=None ,validate=False ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n s=_bytes_from_decode_data(s)\n if altchars is not None :\n altchars=_bytes_from_decode_data(altchars)\n assert len(altchars)==2,repr(altchars)\n s=s.translate(bytes.maketrans(altchars,b'+/'))\n if validate and not re.fullmatch(b'[A-Za-z0-9+/]*={0,2}',s):\n raise binascii.Error('Non-base64 digit found')\n return binascii.a2b_base64(s)\n \n \ndef standard_b64encode(s):\n ''\n\n\n \n return b64encode(s)\n \ndef standard_b64decode(s):\n ''\n\n\n\n\n\n \n return b64decode(s)\n \n \n_urlsafe_encode_translation=bytes.maketrans(b'+/',b'-_')\n_urlsafe_decode_translation=bytes.maketrans(b'-_',b'+/')\n\ndef urlsafe_b64encode(s):\n ''\n\n\n\n\n \n return b64encode(s).translate(_urlsafe_encode_translation)\n \ndef urlsafe_b64decode(s):\n ''\n\n\n\n\n\n\n\n\n \n s=_bytes_from_decode_data(s)\n s=s.translate(_urlsafe_decode_translation)\n return b64decode(s)\n \n \n \n \n_b32alphabet=b'ABCDEFGHIJKLMNOPQRSTUVWXYZ234567'\n_b32tab2=None\n_b32rev=None\n\ndef b32encode(s):\n ''\n \n global _b32tab2\n \n \n if _b32tab2 is None :\n b32tab=[bytes((i,))for i in _b32alphabet]\n _b32tab2=[a+b for a in b32tab for b in b32tab]\n b32tab=None\n \n if not isinstance(s,bytes_types):\n s=memoryview(s).tobytes()\n leftover=len(s)%5\n \n if leftover:\n s=s+b'\\0'*(5 -leftover)\n encoded=bytearray()\n from_bytes=int.from_bytes\n b32tab2=_b32tab2\n for i in range(0,len(s),5):\n c=from_bytes(s[i:i+5],'big')\n encoded +=(b32tab2[c >>30]+\n b32tab2[(c >>20)&0x3ff]+\n b32tab2[(c >>10)&0x3ff]+\n b32tab2[c&0x3ff]\n )\n \n if leftover ==1:\n encoded[-6:]=b'======'\n elif leftover ==2:\n encoded[-4:]=b'===='\n elif leftover ==3:\n encoded[-3:]=b'==='\n elif leftover ==4:\n encoded[-1:]=b'='\n return bytes(encoded)\n \ndef b32decode(s,casefold=False ,map01=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n global _b32rev\n \n \n if _b32rev is None :\n _b32rev={v:k for k,v in enumerate(_b32alphabet)}\n s=_bytes_from_decode_data(s)\n if len(s)%8:\n raise binascii.Error('Incorrect padding')\n \n \n \n if map01 is not None :\n map01=_bytes_from_decode_data(map01)\n assert len(map01)==1,repr(map01)\n s=s.translate(bytes.maketrans(b'01',b'O'+map01))\n if casefold:\n s=s.upper()\n \n \n \n l=len(s)\n s=s.rstrip(b'=')\n padchars=l -len(s)\n \n decoded=bytearray()\n b32rev=_b32rev\n for i in range(0,len(s),8):\n 
quanta=s[i:i+8]\n acc=0\n try :\n for c in quanta:\n acc=(acc <<5)+b32rev[c]\n except KeyError:\n raise binascii.Error('Non-base32 digit found')from None\n decoded +=acc.to_bytes(5,'big')\n \n if l %8 or padchars not in {0,1,3,4,6}:\n raise binascii.Error('Incorrect padding')\n if padchars and decoded:\n acc <<=5 *padchars\n last=acc.to_bytes(5,'big')\n leftover=(43 -5 *padchars)//8\n decoded[-5:]=last[:leftover]\n return bytes(decoded)\n \n \n \n \n \ndef b16encode(s):\n ''\n \n return binascii.hexlify(s).upper()\n \n \ndef b16decode(s,casefold=False ):\n ''\n\n\n\n\n\n\n\n \n s=_bytes_from_decode_data(s)\n if casefold:\n s=s.upper()\n if re.search(b'[^0-9A-F]',s):\n raise binascii.Error('Non-base16 digit found')\n return binascii.unhexlify(s)\n \n \n \n \n \n_a85chars=None\n_a85chars2=None\n_A85START=b\"<~\"\n_A85END=b\"~>\"\n\ndef _85encode(b,chars,chars2,pad=False ,foldnuls=False ,foldspaces=False ):\n\n if not isinstance(b,bytes_types):\n b=memoryview(b).tobytes()\n \n padding=(-len(b))%4\n if padding:\n b=b+b'\\0'*padding\n words=struct.Struct('!%dI'%(len(b)//4)).unpack(b)\n \n chunks=[b'z'if foldnuls and not word else\n b'y'if foldspaces and word ==0x20202020 else\n (chars2[word //614125]+\n chars2[word //85 %7225]+\n chars[word %85])\n for word in words]\n \n if padding and not pad:\n if chunks[-1]==b'z':\n chunks[-1]=chars[0]*5\n chunks[-1]=chunks[-1][:-padding]\n \n return b''.join(chunks)\n \ndef a85encode(b,*,foldspaces=False ,wrapcol=0,pad=False ,adobe=False ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n global _a85chars,_a85chars2\n \n \n if _a85chars is None :\n _a85chars=[bytes((i,))for i in range(33,118)]\n _a85chars2=[(a+b)for a in _a85chars for b in _a85chars]\n \n result=_85encode(b,_a85chars,_a85chars2,pad,True ,foldspaces)\n \n if adobe:\n result=_A85START+result\n if wrapcol:\n wrapcol=max(2 if adobe else 1,wrapcol)\n chunks=[result[i:i+wrapcol]\n for i in range(0,len(result),wrapcol)]\n if adobe:\n if len(chunks[-1])+2 >wrapcol:\n chunks.append(b'')\n result=b'\\n'.join(chunks)\n if adobe:\n result +=_A85END\n \n return result\n \ndef a85decode(b,*,foldspaces=False ,adobe=False ,ignorechars=b' \\t\\n\\r\\v'):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n b=_bytes_from_decode_data(b)\n if adobe:\n if not b.endswith(_A85END):\n raise ValueError(\n \"Ascii85 encoded byte sequences must end \"\n \"with {!r}\".format(_A85END)\n )\n if b.startswith(_A85START):\n b=b[2:-2]\n else :\n b=b[:-2]\n \n \n \n \n packI=struct.Struct('!I').pack\n decoded=[]\n decoded_append=decoded.append\n curr=[]\n curr_append=curr.append\n curr_clear=curr.clear\n for x in b+b'u'*4:\n if b'!'[0]<=x <=b'u'[0]:\n curr_append(x)\n if len(curr)==5:\n acc=0\n for x in curr:\n acc=85 *acc+(x -33)\n try :\n decoded_append(packI(acc))\n except struct.error:\n raise ValueError('Ascii85 overflow')from None\n curr_clear()\n elif x ==b'z'[0]:\n if curr:\n raise ValueError('z inside Ascii85 5-tuple')\n decoded_append(b'\\0\\0\\0\\0')\n elif foldspaces and x ==b'y'[0]:\n if curr:\n raise ValueError('y inside Ascii85 5-tuple')\n decoded_append(b'\\x20\\x20\\x20\\x20')\n elif x in ignorechars:\n \n continue\n else :\n raise ValueError('Non-Ascii85 digit found: %c'%x)\n \n result=b''.join(decoded)\n padding=4 -len(curr)\n if padding:\n \n result=result[:-padding]\n return result\n \n \n \n_b85alphabet=(b\"0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ\"\nb\"abcdefghijklmnopqrstuvwxyz!#$%&()*+-;<=>?@^_`{|}~\")\n_b85chars=None\n_b85chars2=None\n_b85dec=None\n\ndef b85encode(b,pad=False ):\n ''\n\n\n\n \n global _b85chars,_b85chars2\n \n \n 
if _b85chars is None :\n _b85chars=[bytes((i,))for i in _b85alphabet]\n _b85chars2=[(a+b)for a in _b85chars for b in _b85chars]\n return _85encode(b,_b85chars,_b85chars2,pad)\n \ndef b85decode(b):\n ''\n\n\n \n global _b85dec\n \n \n if _b85dec is None :\n _b85dec=[None ]*256\n for i,c in enumerate(_b85alphabet):\n _b85dec[c]=i\n \n b=_bytes_from_decode_data(b)\n padding=(-len(b))%5\n b=b+b'~'*padding\n out=[]\n packI=struct.Struct('!I').pack\n for i in range(0,len(b),5):\n chunk=b[i:i+5]\n acc=0\n try :\n for c in chunk:\n acc=acc *85+_b85dec[c]\n except TypeError:\n for j,c in enumerate(chunk):\n if _b85dec[c]is None :\n raise ValueError('bad base85 character at position %d'\n %(i+j))from None\n raise\n try :\n out.append(packI(acc))\n except struct.error:\n raise ValueError('base85 overflow in hunk starting at byte %d'\n %i)from None\n \n result=b''.join(out)\n if padding:\n result=result[:-padding]\n return result\n \n \n \n \n \nMAXLINESIZE=76\nMAXBINSIZE=(MAXLINESIZE //4)*3\n\ndef encode(input,output):\n ''\n while True :\n s=input.read(MAXBINSIZE)\n if not s:\n break\n while len(s)\":\n return filename\n canonic=self.fncache.get(filename)\n if not canonic:\n canonic=os.path.abspath(filename)\n canonic=os.path.normcase(canonic)\n self.fncache[filename]=canonic\n return canonic\n \n def reset(self):\n ''\n import linecache\n linecache.checkcache()\n self.botframe=None\n self._set_stopinfo(None ,None )\n \n def trace_dispatch(self,frame,event,arg):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if self.quitting:\n return\n if event =='line':\n return self.dispatch_line(frame)\n if event =='call':\n return self.dispatch_call(frame,arg)\n if event =='return':\n return self.dispatch_return(frame,arg)\n if event =='exception':\n return self.dispatch_exception(frame,arg)\n if event =='c_call':\n return self.trace_dispatch\n if event =='c_exception':\n return self.trace_dispatch\n if event =='c_return':\n return self.trace_dispatch\n print('bdb.Bdb.dispatch: unknown debugging event:',repr(event))\n return self.trace_dispatch\n \n def dispatch_line(self,frame):\n ''\n\n\n\n\n \n if self.stop_here(frame)or self.break_here(frame):\n self.user_line(frame)\n if self.quitting:raise BdbQuit\n return self.trace_dispatch\n \n def dispatch_call(self,frame,arg):\n ''\n\n\n\n\n \n \n if self.botframe is None :\n \n self.botframe=frame.f_back\n return self.trace_dispatch\n if not (self.stop_here(frame)or self.break_anywhere(frame)):\n \n return\n \n if self.stopframe and frame.f_code.co_flags&GENERATOR_AND_COROUTINE_FLAGS:\n return self.trace_dispatch\n self.user_call(frame,arg)\n if self.quitting:raise BdbQuit\n return self.trace_dispatch\n \n def dispatch_return(self,frame,arg):\n ''\n\n\n\n\n \n if self.stop_here(frame)or frame ==self.returnframe:\n \n if self.stopframe and frame.f_code.co_flags&GENERATOR_AND_COROUTINE_FLAGS:\n return self.trace_dispatch\n try :\n self.frame_returning=frame\n self.user_return(frame,arg)\n finally :\n self.frame_returning=None\n if self.quitting:raise BdbQuit\n \n if self.stopframe is frame and self.stoplineno !=-1:\n self._set_stopinfo(None ,None )\n return self.trace_dispatch\n \n def dispatch_exception(self,frame,arg):\n ''\n\n\n\n\n \n if self.stop_here(frame):\n \n \n \n if not (frame.f_code.co_flags&GENERATOR_AND_COROUTINE_FLAGS\n and arg[0]is StopIteration and arg[2]is None ):\n self.user_exception(frame,arg)\n if self.quitting:raise BdbQuit\n \n \n \n \n elif (self.stopframe and frame is not self.stopframe\n and 
self.stopframe.f_code.co_flags&GENERATOR_AND_COROUTINE_FLAGS\n and arg[0]in (StopIteration,GeneratorExit)):\n self.user_exception(frame,arg)\n if self.quitting:raise BdbQuit\n \n return self.trace_dispatch\n \n \n \n \n \n def is_skipped_module(self,module_name):\n ''\n if module_name is None :\n return False\n for pattern in self.skip:\n if fnmatch.fnmatch(module_name,pattern):\n return True\n return False\n \n def stop_here(self,frame):\n ''\n \n \n if self.skip and\\\n self.is_skipped_module(frame.f_globals.get('__name__')):\n return False\n if frame is self.stopframe:\n if self.stoplineno ==-1:\n return False\n return frame.f_lineno >=self.stoplineno\n if not self.stopframe:\n return True\n return False\n \n def break_here(self,frame):\n ''\n\n\n\n \n filename=self.canonic(frame.f_code.co_filename)\n if filename not in self.breaks:\n return False\n lineno=frame.f_lineno\n if lineno not in self.breaks[filename]:\n \n \n lineno=frame.f_code.co_firstlineno\n if lineno not in self.breaks[filename]:\n return False\n \n \n (bp,flag)=effective(filename,lineno,frame)\n if bp:\n self.currentbp=bp.number\n if (flag and bp.temporary):\n self.do_clear(str(bp.number))\n return True\n else :\n return False\n \n def do_clear(self,arg):\n ''\n\n\n \n raise NotImplementedError(\"subclass of bdb must implement do_clear()\")\n \n def break_anywhere(self,frame):\n ''\n \n return self.canonic(frame.f_code.co_filename)in self.breaks\n \n \n \n \n def user_call(self,frame,argument_list):\n ''\n pass\n \n def user_line(self,frame):\n ''\n pass\n \n def user_return(self,frame,return_value):\n ''\n pass\n \n def user_exception(self,frame,exc_info):\n ''\n pass\n \n def _set_stopinfo(self,stopframe,returnframe,stoplineno=0):\n ''\n\n\n\n\n \n self.stopframe=stopframe\n self.returnframe=returnframe\n self.quitting=False\n \n \n self.stoplineno=stoplineno\n \n \n \n \n def set_until(self,frame,lineno=None ):\n ''\n \n \n if lineno is None :\n lineno=frame.f_lineno+1\n self._set_stopinfo(frame,frame,lineno)\n \n def set_step(self):\n ''\n \n \n \n \n if self.frame_returning:\n caller_frame=self.frame_returning.f_back\n if caller_frame and not caller_frame.f_trace:\n caller_frame.f_trace=self.trace_dispatch\n self._set_stopinfo(None ,None )\n \n def set_next(self,frame):\n ''\n self._set_stopinfo(frame,None )\n \n def set_return(self,frame):\n ''\n if frame.f_code.co_flags&GENERATOR_AND_COROUTINE_FLAGS:\n self._set_stopinfo(frame,None ,-1)\n else :\n self._set_stopinfo(frame.f_back,frame)\n \n def set_trace(self,frame=None ):\n ''\n\n\n \n if frame is None :\n frame=sys._getframe().f_back\n self.reset()\n while frame:\n frame.f_trace=self.trace_dispatch\n self.botframe=frame\n frame=frame.f_back\n self.set_step()\n sys.settrace(self.trace_dispatch)\n \n def set_continue(self):\n ''\n\n\n \n \n self._set_stopinfo(self.botframe,None ,-1)\n if not self.breaks:\n \n sys.settrace(None )\n frame=sys._getframe().f_back\n while frame and frame is not self.botframe:\n del frame.f_trace\n frame=frame.f_back\n \n def set_quit(self):\n ''\n\n\n \n self.stopframe=self.botframe\n self.returnframe=None\n self.quitting=True\n sys.settrace(None )\n \n \n \n \n \n \n \n \n def set_break(self,filename,lineno,temporary=False ,cond=None ,\n funcname=None ):\n ''\n\n\n\n \n filename=self.canonic(filename)\n import linecache\n line=linecache.getline(filename,lineno)\n if not line:\n return 'Line %s:%d does not exist'%(filename,lineno)\n list=self.breaks.setdefault(filename,[])\n if lineno not in list:\n list.append(lineno)\n 
bp=Breakpoint(filename,lineno,temporary,cond,funcname)\n return None\n \n def _prune_breaks(self,filename,lineno):\n ''\n\n\n\n\n\n \n if (filename,lineno)not in Breakpoint.bplist:\n self.breaks[filename].remove(lineno)\n if not self.breaks[filename]:\n del self.breaks[filename]\n \n def clear_break(self,filename,lineno):\n ''\n\n\n \n filename=self.canonic(filename)\n if filename not in self.breaks:\n return 'There are no breakpoints in %s'%filename\n if lineno not in self.breaks[filename]:\n return 'There is no breakpoint at %s:%d'%(filename,lineno)\n \n \n for bp in Breakpoint.bplist[filename,lineno][:]:\n bp.deleteMe()\n self._prune_breaks(filename,lineno)\n return None\n \n def clear_bpbynumber(self,arg):\n ''\n\n\n \n try :\n bp=self.get_bpbynumber(arg)\n except ValueError as err:\n return str(err)\n bp.deleteMe()\n self._prune_breaks(bp.file,bp.line)\n return None\n \n def clear_all_file_breaks(self,filename):\n ''\n\n\n \n filename=self.canonic(filename)\n if filename not in self.breaks:\n return 'There are no breakpoints in %s'%filename\n for line in self.breaks[filename]:\n blist=Breakpoint.bplist[filename,line]\n for bp in blist:\n bp.deleteMe()\n del self.breaks[filename]\n return None\n \n def clear_all_breaks(self):\n ''\n\n\n \n if not self.breaks:\n return 'There are no breakpoints'\n for bp in Breakpoint.bpbynumber:\n if bp:\n bp.deleteMe()\n self.breaks={}\n return None\n \n def get_bpbynumber(self,arg):\n ''\n\n\n\n \n if not arg:\n raise ValueError('Breakpoint number expected')\n try :\n number=int(arg)\n except ValueError:\n raise ValueError('Non-numeric breakpoint number %s'%arg)from None\n try :\n bp=Breakpoint.bpbynumber[number]\n except IndexError:\n raise ValueError('Breakpoint number %d out of range'%number)from None\n if bp is None :\n raise ValueError('Breakpoint %d already deleted'%number)\n return bp\n \n def get_break(self,filename,lineno):\n ''\n filename=self.canonic(filename)\n return filename in self.breaks and\\\n lineno in self.breaks[filename]\n \n def get_breaks(self,filename,lineno):\n ''\n\n\n \n filename=self.canonic(filename)\n return filename in self.breaks and\\\n lineno in self.breaks[filename]and\\\n Breakpoint.bplist[filename,lineno]or []\n \n def get_file_breaks(self,filename):\n ''\n\n\n \n filename=self.canonic(filename)\n if filename in self.breaks:\n return self.breaks[filename]\n else :\n return []\n \n def get_all_breaks(self):\n ''\n return self.breaks\n \n \n \n \n def get_stack(self,f,t):\n ''\n\n\n\n \n stack=[]\n if t and t.tb_frame is f:\n t=t.tb_next\n while f is not None :\n stack.append((f,f.f_lineno))\n if f is self.botframe:\n break\n f=f.f_back\n stack.reverse()\n i=max(0,len(stack)-1)\n while t is not None :\n stack.append((t.tb_frame,t.tb_lineno))\n t=t.tb_next\n if f is None :\n i=max(0,len(stack)-1)\n return stack,i\n \n def format_stack_entry(self,frame_lineno,lprefix=': '):\n ''\n\n\n\n\n\n\n \n import linecache,reprlib\n frame,lineno=frame_lineno\n filename=self.canonic(frame.f_code.co_filename)\n s='%s(%r)'%(filename,lineno)\n if frame.f_code.co_name:\n s +=frame.f_code.co_name\n else :\n s +=\"\"\n s +='()'\n if '__return__'in frame.f_locals:\n rv=frame.f_locals['__return__']\n s +='->'\n s +=reprlib.repr(rv)\n line=linecache.getline(filename,lineno,frame.f_globals)\n if line:\n s +=lprefix+line.strip()\n return s\n \n \n \n \n \n def run(self,cmd,globals=None ,locals=None ):\n ''\n\n\n \n if globals is None :\n import __main__\n globals=__main__.__dict__\n if locals is None :\n locals=globals\n self.reset()\n if 
isinstance(cmd,str):\n cmd=compile(cmd,\"\",\"exec\")\n sys.settrace(self.trace_dispatch)\n try :\n exec(cmd,globals,locals)\n except BdbQuit:\n pass\n finally :\n self.quitting=True\n sys.settrace(None )\n \n def runeval(self,expr,globals=None ,locals=None ):\n ''\n\n\n \n if globals is None :\n import __main__\n globals=__main__.__dict__\n if locals is None :\n locals=globals\n self.reset()\n sys.settrace(self.trace_dispatch)\n try :\n return eval(expr,globals,locals)\n except BdbQuit:\n pass\n finally :\n self.quitting=True\n sys.settrace(None )\n \n def runctx(self,cmd,globals,locals):\n ''\n \n self.run(cmd,globals,locals)\n \n \n \n def runcall(self,func,/,*args,**kwds):\n ''\n\n\n \n self.reset()\n sys.settrace(self.trace_dispatch)\n res=None\n try :\n res=func(*args,**kwds)\n except BdbQuit:\n pass\n finally :\n self.quitting=True\n sys.settrace(None )\n return res\n \n \ndef set_trace():\n ''\n Bdb().set_trace()\n \n \nclass Breakpoint:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n \n \n \n next=1\n bplist={}\n bpbynumber=[None ]\n \n \n \n def __init__(self,file,line,temporary=False ,cond=None ,funcname=None ):\n self.funcname=funcname\n \n self.func_first_executable_line=None\n self.file=file\n self.line=line\n self.temporary=temporary\n self.cond=cond\n self.enabled=True\n self.ignore=0\n self.hits=0\n self.number=Breakpoint.next\n Breakpoint.next +=1\n \n self.bpbynumber.append(self)\n if (file,line)in self.bplist:\n self.bplist[file,line].append(self)\n else :\n self.bplist[file,line]=[self]\n \n def deleteMe(self):\n ''\n\n\n\n \n \n index=(self.file,self.line)\n self.bpbynumber[self.number]=None\n self.bplist[index].remove(self)\n if not self.bplist[index]:\n \n del self.bplist[index]\n \n def enable(self):\n ''\n self.enabled=True\n \n def disable(self):\n ''\n self.enabled=False\n \n def bpprint(self,out=None ):\n ''\n\n\n\n \n if out is None :\n out=sys.stdout\n print(self.bpformat(),file=out)\n \n def bpformat(self):\n ''\n\n\n\n\n\n \n if self.temporary:\n disp='del '\n else :\n disp='keep '\n if self.enabled:\n disp=disp+'yes '\n else :\n disp=disp+'no '\n ret='%-4dbreakpoint %s at %s:%d'%(self.number,disp,\n self.file,self.line)\n if self.cond:\n ret +='\\n\\tstop only if %s'%(self.cond,)\n if self.ignore:\n ret +='\\n\\tignore next %d hits'%(self.ignore,)\n if self.hits:\n if self.hits >1:\n ss='s'\n else :\n ss=''\n ret +='\\n\\tbreakpoint already hit %d time%s'%(self.hits,ss)\n return ret\n \n def __str__(self):\n ''\n return 'breakpoint %s at %s:%s'%(self.number,self.file,self.line)\n \n \n \n \ndef checkfuncname(b,frame):\n ''\n\n\n\n\n\n \n if not b.funcname:\n \n if b.line !=frame.f_lineno:\n \n \n return False\n return True\n \n \n if frame.f_code.co_name !=b.funcname:\n \n return False\n \n \n if not b.func_first_executable_line:\n \n b.func_first_executable_line=frame.f_lineno\n \n if b.func_first_executable_line !=frame.f_lineno:\n \n return False\n return True\n \n \n \n \ndef effective(file,line,frame):\n ''\n\n\n\n\n\n \n possibles=Breakpoint.bplist[file,line]\n for b in possibles:\n if not b.enabled:\n continue\n if not checkfuncname(b,frame):\n continue\n \n b.hits +=1\n if not b.cond:\n \n if b.ignore >0:\n b.ignore -=1\n continue\n else :\n \n return (b,True )\n else :\n \n \n \n try :\n val=eval(b.cond,frame.f_globals,frame.f_locals)\n if val:\n if b.ignore >0:\n b.ignore -=1\n \n else :\n return (b,True )\n \n \n except :\n \n \n \n return (b,False )\n return (None ,None )\n \n \n \n \nclass Tdb(Bdb):\n def user_call(self,frame,args):\n 
name=frame.f_code.co_name\n if not name:name='???'\n print('+++ call',name,args)\n def user_line(self,frame):\n import linecache\n name=frame.f_code.co_name\n if not name:name='???'\n fn=self.canonic(frame.f_code.co_filename)\n line=linecache.getline(fn,frame.f_lineno,frame.f_globals)\n print('+++',fn,frame.f_lineno,name,':',line.strip())\n def user_return(self,frame,retval):\n print('+++ return',retval)\n def user_exception(self,frame,exc_stuff):\n print('+++ exception',exc_stuff)\n self.set_continue()\n \ndef foo(n):\n print('foo(',n,')')\n x=bar(n *10)\n print('bar returned',x)\n \ndef bar(a):\n print('bar(',a,')')\n return a /2\n \ndef test():\n t=Tdb()\n t.run('import bdb; bdb.foo(10)')\n", ["__main__", "fnmatch", "inspect", "linecache", "os", "reprlib", "sys"]], "binascii": [".py", "''\n\n\n\n\n\n\n\nimport _base64\n\nfrom _binascii import *\n\nclass Error(ValueError):\n def __init__(self,msg=''):\n self._msg=msg\n \n def __str__(self):\n return \" binascii.Error: \"+self._msg\n \n \nclass Done(Exception):\n pass\n \nclass Incomplete(Error):\n pass\n \ndef a2b_uu(s):\n if not s:\n return ''\n \n length=(ord(s[0])-0x20)%64\n \n def quadruplets_gen(s):\n while s:\n try :\n yield ord(s[0]),ord(s[1]),ord(s[2]),ord(s[3])\n except IndexError:\n s +=' '\n yield ord(s[0]),ord(s[1]),ord(s[2]),ord(s[3])\n return\n s=s[4:]\n \n try :\n result=[''.join(\n [chr((A -0x20)<<2 |(((B -0x20)>>4)&0x3)),\n chr(((B -0x20)&0xf)<<4 |(((C -0x20)>>2)&0xf)),\n chr(((C -0x20)&0x3)<<6 |((D -0x20)&0x3f))\n ])for A,B,C,D in quadruplets_gen(s[1:].rstrip())]\n except ValueError:\n raise Error('Illegal char')\n result=''.join(result)\n trailingdata=result[length:]\n if trailingdata.strip('\\x00'):\n raise Error('Trailing garbage')\n result=result[:length]\n if len(result)>2)&0x3F],\n table_b2a_base64[((A <<4)|((B >>4)&0xF))&0x3F],\n table_b2a_base64[((B <<2)|((C >>6)&0x3))&0x3F],\n table_b2a_base64[(C)&0x3F]])\n for A,B,C in a]\n \n final=s[length -final_length:]\n if final_length ==0:\n snippet=''\n elif final_length ==1:\n a=final[0]\n snippet=table_b2a_base64[(a >>2)&0x3F]+\\\n table_b2a_base64[(a <<4)&0x3F]+'=='\n else :\n a=final[0]\n b=final[1]\n snippet=table_b2a_base64[(a >>2)&0x3F]+\\\n table_b2a_base64[((a <<4)|(b >>4)&0xF)&0x3F]+\\\n table_b2a_base64[(b <<2)&0x3F]+'='\n \n result=''.join(result)+snippet\n if newline:\n result +='\\n'\n return bytes(result,__BRYTHON__.charset)\n \ndef a2b_qp(s,header=False ):\n inp=0\n odata=[]\n while inp =len(s):\n break\n \n if (s[inp]=='\\n')or (s[inp]=='\\r'):\n if s[inp]!='\\n':\n while inp 0 and data[lf -1]=='\\r'\n \n inp=0\n linelen=0\n odata=[]\n while inp '~'or\n c =='='or\n (header and c =='_')or\n (c =='.'and linelen ==0 and (inp+1 ==len(data)or\n data[inp+1]=='\\n'or\n data[inp+1]=='\\r'))or\n (not istext and (c =='\\r'or c =='\\n'))or\n ((c =='\\t'or c ==' ')and (inp+1 ==len(data)))or\n (c <=' 'and c !='\\r'and c !='\\n'and\n (quotetabs or (not quotetabs and (c !='\\t'and c !=' '))))):\n linelen +=3\n if linelen >=MAXLINESIZE:\n odata.append('=')\n if crlf:odata.append('\\r')\n odata.append('\\n')\n linelen=3\n odata.append('='+two_hex_digits(ord(c)))\n inp +=1\n else :\n if (istext and\n (c =='\\n'or (inp+1 0 and\n (odata[-1]==' 'or odata[-1]=='\\t')):\n ch=ord(odata[-1])\n odata[-1]='='\n odata.append(two_hex_digits(ch))\n \n if crlf:odata.append('\\r')\n odata.append('\\n')\n if c =='\\r':\n inp +=2\n else :\n inp +=1\n else :\n if (inp+1 =MAXLINESIZE):\n odata.append('=')\n if crlf:odata.append('\\r')\n odata.append('\\n')\n linelen=0\n \n linelen +=1\n 
if header and c ==' ':\n c='_'\n odata.append(c)\n inp +=1\n return ''.join(odata)\n \nhex_numbers='0123456789ABCDEF'\ndef hex(n):\n if n ==0:\n return '0'\n \n if n <0:\n n=-n\n sign='-'\n else :\n sign=''\n arr=[]\n \n def hex_gen(n):\n ''\n while n:\n yield n %0x10\n n=n /0x10\n \n for nibble in hex_gen(n):\n arr=[hex_numbers[nibble]]+arr\n return sign+''.join(arr)\n \ndef two_hex_digits(n):\n return hex_numbers[n /0x10]+hex_numbers[n %0x10]\n \n \ndef strhex_to_int(s):\n i=0\n for c in s:\n i=i *0x10+hex_numbers.index(c)\n return i\n \nhqx_encoding='!\"#$%&\\'()*+,-012345689@ABCDEFGHIJKLMNPQRSTUVXYZ[`abcdefhijklmpqr'\n\nDONE=0x7f\nSKIP=0x7e\nFAIL=0x7d\n\ntable_a2b_hqx=[\n\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\n\nFAIL,FAIL,SKIP,FAIL,FAIL,SKIP,FAIL,FAIL,\n\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\n\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\n\nFAIL,0x00,0x01,0x02,0x03,0x04,0x05,0x06,\n\n0x07,0x08,0x09,0x0A,0x0B,0x0C,FAIL,FAIL,\n\n0x0D,0x0E,0x0F,0x10,0x11,0x12,0x13,FAIL,\n\n0x14,0x15,DONE,FAIL,FAIL,FAIL,FAIL,FAIL,\n\n0x16,0x17,0x18,0x19,0x1A,0x1B,0x1C,0x1D,\n\n0x1E,0x1F,0x20,0x21,0x22,0x23,0x24,FAIL,\n\n0x25,0x26,0x27,0x28,0x29,0x2A,0x2B,FAIL,\n\n0x2C,0x2D,0x2E,0x2F,FAIL,FAIL,FAIL,FAIL,\n\n0x30,0x31,0x32,0x33,0x34,0x35,0x36,FAIL,\n\n0x37,0x38,0x39,0x3A,0x3B,0x3C,FAIL,FAIL,\n\n0x3D,0x3E,0x3F,FAIL,FAIL,FAIL,FAIL,FAIL,\n\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\nFAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,FAIL,\n]\n\ndef a2b_hqx(s):\n result=[]\n \n def quadruples_gen(s):\n t=[]\n for c in s:\n res=table_a2b_hqx[ord(c)]\n if res ==SKIP:\n continue\n elif res ==FAIL:\n raise Error('Illegal character')\n elif res ==DONE:\n yield t\n raise Done\n else :\n t.append(res)\n if len(t)==4:\n yield t\n t=[]\n yield t\n \n done=0\n try :\n for snippet in quadruples_gen(s):\n length=len(snippet)\n if length ==4:\n result.append(chr(((snippet[0]&0x3f)<<2)|(snippet[1]>>4)))\n result.append(chr(((snippet[1]&0x0f)<<4)|(snippet[2]>>2)))\n result.append(chr(((snippet[2]&0x03)<<6)|(snippet[3])))\n elif length ==3:\n result.append(chr(((snippet[0]&0x3f)<<2)|(snippet[1]>>4)))\n result.append(chr(((snippet[1]&0x0f)<<4)|(snippet[2]>>2)))\n elif length ==2:\n result.append(chr(((snippet[0]&0x3f)<<2)|(snippet[1]>>4)))\n except Done:\n done=1\n except Error:\n raise\n return (''.join(result),done)\n \n \n \ndef b2a_hqx(s):\n result=[]\n \n def triples_gen(s):\n while s:\n try :\n yield ord(s[0]),ord(s[1]),ord(s[2])\n except IndexError:\n yield tuple([ord(c)for c in s])\n s=s[3:]\n \n for snippet in triples_gen(s):\n length=len(snippet)\n if length ==3:\n result.append(\n hqx_encoding[(snippet[0]&0xfc)>>2])\n result.append(hqx_encoding[\n ((snippet[0]&0x03)<<4)|((snippet[1]&0xf0)>>4)])\n result.append(hqx_encoding[\n (snippet[1]&0x0f)<<2 |((snippet[2]&0xc0)>>6)])\n result.append(hqx_encoding[snippet[2]&0x3f])\n elif length ==2:\n result.append(\n hqx_encoding[(snippet[0]&0xfc)>>2])\n 
result.append(hqx_encoding[\n ((snippet[0]&0x03)<<4)|((snippet[1]&0xf0)>>4)])\n result.append(hqx_encoding[\n (snippet[1]&0x0f)<<2])\n elif length ==1:\n result.append(\n hqx_encoding[(snippet[0]&0xfc)>>2])\n result.append(hqx_encoding[\n ((snippet[0]&0x03)<<4)])\n return ''.join(result)\n \ncrctab_hqx=[\n0x0000,0x1021,0x2042,0x3063,0x4084,0x50a5,0x60c6,0x70e7,\n0x8108,0x9129,0xa14a,0xb16b,0xc18c,0xd1ad,0xe1ce,0xf1ef,\n0x1231,0x0210,0x3273,0x2252,0x52b5,0x4294,0x72f7,0x62d6,\n0x9339,0x8318,0xb37b,0xa35a,0xd3bd,0xc39c,0xf3ff,0xe3de,\n0x2462,0x3443,0x0420,0x1401,0x64e6,0x74c7,0x44a4,0x5485,\n0xa56a,0xb54b,0x8528,0x9509,0xe5ee,0xf5cf,0xc5ac,0xd58d,\n0x3653,0x2672,0x1611,0x0630,0x76d7,0x66f6,0x5695,0x46b4,\n0xb75b,0xa77a,0x9719,0x8738,0xf7df,0xe7fe,0xd79d,0xc7bc,\n0x48c4,0x58e5,0x6886,0x78a7,0x0840,0x1861,0x2802,0x3823,\n0xc9cc,0xd9ed,0xe98e,0xf9af,0x8948,0x9969,0xa90a,0xb92b,\n0x5af5,0x4ad4,0x7ab7,0x6a96,0x1a71,0x0a50,0x3a33,0x2a12,\n0xdbfd,0xcbdc,0xfbbf,0xeb9e,0x9b79,0x8b58,0xbb3b,0xab1a,\n0x6ca6,0x7c87,0x4ce4,0x5cc5,0x2c22,0x3c03,0x0c60,0x1c41,\n0xedae,0xfd8f,0xcdec,0xddcd,0xad2a,0xbd0b,0x8d68,0x9d49,\n0x7e97,0x6eb6,0x5ed5,0x4ef4,0x3e13,0x2e32,0x1e51,0x0e70,\n0xff9f,0xefbe,0xdfdd,0xcffc,0xbf1b,0xaf3a,0x9f59,0x8f78,\n0x9188,0x81a9,0xb1ca,0xa1eb,0xd10c,0xc12d,0xf14e,0xe16f,\n0x1080,0x00a1,0x30c2,0x20e3,0x5004,0x4025,0x7046,0x6067,\n0x83b9,0x9398,0xa3fb,0xb3da,0xc33d,0xd31c,0xe37f,0xf35e,\n0x02b1,0x1290,0x22f3,0x32d2,0x4235,0x5214,0x6277,0x7256,\n0xb5ea,0xa5cb,0x95a8,0x8589,0xf56e,0xe54f,0xd52c,0xc50d,\n0x34e2,0x24c3,0x14a0,0x0481,0x7466,0x6447,0x5424,0x4405,\n0xa7db,0xb7fa,0x8799,0x97b8,0xe75f,0xf77e,0xc71d,0xd73c,\n0x26d3,0x36f2,0x0691,0x16b0,0x6657,0x7676,0x4615,0x5634,\n0xd94c,0xc96d,0xf90e,0xe92f,0x99c8,0x89e9,0xb98a,0xa9ab,\n0x5844,0x4865,0x7806,0x6827,0x18c0,0x08e1,0x3882,0x28a3,\n0xcb7d,0xdb5c,0xeb3f,0xfb1e,0x8bf9,0x9bd8,0xabbb,0xbb9a,\n0x4a75,0x5a54,0x6a37,0x7a16,0x0af1,0x1ad0,0x2ab3,0x3a92,\n0xfd2e,0xed0f,0xdd6c,0xcd4d,0xbdaa,0xad8b,0x9de8,0x8dc9,\n0x7c26,0x6c07,0x5c64,0x4c45,0x3ca2,0x2c83,0x1ce0,0x0cc1,\n0xef1f,0xff3e,0xcf5d,0xdf7c,0xaf9b,0xbfba,0x8fd9,0x9ff8,\n0x6e17,0x7e36,0x4e55,0x5e74,0x2e93,0x3eb2,0x0ed1,0x1ef0,\n]\n\ndef crc_hqx(s,crc):\n for c in s:\n crc=((crc <<8)&0xff00)^crctab_hqx[((crc >>8)&0xff)^ord(c)]\n \n return crc\n \ndef rlecode_hqx(s):\n ''\n\n\n\n \n if not s:\n return ''\n result=[]\n prev=s[0]\n count=1\n \n \n \n \n if s[-1]=='!':\n s=s[1:]+'?'\n else :\n s=s[1:]+'!'\n \n for c in s:\n if c ==prev and count <255:\n count +=1\n else :\n if count ==1:\n if prev !='\\x90':\n result.append(prev)\n else :\n result.extend(['\\x90','\\x00'])\n elif count <4:\n if prev !='\\x90':\n result.extend([prev]*count)\n else :\n result.extend(['\\x90','\\x00']*count)\n else :\n if prev !='\\x90':\n result.extend([prev,'\\x90',chr(count)])\n else :\n result.extend(['\\x90','\\x00','\\x90',chr(count)])\n count=1\n prev=c\n \n return ''.join(result)\n \ndef rledecode_hqx(s):\n s=s.split('\\x90')\n result=[s[0]]\n prev=s[0]\n for snippet in s[1:]:\n count=ord(snippet[0])\n if count >0:\n result.append(prev[-1]*(count -1))\n prev=snippet\n else :\n result.append('\\x90')\n prev='\\x90'\n result.append(snippet[1:])\n \n return ''.join(result)\n 
\ncrc_32_tab=[\n0x00000000,0x77073096,0xee0e612c,0x990951ba,0x076dc419,\n0x706af48f,0xe963a535,0x9e6495a3,0x0edb8832,0x79dcb8a4,\n0xe0d5e91e,0x97d2d988,0x09b64c2b,0x7eb17cbd,0xe7b82d07,\n0x90bf1d91,0x1db71064,0x6ab020f2,0xf3b97148,0x84be41de,\n0x1adad47d,0x6ddde4eb,0xf4d4b551,0x83d385c7,0x136c9856,\n0x646ba8c0,0xfd62f97a,0x8a65c9ec,0x14015c4f,0x63066cd9,\n0xfa0f3d63,0x8d080df5,0x3b6e20c8,0x4c69105e,0xd56041e4,\n0xa2677172,0x3c03e4d1,0x4b04d447,0xd20d85fd,0xa50ab56b,\n0x35b5a8fa,0x42b2986c,0xdbbbc9d6,0xacbcf940,0x32d86ce3,\n0x45df5c75,0xdcd60dcf,0xabd13d59,0x26d930ac,0x51de003a,\n0xc8d75180,0xbfd06116,0x21b4f4b5,0x56b3c423,0xcfba9599,\n0xb8bda50f,0x2802b89e,0x5f058808,0xc60cd9b2,0xb10be924,\n0x2f6f7c87,0x58684c11,0xc1611dab,0xb6662d3d,0x76dc4190,\n0x01db7106,0x98d220bc,0xefd5102a,0x71b18589,0x06b6b51f,\n0x9fbfe4a5,0xe8b8d433,0x7807c9a2,0x0f00f934,0x9609a88e,\n0xe10e9818,0x7f6a0dbb,0x086d3d2d,0x91646c97,0xe6635c01,\n0x6b6b51f4,0x1c6c6162,0x856530d8,0xf262004e,0x6c0695ed,\n0x1b01a57b,0x8208f4c1,0xf50fc457,0x65b0d9c6,0x12b7e950,\n0x8bbeb8ea,0xfcb9887c,0x62dd1ddf,0x15da2d49,0x8cd37cf3,\n0xfbd44c65,0x4db26158,0x3ab551ce,0xa3bc0074,0xd4bb30e2,\n0x4adfa541,0x3dd895d7,0xa4d1c46d,0xd3d6f4fb,0x4369e96a,\n0x346ed9fc,0xad678846,0xda60b8d0,0x44042d73,0x33031de5,\n0xaa0a4c5f,0xdd0d7cc9,0x5005713c,0x270241aa,0xbe0b1010,\n0xc90c2086,0x5768b525,0x206f85b3,0xb966d409,0xce61e49f,\n0x5edef90e,0x29d9c998,0xb0d09822,0xc7d7a8b4,0x59b33d17,\n0x2eb40d81,0xb7bd5c3b,0xc0ba6cad,0xedb88320,0x9abfb3b6,\n0x03b6e20c,0x74b1d29a,0xead54739,0x9dd277af,0x04db2615,\n0x73dc1683,0xe3630b12,0x94643b84,0x0d6d6a3e,0x7a6a5aa8,\n0xe40ecf0b,0x9309ff9d,0x0a00ae27,0x7d079eb1,0xf00f9344,\n0x8708a3d2,0x1e01f268,0x6906c2fe,0xf762575d,0x806567cb,\n0x196c3671,0x6e6b06e7,0xfed41b76,0x89d32be0,0x10da7a5a,\n0x67dd4acc,0xf9b9df6f,0x8ebeeff9,0x17b7be43,0x60b08ed5,\n0xd6d6a3e8,0xa1d1937e,0x38d8c2c4,0x4fdff252,0xd1bb67f1,\n0xa6bc5767,0x3fb506dd,0x48b2364b,0xd80d2bda,0xaf0a1b4c,\n0x36034af6,0x41047a60,0xdf60efc3,0xa867df55,0x316e8eef,\n0x4669be79,0xcb61b38c,0xbc66831a,0x256fd2a0,0x5268e236,\n0xcc0c7795,0xbb0b4703,0x220216b9,0x5505262f,0xc5ba3bbe,\n0xb2bd0b28,0x2bb45a92,0x5cb36a04,0xc2d7ffa7,0xb5d0cf31,\n0x2cd99e8b,0x5bdeae1d,0x9b64c2b0,0xec63f226,0x756aa39c,\n0x026d930a,0x9c0906a9,0xeb0e363f,0x72076785,0x05005713,\n0x95bf4a82,0xe2b87a14,0x7bb12bae,0x0cb61b38,0x92d28e9b,\n0xe5d5be0d,0x7cdcefb7,0x0bdbdf21,0x86d3d2d4,0xf1d4e242,\n0x68ddb3f8,0x1fda836e,0x81be16cd,0xf6b9265b,0x6fb077e1,\n0x18b74777,0x88085ae6,0xff0f6a70,0x66063bca,0x11010b5c,\n0x8f659eff,0xf862ae69,0x616bffd3,0x166ccf45,0xa00ae278,\n0xd70dd2ee,0x4e048354,0x3903b3c2,0xa7672661,0xd06016f7,\n0x4969474d,0x3e6e77db,0xaed16a4a,0xd9d65adc,0x40df0b66,\n0x37d83bf0,0xa9bcae53,0xdebb9ec5,0x47b2cf7f,0x30b5ffe9,\n0xbdbdf21c,0xcabac28a,0x53b39330,0x24b4a3a6,0xbad03605,\n0xcdd70693,0x54de5729,0x23d967bf,0xb3667a2e,0xc4614ab8,\n0x5d681b02,0x2a6f2b94,0xb40bbe37,0xc30c8ea1,0x5a05df1b,\n0x2d02ef8d\n]\n\ndef crc32(s,crc=0):\n result=0\n crc=~int(crc)&0xffffffff\n \n for c in s:\n crc=crc_32_tab[(crc ^int(ord(c)))&0xff]^(crc >>8)\n \n \n \n result=crc ^0xffffffff\n \n if result >2 **31:\n result=((result+2 **31)%2 **32)-2 **31\n \n return result\n", ["_base64", "_binascii"]], "bisect": [".py", "''\n\ndef insort_right(a,x,lo=0,hi=None ):\n ''\n\n\n\n\n\n \n \n lo=bisect_right(a,x,lo,hi)\n a.insert(lo,x)\n \ndef bisect_right(a,x,lo=0,hi=None ):\n ''\n\n\n\n\n\n\n\n \n \n if lo <0:\n raise ValueError('lo must be non-negative')\n if hi is None :\n hi=len(a)\n while lo =9:\n names=day_name\n else :\n 
names=day_abbr\n return names[day][:width].center(width)\n \n def formatweekheader(self,width):\n ''\n\n \n return ' '.join(self.formatweekday(i,width)for i in self.iterweekdays())\n \n def formatmonthname(self,theyear,themonth,width,withyear=True ):\n ''\n\n \n s=month_name[themonth]\n if withyear:\n s=\"%s %r\"%(s,theyear)\n return s.center(width)\n \n def prmonth(self,theyear,themonth,w=0,l=0):\n ''\n\n \n print(self.formatmonth(theyear,themonth,w,l),end='')\n \n def formatmonth(self,theyear,themonth,w=0,l=0):\n ''\n\n \n w=max(2,w)\n l=max(1,l)\n s=self.formatmonthname(theyear,themonth,7 *(w+1)-1)\n s=s.rstrip()\n s +='\\n'*l\n s +=self.formatweekheader(w).rstrip()\n s +='\\n'*l\n for week in self.monthdays2calendar(theyear,themonth):\n s +=self.formatweek(week,w).rstrip()\n s +='\\n'*l\n return s\n \n def formatyear(self,theyear,w=2,l=1,c=6,m=3):\n ''\n\n \n w=max(2,w)\n l=max(1,l)\n c=max(2,c)\n colwidth=(w+1)*7 -1\n v=[]\n a=v.append\n a(repr(theyear).center(colwidth *m+c *(m -1)).rstrip())\n a('\\n'*l)\n header=self.formatweekheader(w)\n for (i,row)in enumerate(self.yeardays2calendar(theyear,m)):\n \n months=range(m *i+1,min(m *(i+1)+1,13))\n a('\\n'*l)\n names=(self.formatmonthname(theyear,k,colwidth,False )\n for k in months)\n a(formatstring(names,colwidth,c).rstrip())\n a('\\n'*l)\n headers=(header for k in months)\n a(formatstring(headers,colwidth,c).rstrip())\n a('\\n'*l)\n \n height=max(len(cal)for cal in row)\n for j in range(height):\n weeks=[]\n for cal in row:\n if j >=len(cal):\n weeks.append('')\n else :\n weeks.append(self.formatweek(cal[j],w))\n a(formatstring(weeks,colwidth,c).rstrip())\n a('\\n'*l)\n return ''.join(v)\n \n def pryear(self,theyear,w=0,l=0,c=6,m=3):\n ''\n print(self.formatyear(theyear,w,l,c,m),end='')\n \n \nclass HTMLCalendar(Calendar):\n ''\n\n \n \n \n cssclasses=[\"mon\",\"tue\",\"wed\",\"thu\",\"fri\",\"sat\",\"sun\"]\n \n \n cssclasses_weekday_head=cssclasses\n \n \n cssclass_noday=\"noday\"\n \n \n cssclass_month_head=\"month\"\n \n \n cssclass_month=\"month\"\n \n \n cssclass_year_head=\"year\"\n \n \n cssclass_year=\"year\"\n \n def formatday(self,day,weekday):\n ''\n\n \n if day ==0:\n \n return ' '%self.cssclass_noday\n else :\n return '%d'%(self.cssclasses[weekday],day)\n \n def formatweek(self,theweek):\n ''\n\n \n s=''.join(self.formatday(d,wd)for (d,wd)in theweek)\n return '%s'%s\n \n def formatweekday(self,day):\n ''\n\n \n return '%s'%(\n self.cssclasses_weekday_head[day],day_abbr[day])\n \n def formatweekheader(self):\n ''\n\n \n s=''.join(self.formatweekday(i)for i in self.iterweekdays())\n return '%s'%s\n \n def formatmonthname(self,theyear,themonth,withyear=True ):\n ''\n\n \n if withyear:\n s='%s %s'%(month_name[themonth],theyear)\n else :\n s='%s'%month_name[themonth]\n return '%s'%(\n self.cssclass_month_head,s)\n \n def formatmonth(self,theyear,themonth,withyear=True ):\n ''\n\n \n v=[]\n a=v.append\n a(''%(\n self.cssclass_month))\n a('\\n')\n a(self.formatmonthname(theyear,themonth,withyear=withyear))\n a('\\n')\n a(self.formatweekheader())\n a('\\n')\n for week in self.monthdays2calendar(theyear,themonth):\n a(self.formatweek(week))\n a('\\n')\n a('
')\n a('\\n')\n return ''.join(v)\n \n def formatyear(self,theyear,width=3):\n ''\n\n \n v=[]\n a=v.append\n width=max(width,1)\n a(''%\n self.cssclass_year)\n a('\\n')\n a(''%(\n width,self.cssclass_year_head,theyear))\n for i in range(January,January+12,width):\n \n months=range(i,min(i+width,13))\n a('')\n for m in months:\n a('')\n a('')\n a('
%s
')\n a(self.formatmonth(theyear,m,withyear=False ))\n a('
')\n return ''.join(v)\n \n def formatyearpage(self,theyear,width=3,css='calendar.css',encoding=None ):\n ''\n\n \n if encoding is None :\n encoding=sys.getdefaultencoding()\n v=[]\n a=v.append\n a('\\n'%encoding)\n a('\\n')\n a('\\n')\n a('\\n')\n a('\\n'%encoding)\n if css is not None :\n a('\\n'%css)\n a('Calendar for %d\\n'%theyear)\n a('\\n')\n a('\\n')\n a(self.formatyear(theyear,width))\n a('\\n')\n a('\\n')\n return ''.join(v).encode(encoding,\"xmlcharrefreplace\")\n \n \nclass different_locale:\n def __init__(self,locale):\n self.locale=locale\n \n def __enter__(self):\n self.oldlocale=_locale.getlocale(_locale.LC_TIME)\n _locale.setlocale(_locale.LC_TIME,self.locale)\n \n def __exit__(self,*args):\n _locale.setlocale(_locale.LC_TIME,self.oldlocale)\n \n \nclass LocaleTextCalendar(TextCalendar):\n ''\n\n\n\n\n \n \n def __init__(self,firstweekday=0,locale=None ):\n TextCalendar.__init__(self,firstweekday)\n if locale is None :\n locale=_locale.getdefaultlocale()\n self.locale=locale\n \n def formatweekday(self,day,width):\n with different_locale(self.locale):\n if width >=9:\n names=day_name\n else :\n names=day_abbr\n name=names[day]\n return name[:width].center(width)\n \n def formatmonthname(self,theyear,themonth,width,withyear=True ):\n with different_locale(self.locale):\n s=month_name[themonth]\n if withyear:\n s=\"%s %r\"%(s,theyear)\n return s.center(width)\n \n \nclass LocaleHTMLCalendar(HTMLCalendar):\n ''\n\n\n\n\n \n def __init__(self,firstweekday=0,locale=None ):\n HTMLCalendar.__init__(self,firstweekday)\n if locale is None :\n locale=_locale.getdefaultlocale()\n self.locale=locale\n \n def formatweekday(self,day):\n with different_locale(self.locale):\n s=day_abbr[day]\n return '%s'%(self.cssclasses[day],s)\n \n def formatmonthname(self,theyear,themonth,withyear=True ):\n with different_locale(self.locale):\n s=month_name[themonth]\n if withyear:\n s='%s %s'%(s,theyear)\n return '%s'%s\n \n \n \nc=TextCalendar()\n\nfirstweekday=c.getfirstweekday\n\ndef setfirstweekday(firstweekday):\n if not MONDAY <=firstweekday <=SUNDAY:\n raise IllegalWeekdayError(firstweekday)\n c.firstweekday=firstweekday\n \nmonthcalendar=c.monthdayscalendar\nprweek=c.prweek\nweek=c.formatweek\nweekheader=c.formatweekheader\nprmonth=c.prmonth\nmonth=c.formatmonth\ncalendar=c.formatyear\nprcal=c.pryear\n\n\n\n_colwidth=7 *3 -1\n_spacing=6\n\n\ndef format(cols,colwidth=_colwidth,spacing=_spacing):\n ''\n print(formatstring(cols,colwidth,spacing))\n \n \ndef formatstring(cols,colwidth=_colwidth,spacing=_spacing):\n ''\n spacing *=' '\n return spacing.join(c.center(colwidth)for c in cols)\n \n \nEPOCH=1970\n_EPOCH_ORD=datetime.date(EPOCH,1,1).toordinal()\n\n\ndef timegm(tuple):\n ''\n year,month,day,hour,minute,second=tuple[:6]\n days=datetime.date(year,month,1).toordinal()-_EPOCH_ORD+day -1\n hours=days *24+hour\n minutes=hours *60+minute\n seconds=minutes *60+second\n return seconds\n \n \ndef main(args):\n import argparse\n parser=argparse.ArgumentParser()\n textgroup=parser.add_argument_group('text only arguments')\n htmlgroup=parser.add_argument_group('html only arguments')\n textgroup.add_argument(\n \"-w\",\"--width\",\n type=int,default=2,\n help=\"width of date column (default 2)\"\n )\n textgroup.add_argument(\n \"-l\",\"--lines\",\n type=int,default=1,\n help=\"number of lines for each week (default 1)\"\n )\n textgroup.add_argument(\n \"-s\",\"--spacing\",\n type=int,default=6,\n help=\"spacing between months (default 6)\"\n )\n textgroup.add_argument(\n \"-m\",\"--months\",\n 
type=int,default=3,\n help=\"months per row (default 3)\"\n )\n htmlgroup.add_argument(\n \"-c\",\"--css\",\n default=\"calendar.css\",\n help=\"CSS to use for page\"\n )\n parser.add_argument(\n \"-L\",\"--locale\",\n default=None ,\n help=\"locale to be used from month and weekday names\"\n )\n parser.add_argument(\n \"-e\",\"--encoding\",\n default=None ,\n help=\"encoding to use for output\"\n )\n parser.add_argument(\n \"-t\",\"--type\",\n default=\"text\",\n choices=(\"text\",\"html\"),\n help=\"output type (text or html)\"\n )\n parser.add_argument(\n \"year\",\n nargs='?',type=int,\n help=\"year number (1-9999)\"\n )\n parser.add_argument(\n \"month\",\n nargs='?',type=int,\n help=\"month number (1-12, text only)\"\n )\n \n options=parser.parse_args(args[1:])\n \n if options.locale and not options.encoding:\n parser.error(\"if --locale is specified --encoding is required\")\n sys.exit(1)\n \n locale=options.locale,options.encoding\n \n if options.type ==\"html\":\n if options.locale:\n cal=LocaleHTMLCalendar(locale=locale)\n else :\n cal=HTMLCalendar()\n encoding=options.encoding\n if encoding is None :\n encoding=sys.getdefaultencoding()\n optdict=dict(encoding=encoding,css=options.css)\n write=sys.stdout.buffer.write\n if options.year is None :\n write(cal.formatyearpage(datetime.date.today().year,**optdict))\n elif options.month is None :\n write(cal.formatyearpage(options.year,**optdict))\n else :\n parser.error(\"incorrect number of arguments\")\n sys.exit(1)\n else :\n if options.locale:\n cal=LocaleTextCalendar(locale=locale)\n else :\n cal=TextCalendar()\n optdict=dict(w=options.width,l=options.lines)\n if options.month is None :\n optdict[\"c\"]=options.spacing\n optdict[\"m\"]=options.months\n if options.year is None :\n result=cal.formatyear(datetime.date.today().year,**optdict)\n elif options.month is None :\n result=cal.formatyear(options.year,**optdict)\n else :\n result=cal.formatmonth(options.year,options.month,**optdict)\n write=sys.stdout.write\n if options.encoding:\n result=result.encode(options.encoding)\n write=sys.stdout.buffer.write\n write(result)\n \n \nif __name__ ==\"__main__\":\n main(sys.argv)\n", ["argparse", "datetime", "itertools", "locale", "sys"]], "cmath": [".py", "\n\n\n\n\n\n\n\n\n\nimport math\nimport sys\n\ndef _takes_complex(func):\n def decorated(x):\n if isinstance(x,complex):\n return func(x)\n elif type(x)in [int,float]:\n return func(complex(x))\n elif hasattr(x,'__complex__'):\n c=x.__complex__()\n if not isinstance(c,complex):\n raise TypeError(\"A complex number is required\")\n else :\n return func(c)\n elif hasattr(x,'__float__'):\n try :\n c=complex(x.__float__(),0)\n except :\n raise TypeError(\"A complex number is required\")\n return func(c)\n elif hasattr(x,'__index__'):\n try :\n c=complex(x.__index__(),0)\n except :\n raise TypeError(\"A complex number is required\")\n return func(c)\n else :\n raise TypeError(\"A complex number is required\")\n if hasattr(func,'__doc__'):\n decorated.__doc__=func.__doc__\n if hasattr(func,'__name__'):\n decorated.__name__=func.__name__\n return decorated\n \n@_takes_complex\ndef isfinite(x):\n return math.isfinite(x.imag)and math.isfinite(x.real)\n \n@_takes_complex\ndef phase(x):\n ''\n return math.atan2(x.imag,x.real)\n \n@_takes_complex\ndef polar(x):\n ''\n\n\n\n\n \n phi=math.atan2(x.imag,x.real)\n if math.isnan(x.imag):\n if math.isinf(x.real):\n return abs(x.real),nan\n return nan,nan\n elif math.isinf(x.imag):\n r=inf\n elif math.isinf(x.real):\n r=inf\n else :\n r=math.sqrt(x.real 
**2+x.imag **2)\n if math.isinf(r):\n raise OverflowError(\"math range error\")\n return r,phi\n \ndef rect(r,phi):\n ''\n\n \n if math.isnan(r):\n if not math.isnan(phi)and not phi:\n return complex(nan,0)\n return complex(nan,nan)\n elif math.isnan(phi):\n if not r:\n return complex(0,0)\n elif math.isinf(r):\n return complex(inf,nan)\n return complex(nan,nan)\n if math.isinf(r)or math.isinf(phi):\n \n \n if math.isinf(phi)and r !=.0 and not math.isnan(r):\n raise ValueError(\"math domain error\")\n \n \n \n \n if -inf 0:\n _real=math.copysign(inf,math.cos(phi))\n _imag=math.copysign(inf,math.sin(phi))\n else :\n _real=-math.copysign(inf,math.cos(phi));\n _imag=-math.copysign(inf,math.sin(phi));\n return complex(_real,_imag)\n return _SPECIAL_VALUE(complex(r,phi),_rect_special_values)\n \n else :\n if phi ==.0:\n \n \n \n return complex(r,phi *r)\n else :\n return complex(r *math.cos(phi),r *math.sin(phi))\n \n@_takes_complex\ndef sqrt(x):\n ''\n\n\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n s,d,ax,ay=.0,.0,math.fabs(x.real),math.fabs(x.imag)\n \n ret=_SPECIAL_VALUE(x,_sqrt_special_values)\n if ret is not None :\n return ret\n \n if math.isinf(x.imag):\n return complex(inf,x.imag)\n \n if x.real ==.0 and x.imag ==.0:\n _real=.0\n _imag=x.imag\n return complex(_real,_imag)\n \n if ay ==0:\n s=math.sqrt(ax)\n d=0\n elif ax 0. or ay >0.):\n \n AX=math.ldexp(ax,_CM_SCALE_UP)\n AY=math.ldexp(ay,_CM_SCALE_UP)\n S=math.sqrt((AX+math.hypot(AX,AY))/2.0)\n D=AY /(2 *S)\n s=math.ldexp(S,_CM_SCALE_DOWN)\n d=math.ldexp(D,_CM_SCALE_DOWN)\n else :\n ax /=8.0 ;\n s=2.0 *math.sqrt(ax+math.hypot(ax,ay /8.0));\n d=ay /(2.0 *s)\n \n if x.real >=.0:\n _real=s ;\n _imag=math.copysign(d,x.imag)\n else :\n _real=d ;\n _imag=math.copysign(s,x.imag)\n \n return complex(_real,_imag)\n \n@_takes_complex\ndef acos(x):\n ''\n\n\n\n\n \n \n ret=_SPECIAL_VALUE(x,_acos_special_values)\n if ret is not None :\n if isinstance(ret,Exception):\n raise ret\n return ret\n \n if math.fabs(x.real)>_CM_LARGE_DOUBLE or math.fabs(x.imag)>_CM_LARGE_DOUBLE:\n \n _real=math.atan2(math.fabs(x.imag),x.real)\n \n \n \n if x.real <0:\n _imag=-math.copysign(math.log(math.hypot(x.real /2.,x.imag /2.))+_M_LN2 *2.,x.imag);\n else :\n _imag=math.copysign(math.log(math.hypot(x.real /2.,x.imag /2.))+_M_LN2 *2.,-x.imag);\n elif math.isnan(x.real):\n return complex(nan,nan)\n elif math.isnan(x.imag):\n if x.real ==0:\n return complex(pi /2,nan)\n return complex(nan,nan)\n else :\n s1=complex(float(1 -x.real),-x.imag)\n s1=sqrt(s1)\n s2=complex(1.0+x.real,x.imag)\n s2=sqrt(s2)\n _real=2.0 *math.atan2(s1.real,s2.real)\n _imag=math.asinh(s2.real *s1.imag -s2.imag *s1.real)\n if not x.imag:\n if x.real >1:\n _real=0\n elif x.real <-1:\n _real=math.pi\n \n return complex(_real,_imag)\n \n@_takes_complex\ndef acosh(x):\n ''\n\n\n\n \n ret=_SPECIAL_VALUE(x,_acosh_special_values)\n if ret is not None :\n return ret\n \n if math.fabs(x.real)>_CM_LARGE_DOUBLE or math.fabs(x.imag)>_CM_LARGE_DOUBLE:\n \n _real=math.log(math.hypot(x.real /2.0,x.imag /2.0))+_M_LN2 *2.0\n _imag=math.atan2(x.imag,x.real);\n else :\n s1=sqrt(complex(x.real -1.0,x.imag))\n s2=sqrt(complex(x.real+1.0,x.imag))\n _real=math.asinh(s1.real *s2.real+s1.imag *s2.imag)\n _imag=2. 
*math.atan2(s1.imag,s2.real)\n \n return complex(_real,_imag)\n \n@_takes_complex\ndef asin(x):\n ''\n\n\n\n \n \n s=complex(-x.imag,x.real)\n s=asinh(s)\n return complex(s.imag,-s.real)\n \n@_takes_complex\ndef asinh(x):\n ''\n\n\n\n\n \n ret=_SPECIAL_VALUE(x,_asinh_special_values)\n if ret is not None :\n return ret\n \n if math.fabs(x.real)>_CM_LARGE_DOUBLE or math.fabs(x.imag)>_CM_LARGE_DOUBLE:\n if x.imag >=.0:\n _real=math.copysign(math.log(math.hypot(x.real /2.,x.imag /2.))+_M_LN2 *2.,x.real)\n else :\n _real=-math.copysign(math.log(math.hypot(x.real /2.,x.imag /2.))+_M_LN2 *2.,-x.real)\n _imag=math.atan2(x.imag,math.fabs(x.real))\n else :\n s1=sqrt(complex(1.0+x.imag,-x.real))\n s2=sqrt(complex(1.0 -x.imag,x.real))\n _real=math.asinh(s1.real *s2.imag -s2.real *s1.imag)\n _imag=math.atan2(x.imag,s1.real *s2.real -s1.imag *s2.imag)\n return complex(_real,_imag)\n \n@_takes_complex\ndef atan(x):\n ''\n\n\n\n\n \n ret=_SPECIAL_VALUE(x,_atan_special_values)\n if ret is not None :\n return ret\n \n if isinf(x):\n return complex(math.copysign(1,x.real)*pi /2,\n math.copysign(0,x.imag))\n s=atanh(complex(-x.imag,x.real))\n return complex(s.imag,-s.real)\n \n@_takes_complex\ndef atanh(x):\n ''\n\n\n\n\n \n \n ret=_SPECIAL_VALUE(x,_atanh_special_values)\n if ret is not None :\n return ret\n \n if isinf(x):\n return complex(math.copysign(0,x.real),\n math.copysign(1,x.imag)*pi /2)\n \n \n if x.real <.0:\n return -(atanh(-x))\n \n ay=math.fabs(x.imag)\n \n if x.real >_CM_SQRT_LARGE_DOUBLE or ay >_CM_SQRT_LARGE_DOUBLE:\n \n \n \n \n \n h=math.hypot(x.real /2.,x.imag /2.)\n _real=x.real /4. /h /h\n \n \n \n \n \n \n _imag=-math.copysign(math.pi /2.,-x.imag)\n \n elif x.real ==1.0 and ay <_CM_SQRT_DBL_MIN:\n \n \n if (ay ==.0):\n raise ValueError(\"math domain error\")\n else :\n _real=-math.log(math.sqrt(ay)/math.sqrt(math.hypot(ay,2.)))\n _imag=math.copysign(math.atan2(2.0,-ay)/2,x.imag)\n \n else :\n \n _real=math.log1p(4. *x.real /((1 -x.real)*(1 -x.real)+ay *ay))/4.\n _imag=-math.atan2(-2. 
*x.imag,(1 -x.real)*(1+x.real)-ay *ay)/2.\n errno=0\n \n return complex(_real,_imag)\n \n@_takes_complex\ndef cos(x):\n ''\n return cosh(complex(-x.imag,x.real))\n \n@_takes_complex\ndef cosh(x):\n ''\n \n ret=_SPECIAL_VALUE(x,_cosh_special_values)\n if ret is not None :\n if isinstance(ret,Exception):\n raise ret\n return ret\n \n if not math.isinf(x.real)and math.fabs(x.real)>_CM_LOG_LARGE_DOUBLE:\n \n \n x_minus_one=x.real -math.copysign(1.0,x.real)\n _real=cos(x.imag)*math.cosh(x_minus_one)*math.e\n _imag=sin(x.imag)*math.sinh(x_minus_one)*math.e\n elif math.isinf(x.real)and x.imag ==0:\n if x.real >0:\n return x\n else :\n return complex(inf,-x.imag)\n elif math.isinf(x.imag):\n raise ValueError(\"math domain error\")\n else :\n _real=math.cos(x.imag)*math.cosh(x.real)\n _imag=math.sin(x.imag)*math.sinh(x.real)\n \n ret=complex(_real,_imag)\n return ret\n \n@_takes_complex\ndef exp(x):\n ''\n if math.isinf(x.real)or math.isinf(x.imag):\n \n if math.isinf(x.imag)and (-inf 0):\n raise ValueError(\"math domain error\")\n \n if math.isinf(x.real)and -inf 0:\n _real=math.copysign(inf,math.cos(x.imag))\n _imag=math.copysign(inf,math.sin(x.imag))\n else :\n _real=math.copysign(.0,math.cos(x.imag))\n _imag=math.copysign(.0,math.sin(x.imag))\n return complex(_real,_imag)\n \n return _SPECIAL_VALUE(x,_exp_special_values)\n \n if math.isnan(x.real)and x.imag ==0:\n return x\n \n if x.real >_CM_LOG_LARGE_DOUBLE:\n l=math.exp(x.real -1.);\n _real=l *math.cos(x.imag)*math.e\n _imag=l *math.sin(x.imag)*math.e\n else :\n l=math.exp(x.real);\n _real=l *math.cos(x.imag)\n _imag=l *math.sin(x.imag)\n \n if math.isinf(_real)or math.isinf(_imag):\n raise OverflowError()\n \n return complex(_real,_imag)\n \ndef isclose(x,y,*,rel_tol=1e-09,abs_tol=0.0):\n try :\n complex(x)\n except ValueError:\n raise TypeError(f\"must be a number, not {x.__class__.__name__}\")\n try :\n complex(y)\n except ValueError:\n raise TypeError(f\"must be a number, not {y.__class__.__name__}\")\n rel_tol=float(rel_tol)\n abs_tol=float(abs_tol)\n if rel_tol <0.0 or abs_tol <0.0:\n raise ValueError('tolerances must be non-negative')\n if x is inf or x is _NINF or y is inf or y is _NINF:\n return y is x\n if x is nan or y is nan:\n return False\n return abs(x -y)<=max(rel_tol *float(max(abs(x),abs(y))),abs_tol)\n \n@_takes_complex\ndef isinf(x):\n ''\n return math.isinf(x.real)or math.isinf(x.imag)\n \n@_takes_complex\ndef isnan(x):\n ''\n return math.isnan(x.real)or math.isnan(x.imag)\n \n \n@_takes_complex\ndef _to_complex(x):\n return x\n \ndef log(x,base=None ):\n ''\n\n\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n x=_to_complex(x)\n \n \n \n \n \n \n denom=1 if base is None else log(base)\n ''\n\n\n\n \n \n ret=_SPECIAL_VALUE(x,_log_special_values)\n if ret is not None :\n return ret\n \n if math.isnan(x.real):\n return complex(inf if math.isinf(x.imag)else nan,nan)\n elif math.isnan(x.imag):\n return complex(inf if math.isinf(x.real)else nan,nan)\n \n ax=math.fabs(x.real)\n ay=math.fabs(x.imag)\n \n if ax >_CM_LARGE_DOUBLE or ay >_CM_LARGE_DOUBLE:\n _real=math.log(math.hypot(ax /2.0,ay /2.0))+_M_LN2\n elif ax .0 or ay >.0:\n \n _real=math.log(math.hypot(math.ldexp(ax,sys.float_info.mant_dig),math.ldexp(ay,sys.float_info.mant_dig)))-sys.float_info.mant_dig *_M_LN2\n else :\n \n raise ValueError(\"math domain error\")\n _real=-inf\n _imag=math.atan2(x.imag,x.real)\n else :\n h=math.hypot(ax,ay)\n _real=math.log(h)/denom\n if not ay:\n if type(_real)==complex:\n return _real\n if x.real <0:\n 
return complex(_real,math.copysign(math.pi,x.imag))\n return complex(_real,x.imag)\n _imag=math.atan2(x.imag,x.real)\n return complex(_real,_imag)\n \n@_takes_complex\ndef log10(x):\n ''\n\n\n\n \n ret=log(x)\n _real=ret.real /_M_LN10\n _imag=ret.imag /_M_LN10\n return complex(_real,_imag)\n \n@_takes_complex\ndef sin(x):\n ''\n \n s=complex(-x.imag,x.real)\n s=sinh(s)\n return complex(s.imag,-s.real)\n \n@_takes_complex\ndef sinh(x):\n ''\n \n ret=_SPECIAL_VALUE(x,_sinh_special_values)\n if ret is not None :\n if isinstance(ret,Exception):\n raise ret\n return ret\n \n if math.isinf(x.real)or math.isinf(x.imag):\n \n \n if math.isinf(x.imag)and not math.isnan(x.real):\n raise ValueError(\"math domain error\")\n \n if math.isinf(x.real)and -inf 0:\n _real=math.copysign(inf,math.cos(x.imag))\n _imag=math.copysign(inf,math.sin(x.imag))\n else :\n _real=-math.copysign(inf,math.cos(x.imag))\n _imag=math.copysign(inf,math.sin(x.imag))\n return complex(_real,_imag)\n \n return _SPECIAL_VALUE(x,_sinh_special_values)\n \n if math.fabs(x.real)>_CM_LOG_LARGE_DOUBLE:\n x_minus_one=x.real -math.copysign(1.0,x.real)\n z=complex(x_minus_one,x.imag)\n _real=math.cos(z.imag)*math.sinh(z.real)*math.e\n _imag=math.sin(z.imag)*math.cosh(z.real)*math.e\n else :\n _real=math.cos(x.imag)*math.sinh(x.real)\n _imag=math.sin(x.imag)*math.cosh(x.real)\n \n if math.isinf(_real)or math.isinf(_imag):\n raise OverflowError()\n \n return complex(_real,_imag)\n \n@_takes_complex\ndef tan(x):\n ''\n if math.isnan(x.real):\n if math.isinf(x.imag):\n return complex(0,math.copysign(1,x.imag))\n return complex(nan,nan)\n elif math.isnan(x.imag):\n if not x.real:\n return complex(math.copysign(0,x.real),nan)\n return complex(nan,nan)\n s=tanh(complex(-x.imag,x.real))\n return complex(s.imag,-s.real)\n \n@_takes_complex\ndef tanh(x):\n ''\n ''\n\n\n \n \n \n \n \n \n \n \n \n \n if math.isnan(x.real):\n if x.imag ==0:\n return complex(nan,math.copysign(0,x.imag))\n return complex(nan,nan)\n elif math.isnan(x.imag):\n if math.isinf(x.real):\n return complex(math.copysign(1,x.real),0)\n return complex(nan,nan)\n \n if isinf(x):\n if math.isinf(x.imag)and -inf 0:\n _real=1.0\n _imag=math.copysign(.0,2.0 *math.sin(x.imag)*math.cos(x.imag))\n else :\n _real=-1.0\n _imag=math.copysign(.0,2. *math.sin(x.imag)*math.cos(x.imag))\n return complex(_real,_imag)\n return _SPECIAL_VALUE(x,_tanh_special_values)\n \n \n if math.fabs(x.real)>_CM_LOG_LARGE_DOUBLE:\n _real=math.copysign(1.,x.real)\n _imag=4. *math.sin(x.imag)*math.cos(x.imag)*math.exp(-2. 
*math.fabs(x.real))\n else :\n tx=math.tanh(x.real)\n ty=math.tan(x.imag)\n cx=1.0 /math.cosh(x.real)\n txty=tx *ty\n denom=1.+txty *txty\n _real=tx *(1.+ty *ty)/denom\n _imag=((ty /denom)*cx)*cx\n return complex(_real,_imag)\n \n \nFunctionType=type(_takes_complex)\nlocs=locals()\nkeys=list(locs.keys())\nfor f in keys:\n if type(locs[f])is FunctionType and not f.startswith(\"_\"):\n locals()[f]=type(abs)(locals()[f])\n \npi=math.pi\ne=math.e\ntau=math.tau\n\n_CM_LARGE_DOUBLE=sys.float_info.max /4\n_CM_SQRT_LARGE_DOUBLE=math.sqrt(_CM_LARGE_DOUBLE)\n_CM_LOG_LARGE_DOUBLE=math.log(_CM_LARGE_DOUBLE)\n_CM_SQRT_DBL_MIN=math.sqrt(sys.float_info.min)\n_M_LN2=0.6931471805599453094\n_M_LN10=2.302585092994045684\n\nif sys.float_info.radix ==2:\n _CM_SCALE_UP=int((2 *(sys.float_info.mant_dig /2)+1))\nelif sys.float_info.radix ==16:\n _CM_SCALE_UP=int((4 *sys.float_info.mant_dig+1))\nelse :\n raise (\"cmath implementation expects the float base to be either 2 or 16, got \"+str(sys.float_info.radix)+\" instead.\")\n_CM_SCALE_DOWN=int((-(_CM_SCALE_UP+1)/2))\n\ninf=float('inf')\ninfj=complex(0.0,inf)\n_NINF=float('-inf')\nnan=float('nan')\nnanj=complex(0.0,float('nan'))\n\n_P14=0.25 *pi\n_P12=0.5 *pi\n_P34=0.75 *pi\n_U=-9.5426319407711027e33\n\n\n_ST_NINF=0\n_ST_NEG=1\n_ST_NZERO=2\n_ST_PZERO=3\n_ST_POS=4\n_ST_PINF=5\n_ST_NAN=6\n\n\ndef _SPECIAL_VALUE(z,table):\n if not math.isfinite(z.real)or not math.isfinite(z.imag):\n return table[_special_type(z.real)][_special_type(z.imag)]\n else :\n return None\n \ndef _special_type(x):\n if -inf ):\"\n misc_header=\"Miscellaneous help topics:\"\n undoc_header=\"Undocumented commands:\"\n nohelp=\"*** No help on %s\"\n use_rawinput=1\n \n def __init__(self,completekey='tab',stdin=None ,stdout=None ):\n ''\n\n\n\n\n\n\n\n\n \n if stdin is not None :\n self.stdin=stdin\n else :\n self.stdin=sys.stdin\n if stdout is not None :\n self.stdout=stdout\n else :\n self.stdout=sys.stdout\n self.cmdqueue=[]\n self.completekey=completekey\n \n def cmdloop(self,intro=None ):\n ''\n\n\n\n \n \n self.preloop()\n if self.use_rawinput and self.completekey:\n try :\n import readline\n self.old_completer=readline.get_completer()\n readline.set_completer(self.complete)\n readline.parse_and_bind(self.completekey+\": complete\")\n except ImportError:\n pass\n try :\n if intro is not None :\n self.intro=intro\n if self.intro:\n self.stdout.write(str(self.intro)+\"\\n\")\n stop=None\n while not stop:\n if self.cmdqueue:\n line=self.cmdqueue.pop(0)\n else :\n if self.use_rawinput:\n try :\n line=input(self.prompt)\n except EOFError:\n line='EOF'\n else :\n self.stdout.write(self.prompt)\n self.stdout.flush()\n line=self.stdin.readline()\n if not len(line):\n line='EOF'\n else :\n line=line.rstrip('\\r\\n')\n line=self.precmd(line)\n stop=self.onecmd(line)\n stop=self.postcmd(stop,line)\n self.postloop()\n finally :\n if self.use_rawinput and self.completekey:\n try :\n import readline\n readline.set_completer(self.old_completer)\n except ImportError:\n pass\n \n \n def precmd(self,line):\n ''\n\n\n \n return line\n \n def postcmd(self,stop,line):\n ''\n return stop\n \n def preloop(self):\n ''\n pass\n \n def postloop(self):\n ''\n\n\n \n pass\n \n def parseline(self,line):\n ''\n\n\n \n line=line.strip()\n if not line:\n return None ,None ,line\n elif line[0]=='?':\n line='help '+line[1:]\n elif line[0]=='!':\n if hasattr(self,'do_shell'):\n line='shell '+line[1:]\n else :\n return None ,None ,line\n i,n=0,len(line)\n while i 0:\n cmd,args,foo=self.parseline(line)\n if cmd =='':\n 
compfunc=self.completedefault\n else :\n try :\n compfunc=getattr(self,'complete_'+cmd)\n except AttributeError:\n compfunc=self.completedefault\n else :\n compfunc=self.completenames\n self.completion_matches=compfunc(text,line,begidx,endidx)\n try :\n return self.completion_matches[state]\n except IndexError:\n return None\n \n def get_names(self):\n \n \n return dir(self.__class__)\n \n def complete_help(self,*args):\n commands=set(self.completenames(*args))\n topics=set(a[5:]for a in self.get_names()\n if a.startswith('help_'+args[0]))\n return list(commands |topics)\n \n def do_help(self,arg):\n ''\n if arg:\n \n try :\n func=getattr(self,'help_'+arg)\n except AttributeError:\n try :\n doc=getattr(self,'do_'+arg).__doc__\n if doc:\n self.stdout.write(\"%s\\n\"%str(doc))\n return\n except AttributeError:\n pass\n self.stdout.write(\"%s\\n\"%str(self.nohelp %(arg,)))\n return\n func()\n else :\n names=self.get_names()\n cmds_doc=[]\n cmds_undoc=[]\n help={}\n for name in names:\n if name[:5]=='help_':\n help[name[5:]]=1\n names.sort()\n \n prevname=''\n for name in names:\n if name[:3]=='do_':\n if name ==prevname:\n continue\n prevname=name\n cmd=name[3:]\n if cmd in help:\n cmds_doc.append(cmd)\n del help[cmd]\n elif getattr(self,name).__doc__:\n cmds_doc.append(cmd)\n else :\n cmds_undoc.append(cmd)\n self.stdout.write(\"%s\\n\"%str(self.doc_leader))\n self.print_topics(self.doc_header,cmds_doc,15,80)\n self.print_topics(self.misc_header,list(help.keys()),15,80)\n self.print_topics(self.undoc_header,cmds_undoc,15,80)\n \n def print_topics(self,header,cmds,cmdlen,maxcol):\n if cmds:\n self.stdout.write(\"%s\\n\"%str(header))\n if self.ruler:\n self.stdout.write(\"%s\\n\"%str(self.ruler *len(header)))\n self.columnize(cmds,maxcol -1)\n self.stdout.write(\"\\n\")\n \n def columnize(self,list,displaywidth=80):\n ''\n\n\n\n \n if not list:\n self.stdout.write(\"\\n\")\n return\n \n nonstrings=[i for i in range(len(list))\n if not isinstance(list[i],str)]\n if nonstrings:\n raise TypeError(\"list[i] not a string for i in %s\"\n %\", \".join(map(str,nonstrings)))\n size=len(list)\n if size ==1:\n self.stdout.write('%s\\n'%str(list[0]))\n return\n \n for nrows in range(1,len(list)):\n ncols=(size+nrows -1)//nrows\n colwidths=[]\n totwidth=-2\n for col in range(ncols):\n colwidth=0\n for row in range(nrows):\n i=row+nrows *col\n if i >=size:\n break\n x=list[i]\n colwidth=max(colwidth,len(x))\n colwidths.append(colwidth)\n totwidth +=colwidth+2\n if totwidth >displaywidth:\n break\n if totwidth <=displaywidth:\n break\n else :\n nrows=len(list)\n ncols=1\n colwidths=[0]\n for row in range(nrows):\n texts=[]\n for col in range(ncols):\n i=row+nrows *col\n if i >=size:\n x=\"\"\n else :\n x=list[i]\n texts.append(x)\n while texts and not texts[-1]:\n del texts[-1]\n for col in range(len(texts)):\n texts[col]=texts[col].ljust(colwidths[col])\n self.stdout.write(\"%s\\n\"%str(\" \".join(texts)))\n", ["readline", "string", "sys"]], "code": [".py", "''\n\n\n\n\n\n\nimport sys\nimport traceback\nfrom codeop import CommandCompiler,compile_command\n\n__all__=[\"InteractiveInterpreter\",\"InteractiveConsole\",\"interact\",\n\"compile_command\"]\n\nclass InteractiveInterpreter:\n ''\n\n\n\n\n\n \n \n def __init__(self,locals=None ):\n ''\n\n\n\n\n\n\n \n if locals is None :\n locals={\"__name__\":\"__console__\",\"__doc__\":None }\n self.locals=locals\n self.compile=CommandCompiler()\n \n def runsource(self,source,filename=\"\",symbol=\"single\"):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n try 
:\n code=self.compile(source,filename,symbol)\n except (OverflowError,SyntaxError,ValueError):\n \n self.showsyntaxerror(filename)\n return False\n \n if code is None :\n \n return True\n \n \n self.runcode(code)\n return False\n \n def runcode(self,code):\n ''\n\n\n\n\n\n\n\n\n\n \n try :\n exec(code,self.locals)\n except SystemExit:\n raise\n except :\n self.showtraceback()\n \n def showsyntaxerror(self,filename=None ):\n ''\n\n\n\n\n\n\n\n\n\n \n type,value,tb=sys.exc_info()\n sys.last_type=type\n sys.last_value=value\n sys.last_traceback=tb\n if filename and type is SyntaxError:\n \n try :\n msg,(dummy_filename,lineno,offset,line)=value.args\n except ValueError:\n \n pass\n else :\n \n value=SyntaxError(msg,(filename,lineno,offset,line))\n sys.last_value=value\n if sys.excepthook is sys.__excepthook__:\n lines=traceback.format_exception_only(type,value)\n self.write(''.join(lines))\n else :\n \n \n sys.excepthook(type,value,tb)\n \n def showtraceback(self):\n ''\n\n\n\n\n\n \n sys.last_type,sys.last_value,last_tb=ei=sys.exc_info()\n sys.last_traceback=last_tb\n try :\n lines=traceback.format_exception(ei[0],ei[1],last_tb.tb_next)\n if sys.excepthook is sys.__excepthook__:\n self.write(''.join(lines))\n else :\n \n \n sys.excepthook(ei[0],ei[1],last_tb)\n finally :\n last_tb=ei=None\n \n def write(self,data):\n ''\n\n\n\n\n \n sys.stderr.write(data)\n \n \nclass InteractiveConsole(InteractiveInterpreter):\n ''\n\n\n\n\n \n \n def __init__(self,locals=None ,filename=\"\"):\n ''\n\n\n\n\n\n\n\n \n InteractiveInterpreter.__init__(self,locals)\n self.filename=filename\n self.resetbuffer()\n \n def resetbuffer(self):\n ''\n self.buffer=[]\n \n def interact(self,banner=None ,exitmsg=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n try :\n sys.ps1\n except AttributeError:\n sys.ps1=\">>> \"\n try :\n sys.ps2\n except AttributeError:\n sys.ps2=\"... 
\"\n cprt='Type \"help\", \"copyright\", \"credits\" or \"license\" for more information.'\n if banner is None :\n self.write(\"Python %s on %s\\n%s\\n(%s)\\n\"%\n (sys.version,sys.platform,cprt,\n self.__class__.__name__))\n elif banner:\n self.write(\"%s\\n\"%str(banner))\n more=0\n while 1:\n try :\n if more:\n prompt=sys.ps2\n else :\n prompt=sys.ps1\n try :\n line=self.raw_input(prompt)\n except EOFError:\n self.write(\"\\n\")\n break\n else :\n more=self.push(line)\n except KeyboardInterrupt:\n self.write(\"\\nKeyboardInterrupt\\n\")\n self.resetbuffer()\n more=0\n if exitmsg is None :\n self.write('now exiting %s...\\n'%self.__class__.__name__)\n elif exitmsg !='':\n self.write('%s\\n'%exitmsg)\n \n def push(self,line):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n self.buffer.append(line)\n source=\"\\n\".join(self.buffer)\n more=self.runsource(source,self.filename)\n if not more:\n self.resetbuffer()\n return more\n \n def raw_input(self,prompt=\"\"):\n ''\n\n\n\n\n\n\n\n\n \n return input(prompt)\n \n \n \ndef interact(banner=None ,readfunc=None ,local=None ,exitmsg=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n console=InteractiveConsole(local)\n if readfunc is not None :\n console.raw_input=readfunc\n else :\n try :\n import readline\n except ImportError:\n pass\n console.interact(banner,exitmsg)\n \n \nif __name__ ==\"__main__\":\n import argparse\n \n parser=argparse.ArgumentParser()\n parser.add_argument('-q',action='store_true',\n help=\"don't print version and copyright messages\")\n args=parser.parse_args()\n if args.q or sys.flags.quiet:\n banner=''\n else :\n banner=None\n interact(banner)\n", ["argparse", "codeop", "readline", "sys", "traceback"]], "codecs": [".py", "''\n\n\n\n\n\n\n\n\nimport builtins\nimport sys\n\n\n\ntry :\n from _codecs import *\nexcept ImportError as why:\n raise SystemError('Failed to load the builtin codecs: %s'%why)\n \n__all__=[\"register\",\"lookup\",\"open\",\"EncodedFile\",\"BOM\",\"BOM_BE\",\n\"BOM_LE\",\"BOM32_BE\",\"BOM32_LE\",\"BOM64_BE\",\"BOM64_LE\",\n\"BOM_UTF8\",\"BOM_UTF16\",\"BOM_UTF16_LE\",\"BOM_UTF16_BE\",\n\"BOM_UTF32\",\"BOM_UTF32_LE\",\"BOM_UTF32_BE\",\n\"CodecInfo\",\"Codec\",\"IncrementalEncoder\",\"IncrementalDecoder\",\n\"StreamReader\",\"StreamWriter\",\n\"StreamReaderWriter\",\"StreamRecoder\",\n\"getencoder\",\"getdecoder\",\"getincrementalencoder\",\n\"getincrementaldecoder\",\"getreader\",\"getwriter\",\n\"encode\",\"decode\",\"iterencode\",\"iterdecode\",\n\"strict_errors\",\"ignore_errors\",\"replace_errors\",\n\"xmlcharrefreplace_errors\",\n\"backslashreplace_errors\",\"namereplace_errors\",\n\"register_error\",\"lookup_error\"]\n\n\n\n\n\n\n\n\n\n\nBOM_UTF8=b'\\xef\\xbb\\xbf'\n\n\nBOM_LE=BOM_UTF16_LE=b'\\xff\\xfe'\n\n\nBOM_BE=BOM_UTF16_BE=b'\\xfe\\xff'\n\n\nBOM_UTF32_LE=b'\\xff\\xfe\\x00\\x00'\n\n\nBOM_UTF32_BE=b'\\x00\\x00\\xfe\\xff'\n\nif sys.byteorder =='little':\n\n\n BOM=BOM_UTF16=BOM_UTF16_LE\n \n \n BOM_UTF32=BOM_UTF32_LE\n \nelse :\n\n\n BOM=BOM_UTF16=BOM_UTF16_BE\n \n \n BOM_UTF32=BOM_UTF32_BE\n \n \nBOM32_LE=BOM_UTF16_LE\nBOM32_BE=BOM_UTF16_BE\nBOM64_LE=BOM_UTF32_LE\nBOM64_BE=BOM_UTF32_BE\n\n\n\n\nclass CodecInfo(tuple):\n ''\n \n \n \n \n \n \n \n _is_text_encoding=True\n \n def __new__(cls,encode,decode,streamreader=None ,streamwriter=None ,\n incrementalencoder=None ,incrementaldecoder=None ,name=None ,\n *,_is_text_encoding=None ):\n self=tuple.__new__(cls,(encode,decode,streamreader,streamwriter))\n self.name=name\n self.encode=encode\n self.decode=decode\n self.incrementalencoder=incrementalencoder\n 
self.incrementaldecoder=incrementaldecoder\n self.streamwriter=streamwriter\n self.streamreader=streamreader\n if _is_text_encoding is not None :\n self._is_text_encoding=_is_text_encoding\n return self\n \n def __repr__(self):\n return \"<%s.%s object for encoding %s at %#x>\"%\\\n (self.__class__.__module__,self.__class__.__qualname__,\n self.name,id(self))\n \nclass Codec:\n\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n def encode(self,input,errors='strict'):\n \n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n raise NotImplementedError\n \n def decode(self,input,errors='strict'):\n \n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n raise NotImplementedError\n \nclass IncrementalEncoder(object):\n ''\n\n\n\n \n def __init__(self,errors='strict'):\n ''\n\n\n\n\n\n \n self.errors=errors\n self.buffer=\"\"\n \n def encode(self,input,final=False ):\n ''\n\n \n raise NotImplementedError\n \n def reset(self):\n ''\n\n \n \n def getstate(self):\n ''\n\n \n return 0\n \n def setstate(self,state):\n ''\n\n\n \n \nclass BufferedIncrementalEncoder(IncrementalEncoder):\n ''\n\n\n\n \n def __init__(self,errors='strict'):\n IncrementalEncoder.__init__(self,errors)\n \n self.buffer=\"\"\n \n def _buffer_encode(self,input,errors,final):\n \n \n raise NotImplementedError\n \n def encode(self,input,final=False ):\n \n data=self.buffer+input\n (result,consumed)=self._buffer_encode(data,self.errors,final)\n \n self.buffer=data[consumed:]\n return result\n \n def reset(self):\n IncrementalEncoder.reset(self)\n self.buffer=\"\"\n \n def getstate(self):\n return self.buffer or 0\n \n def setstate(self,state):\n self.buffer=state or \"\"\n \nclass IncrementalDecoder(object):\n ''\n\n\n\n \n def __init__(self,errors='strict'):\n ''\n\n\n\n\n\n \n self.errors=errors\n \n def decode(self,input,final=False ):\n ''\n\n \n raise NotImplementedError\n \n def reset(self):\n ''\n\n \n \n def getstate(self):\n ''\n\n\n\n\n\n\n\n\n\n \n return (b\"\",0)\n \n def setstate(self,state):\n ''\n\n\n\n\n \n \nclass BufferedIncrementalDecoder(IncrementalDecoder):\n ''\n\n\n\n \n def __init__(self,errors='strict'):\n IncrementalDecoder.__init__(self,errors)\n \n self.buffer=b\"\"\n \n def _buffer_decode(self,input,errors,final):\n \n \n raise NotImplementedError\n \n def decode(self,input,final=False ):\n \n data=self.buffer+input\n (result,consumed)=self._buffer_decode(data,self.errors,final)\n \n self.buffer=data[consumed:]\n return result\n \n def reset(self):\n IncrementalDecoder.reset(self)\n self.buffer=b\"\"\n \n def getstate(self):\n \n return (self.buffer,0)\n \n def setstate(self,state):\n \n self.buffer=state[0]\n \n \n \n \n \n \n \n \nclass StreamWriter(Codec):\n\n def __init__(self,stream,errors='strict'):\n \n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n self.stream=stream\n self.errors=errors\n \n def write(self,object):\n \n ''\n \n data,consumed=self.encode(object,self.errors)\n self.stream.write(data)\n \n def writelines(self,list):\n \n ''\n\n \n self.write(''.join(list))\n \n def reset(self):\n \n ''\n\n\n\n\n\n\n \n pass\n \n def seek(self,offset,whence=0):\n self.stream.seek(offset,whence)\n if whence ==0 and offset ==0:\n self.reset()\n \n def __getattr__(self,name,\n getattr=getattr):\n \n ''\n \n return getattr(self.stream,name)\n \n def __enter__(self):\n return self\n \n def __exit__(self,type,value,tb):\n self.stream.close()\n \n \n \nclass StreamReader(Codec):\n\n charbuffertype=str\n \n def __init__(self,stream,errors='strict'):\n \n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n self.stream=stream\n self.errors=errors\n 
self.bytebuffer=b\"\"\n self._empty_charbuffer=self.charbuffertype()\n self.charbuffer=self._empty_charbuffer\n self.linebuffer=None\n \n def decode(self,input,errors='strict'):\n raise NotImplementedError\n \n def read(self,size=-1,chars=-1,firstline=False ):\n \n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n if self.linebuffer:\n self.charbuffer=self._empty_charbuffer.join(self.linebuffer)\n self.linebuffer=None\n \n if chars <0:\n \n \n chars=size\n \n \n while True :\n \n if chars >=0:\n if len(self.charbuffer)>=chars:\n break\n \n if size <0:\n newdata=self.stream.read()\n else :\n newdata=self.stream.read(size)\n \n data=self.bytebuffer+newdata\n if not data:\n break\n try :\n newchars,decodedbytes=self.decode(data,self.errors)\n except UnicodeDecodeError as exc:\n if firstline:\n newchars,decodedbytes=\\\n self.decode(data[:exc.start],self.errors)\n lines=newchars.splitlines(keepends=True )\n if len(lines)<=1:\n raise\n else :\n raise\n \n self.bytebuffer=data[decodedbytes:]\n \n self.charbuffer +=newchars\n \n if not newdata:\n break\n if chars <0:\n \n result=self.charbuffer\n self.charbuffer=self._empty_charbuffer\n else :\n \n result=self.charbuffer[:chars]\n self.charbuffer=self.charbuffer[chars:]\n return result\n \n def readline(self,size=None ,keepends=True ):\n \n ''\n\n\n\n\n\n \n \n \n if self.linebuffer:\n line=self.linebuffer[0]\n del self.linebuffer[0]\n if len(self.linebuffer)==1:\n \n \n self.charbuffer=self.linebuffer[0]\n self.linebuffer=None\n if not keepends:\n line=line.splitlines(keepends=False )[0]\n return line\n \n readsize=size or 72\n line=self._empty_charbuffer\n \n while True :\n data=self.read(readsize,firstline=True )\n if data:\n \n \n \n if (isinstance(data,str)and data.endswith(\"\\r\"))or\\\n (isinstance(data,bytes)and data.endswith(b\"\\r\")):\n data +=self.read(size=1,chars=1)\n \n line +=data\n lines=line.splitlines(keepends=True )\n if lines:\n if len(lines)>1:\n \n \n line=lines[0]\n del lines[0]\n if len(lines)>1:\n \n lines[-1]+=self.charbuffer\n self.linebuffer=lines\n self.charbuffer=None\n else :\n \n self.charbuffer=lines[0]+self.charbuffer\n if not keepends:\n line=line.splitlines(keepends=False )[0]\n break\n line0withend=lines[0]\n line0withoutend=lines[0].splitlines(keepends=False )[0]\n if line0withend !=line0withoutend:\n \n self.charbuffer=self._empty_charbuffer.join(lines[1:])+\\\n self.charbuffer\n if keepends:\n line=line0withend\n else :\n line=line0withoutend\n break\n \n if not data or size is not None :\n if line and not keepends:\n line=line.splitlines(keepends=False )[0]\n break\n if readsize <8000:\n readsize *=2\n return line\n \n def readlines(self,sizehint=None ,keepends=True ):\n \n ''\n\n\n\n\n\n\n\n\n \n data=self.read()\n return data.splitlines(keepends)\n \n def reset(self):\n \n ''\n\n\n\n\n\n \n self.bytebuffer=b\"\"\n self.charbuffer=self._empty_charbuffer\n self.linebuffer=None\n \n def seek(self,offset,whence=0):\n ''\n\n\n \n self.stream.seek(offset,whence)\n self.reset()\n \n def __next__(self):\n \n ''\n line=self.readline()\n if line:\n return line\n raise StopIteration\n \n def __iter__(self):\n return self\n \n def __getattr__(self,name,\n getattr=getattr):\n \n ''\n \n return getattr(self.stream,name)\n \n def __enter__(self):\n return self\n \n def __exit__(self,type,value,tb):\n self.stream.close()\n \n \n \nclass StreamReaderWriter:\n\n ''\n\n\n\n\n\n\n \n \n encoding='unknown'\n \n def __init__(self,stream,Reader,Writer,errors='strict'):\n \n ''\n\n\n\n\n\n\n\n\n\n \n 
self.stream=stream\n self.reader=Reader(stream,errors)\n self.writer=Writer(stream,errors)\n self.errors=errors\n \n def read(self,size=-1):\n \n return self.reader.read(size)\n \n def readline(self,size=None ):\n \n return self.reader.readline(size)\n \n def readlines(self,sizehint=None ):\n \n return self.reader.readlines(sizehint)\n \n def __next__(self):\n \n ''\n return next(self.reader)\n \n def __iter__(self):\n return self\n \n def write(self,data):\n \n return self.writer.write(data)\n \n def writelines(self,list):\n \n return self.writer.writelines(list)\n \n def reset(self):\n \n self.reader.reset()\n self.writer.reset()\n \n def seek(self,offset,whence=0):\n self.stream.seek(offset,whence)\n self.reader.reset()\n if whence ==0 and offset ==0:\n self.writer.reset()\n \n def __getattr__(self,name,\n getattr=getattr):\n \n ''\n \n return getattr(self.stream,name)\n \n \n \n def __enter__(self):\n return self\n \n def __exit__(self,type,value,tb):\n self.stream.close()\n \n \n \nclass StreamRecoder:\n\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n data_encoding='unknown'\n file_encoding='unknown'\n \n def __init__(self,stream,encode,decode,Reader,Writer,\n errors='strict'):\n \n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n self.stream=stream\n self.encode=encode\n self.decode=decode\n self.reader=Reader(stream,errors)\n self.writer=Writer(stream,errors)\n self.errors=errors\n \n def read(self,size=-1):\n \n data=self.reader.read(size)\n data,bytesencoded=self.encode(data,self.errors)\n return data\n \n def readline(self,size=None ):\n \n if size is None :\n data=self.reader.readline()\n else :\n data=self.reader.readline(size)\n data,bytesencoded=self.encode(data,self.errors)\n return data\n \n def readlines(self,sizehint=None ):\n \n data=self.reader.read()\n data,bytesencoded=self.encode(data,self.errors)\n return data.splitlines(keepends=True )\n \n def __next__(self):\n \n ''\n data=next(self.reader)\n data,bytesencoded=self.encode(data,self.errors)\n return data\n \n def __iter__(self):\n return self\n \n def write(self,data):\n \n data,bytesdecoded=self.decode(data,self.errors)\n return self.writer.write(data)\n \n def writelines(self,list):\n \n data=b''.join(list)\n data,bytesdecoded=self.decode(data,self.errors)\n return self.writer.write(data)\n \n def reset(self):\n \n self.reader.reset()\n self.writer.reset()\n \n def seek(self,offset,whence=0):\n \n \n self.reader.seek(offset,whence)\n self.writer.seek(offset,whence)\n \n def __getattr__(self,name,\n getattr=getattr):\n \n ''\n \n return getattr(self.stream,name)\n \n def __enter__(self):\n return self\n \n def __exit__(self,type,value,tb):\n self.stream.close()\n \n \n \ndef open(filename,mode='r',encoding=None ,errors='strict',buffering=-1):\n\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if encoding is not None and\\\n 'b'not in mode:\n \n mode=mode+'b'\n file=builtins.open(filename,mode,buffering)\n if encoding is None :\n return file\n \n try :\n info=lookup(encoding)\n srw=StreamReaderWriter(file,info.streamreader,info.streamwriter,errors)\n \n srw.encoding=encoding\n return srw\n except :\n file.close()\n raise\n \ndef EncodedFile(file,data_encoding,file_encoding=None ,errors='strict'):\n\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if file_encoding is None :\n file_encoding=data_encoding\n data_info=lookup(data_encoding)\n file_info=lookup(file_encoding)\n sr=StreamRecoder(file,data_info.encode,data_info.decode,\n file_info.streamreader,file_info.streamwriter,errors)\n \n 
sr.data_encoding=data_encoding\n sr.file_encoding=file_encoding\n return sr\n \n \n \ndef getencoder(encoding):\n\n ''\n\n\n\n\n \n return lookup(encoding).encode\n \ndef getdecoder(encoding):\n\n ''\n\n\n\n\n \n return lookup(encoding).decode\n \ndef getincrementalencoder(encoding):\n\n ''\n\n\n\n\n\n \n encoder=lookup(encoding).incrementalencoder\n if encoder is None :\n raise LookupError(encoding)\n return encoder\n \ndef getincrementaldecoder(encoding):\n\n ''\n\n\n\n\n\n \n decoder=lookup(encoding).incrementaldecoder\n if decoder is None :\n raise LookupError(encoding)\n return decoder\n \ndef getreader(encoding):\n\n ''\n\n\n\n\n \n return lookup(encoding).streamreader\n \ndef getwriter(encoding):\n\n ''\n\n\n\n\n \n return lookup(encoding).streamwriter\n \ndef iterencode(iterator,encoding,errors='strict',**kwargs):\n ''\n\n\n\n\n\n\n \n encoder=getincrementalencoder(encoding)(errors,**kwargs)\n for input in iterator:\n output=encoder.encode(input)\n if output:\n yield output\n output=encoder.encode(\"\",True )\n if output:\n yield output\n \ndef iterdecode(iterator,encoding,errors='strict',**kwargs):\n ''\n\n\n\n\n\n\n \n decoder=getincrementaldecoder(encoding)(errors,**kwargs)\n for input in iterator:\n output=decoder.decode(input)\n if output:\n yield output\n output=decoder.decode(b\"\",True )\n if output:\n yield output\n \n \n \ndef make_identity_dict(rng):\n\n ''\n\n\n\n\n \n return {i:i for i in rng}\n \ndef make_encoding_map(decoding_map):\n\n ''\n\n\n\n\n\n\n\n\n\n \n m={}\n for k,v in decoding_map.items():\n if not v in m:\n m[v]=k\n else :\n m[v]=None\n return m\n \n \n \ntry :\n strict_errors=lookup_error(\"strict\")\n ignore_errors=lookup_error(\"ignore\")\n replace_errors=lookup_error(\"replace\")\n xmlcharrefreplace_errors=lookup_error(\"xmlcharrefreplace\")\n backslashreplace_errors=lookup_error(\"backslashreplace\")\n namereplace_errors=lookup_error(\"namereplace\")\nexcept LookupError:\n\n strict_errors=None\n ignore_errors=None\n replace_errors=None\n xmlcharrefreplace_errors=None\n backslashreplace_errors=None\n namereplace_errors=None\n \n \n \n_false=0\nif _false:\n import encodings\n \n \n \nif __name__ =='__main__':\n\n\n sys.stdout=EncodedFile(sys.stdout,'latin-1','utf-8')\n \n \n sys.stdin=EncodedFile(sys.stdin,'utf-8','latin-1')\n", ["_codecs", "builtins", "encodings", "sys"]], "codeop": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nimport __future__\nimport warnings\n\n_features=[getattr(__future__,fname)\nfor fname in __future__.all_feature_names]\n\n__all__=[\"compile_command\",\"Compile\",\"CommandCompiler\"]\n\nPyCF_DONT_IMPLY_DEDENT=0x200\n\ndef _maybe_compile(compiler,source,filename,symbol):\n\n for line in source.split(\"\\n\"):\n line=line.strip()\n if line and line[0]!='#':\n break\n else :\n if symbol !=\"eval\":\n source=\"pass\"\n \n err=err1=err2=None\n code=code1=code2=None\n \n try :\n code=compiler(source,filename,symbol)\n except SyntaxError:\n pass\n \n \n \n with warnings.catch_warnings():\n warnings.simplefilter(\"error\")\n \n try :\n code1=compiler(source+\"\\n\",filename,symbol)\n except SyntaxError as e:\n err1=e\n \n try :\n code2=compiler(source+\"\\n\\n\",filename,symbol)\n except SyntaxError as e:\n err2=e\n \n try :\n if code:\n return code\n if not code1 and repr(err1)==repr(err2):\n raise err1\n finally :\n err1=err2=None\n \ndef _compile(source,filename,symbol):\n return compile(source,filename,symbol,PyCF_DONT_IMPLY_DEDENT)\n \ndef 
compile_command(source,filename=\"\",symbol=\"single\"):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n return _maybe_compile(_compile,source,filename,symbol)\n \nclass Compile:\n ''\n\n\n \n def __init__(self):\n self.flags=PyCF_DONT_IMPLY_DEDENT\n \n def __call__(self,source,filename,symbol):\n codeob=compile(source,filename,symbol,self.flags,True )\n for feature in _features:\n if codeob.co_flags&feature.compiler_flag:\n self.flags |=feature.compiler_flag\n return codeob\n \nclass CommandCompiler:\n ''\n\n\n\n \n \n def __init__(self,):\n self.compiler=Compile()\n \n def __call__(self,source,filename=\"\",symbol=\"single\"):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n return _maybe_compile(self.compiler,source,filename,symbol)\n", ["__future__", "warnings"]], "colorsys": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n__all__=[\"rgb_to_yiq\",\"yiq_to_rgb\",\"rgb_to_hls\",\"hls_to_rgb\",\n\"rgb_to_hsv\",\"hsv_to_rgb\"]\n\n\n\nONE_THIRD=1.0 /3.0\nONE_SIXTH=1.0 /6.0\nTWO_THIRD=2.0 /3.0\n\n\n\n\n\n\n\n\ndef rgb_to_yiq(r,g,b):\n y=0.30 *r+0.59 *g+0.11 *b\n i=0.74 *(r -y)-0.27 *(b -y)\n q=0.48 *(r -y)+0.41 *(b -y)\n return (y,i,q)\n \ndef yiq_to_rgb(y,i,q):\n\n\n\n\n r=y+0.9468822170900693 *i+0.6235565819861433 *q\n g=y -0.27478764629897834 *i -0.6356910791873801 *q\n b=y -1.1085450346420322 *i+1.7090069284064666 *q\n \n if r <0.0:\n r=0.0\n if g <0.0:\n g=0.0\n if b <0.0:\n b=0.0\n if r >1.0:\n r=1.0\n if g >1.0:\n g=1.0\n if b >1.0:\n b=1.0\n return (r,g,b)\n \n \n \n \n \n \n \ndef rgb_to_hls(r,g,b):\n maxc=max(r,g,b)\n minc=min(r,g,b)\n \n l=(minc+maxc)/2.0\n if minc ==maxc:\n return 0.0,l,0.0\n if l <=0.5:\n s=(maxc -minc)/(maxc+minc)\n else :\n s=(maxc -minc)/(2.0 -maxc -minc)\n rc=(maxc -r)/(maxc -minc)\n gc=(maxc -g)/(maxc -minc)\n bc=(maxc -b)/(maxc -minc)\n if r ==maxc:\n h=bc -gc\n elif g ==maxc:\n h=2.0+rc -bc\n else :\n h=4.0+gc -rc\n h=(h /6.0)%1.0\n return h,l,s\n \ndef hls_to_rgb(h,l,s):\n if s ==0.0:\n return l,l,l\n if l <=0.5:\n m2=l *(1.0+s)\n else :\n m2=l+s -(l *s)\n m1=2.0 *l -m2\n return (_v(m1,m2,h+ONE_THIRD),_v(m1,m2,h),_v(m1,m2,h -ONE_THIRD))\n \ndef _v(m1,m2,hue):\n hue=hue %1.0\n if hue MAX_INTERPOLATION_DEPTH:\n raise InterpolationDepthError(option,section,rawval)\n while rest:\n p=rest.find(\"%\")\n if p <0:\n accum.append(rest)\n return\n if p >0:\n accum.append(rest[:p])\n rest=rest[p:]\n \n c=rest[1:2]\n if c ==\"%\":\n accum.append(\"%\")\n rest=rest[2:]\n elif c ==\"(\":\n m=self._KEYCRE.match(rest)\n if m is None :\n raise InterpolationSyntaxError(option,section,\n \"bad interpolation variable reference %r\"%rest)\n var=parser.optionxform(m.group(1))\n rest=rest[m.end():]\n try :\n v=map[var]\n except KeyError:\n raise InterpolationMissingOptionError(\n option,section,rawval,var)from None\n if \"%\"in v:\n self._interpolate_some(parser,option,accum,v,\n section,map,depth+1)\n else :\n accum.append(v)\n else :\n raise InterpolationSyntaxError(\n option,section,\n \"'%%' must be followed by '%%' or '(', \"\n \"found: %r\"%(rest,))\n \n \nclass ExtendedInterpolation(Interpolation):\n ''\n \n \n _KEYCRE=re.compile(r\"\\$\\{([^}]+)\\}\")\n \n def before_get(self,parser,section,option,value,defaults):\n L=[]\n self._interpolate_some(parser,option,L,value,section,defaults,1)\n return ''.join(L)\n \n def before_set(self,parser,section,option,value):\n tmp_value=value.replace('$$','')\n tmp_value=self._KEYCRE.sub('',tmp_value)\n if '$'in tmp_value:\n raise ValueError(\"invalid interpolation syntax in %r at \"\n \"position %d\"%(value,tmp_value.find('$')))\n 
return value\n \n def _interpolate_some(self,parser,option,accum,rest,section,map,\n depth):\n rawval=parser.get(section,option,raw=True ,fallback=rest)\n if depth >MAX_INTERPOLATION_DEPTH:\n raise InterpolationDepthError(option,section,rawval)\n while rest:\n p=rest.find(\"$\")\n if p <0:\n accum.append(rest)\n return\n if p >0:\n accum.append(rest[:p])\n rest=rest[p:]\n \n c=rest[1:2]\n if c ==\"$\":\n accum.append(\"$\")\n rest=rest[2:]\n elif c ==\"{\":\n m=self._KEYCRE.match(rest)\n if m is None :\n raise InterpolationSyntaxError(option,section,\n \"bad interpolation variable reference %r\"%rest)\n path=m.group(1).split(':')\n rest=rest[m.end():]\n sect=section\n opt=option\n try :\n if len(path)==1:\n opt=parser.optionxform(path[0])\n v=map[opt]\n elif len(path)==2:\n sect=path[0]\n opt=parser.optionxform(path[1])\n v=parser.get(sect,opt,raw=True )\n else :\n raise InterpolationSyntaxError(\n option,section,\n \"More than one ':' found: %r\"%(rest,))\n except (KeyError,NoSectionError,NoOptionError):\n raise InterpolationMissingOptionError(\n option,section,rawval,\":\".join(path))from None\n if \"$\"in v:\n self._interpolate_some(parser,opt,accum,v,sect,\n dict(parser.items(sect,raw=True )),\n depth+1)\n else :\n accum.append(v)\n else :\n raise InterpolationSyntaxError(\n option,section,\n \"'$' must be followed by '$' or '{', \"\n \"found: %r\"%(rest,))\n \n \nclass LegacyInterpolation(Interpolation):\n ''\n \n \n _KEYCRE=re.compile(r\"%\\(([^)]*)\\)s|.\")\n \n def before_get(self,parser,section,option,value,vars):\n rawval=value\n depth=MAX_INTERPOLATION_DEPTH\n while depth:\n depth -=1\n if value and \"%(\"in value:\n replace=functools.partial(self._interpolation_replace,\n parser=parser)\n value=self._KEYCRE.sub(replace,value)\n try :\n value=value %vars\n except KeyError as e:\n raise InterpolationMissingOptionError(\n option,section,rawval,e.args[0])from None\n else :\n break\n if value and \"%(\"in value:\n raise InterpolationDepthError(option,section,rawval)\n return value\n \n def before_set(self,parser,section,option,value):\n return value\n \n @staticmethod\n def _interpolation_replace(match,parser):\n s=match.group(1)\n if s is None :\n return match.group()\n else :\n return \"%%(%s)s\"%parser.optionxform(s)\n \n \nclass RawConfigParser(MutableMapping):\n ''\n \n \n _SECT_TMPL=r\"\"\"\n \\[ # [\n (?P

[^]]+) # very permissive!\n \\] # ]\n \"\"\"\n _OPT_TMPL=r\"\"\"\n (?Pn'%(\n toprefix,num_chg)\n else :\n in_change=False\n \n if not flaglist:\n flaglist=[False ]\n next_id=['']\n next_href=['']\n last=0\n if context:\n fromlist=[' No Differences Found ']\n tolist=fromlist\n else :\n fromlist=tolist=[' Empty File ']\n \n if not flaglist[0]:\n next_href[0]='f'%toprefix\n \n next_href[last]='t'%(toprefix)\n \n return fromlist,tolist,flaglist,next_href,next_id\n \n def make_table(self,fromlines,tolines,fromdesc='',todesc='',context=False ,\n numlines=5):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n \n \n self._make_prefix()\n \n \n \n fromlines,tolines=self._tab_newline_replace(fromlines,tolines)\n \n \n if context:\n context_lines=numlines\n else :\n context_lines=None\n diffs=_mdiff(fromlines,tolines,context_lines,linejunk=self._linejunk,\n charjunk=self._charjunk)\n \n \n if self._wrapcolumn:\n diffs=self._line_wrapper(diffs)\n \n \n fromlist,tolist,flaglist=self._collect_lines(diffs)\n \n \n fromlist,tolist,flaglist,next_href,next_id=self._convert_flags(\n fromlist,tolist,flaglist,context,numlines)\n \n s=[]\n fmt=' %s%s'+\\\n '%s%s\\n'\n for i in range(len(flaglist)):\n if flaglist[i]is None :\n \n \n if i >0:\n s.append(' \\n \\n')\n else :\n s.append(fmt %(next_id[i],next_href[i],fromlist[i],\n next_href[i],tolist[i]))\n if fromdesc or todesc:\n header_row='%s%s%s%s'%(\n '
',\n '%s'%fromdesc,\n '
',\n '%s'%todesc)\n else :\n header_row=''\n \n table=self._table_template %dict(\n data_rows=''.join(s),\n header_row=header_row,\n prefix=self._prefix[1])\n \n return table.replace('\\0+','').\\\n replace('\\0-','').\\\n replace('\\0^','').\\\n replace('\\1','').\\\n replace('\\t',' ')\n \ndel re\n\ndef restore(delta,which):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n try :\n tag={1:\"- \",2:\"+ \"}[int(which)]\n except KeyError:\n raise ValueError('unknown delta choice (must be 1 or 2): %r'\n %which)from None\n prefixes=(\" \",tag)\n for line in delta:\n if line[:2]in prefixes:\n yield line[2:]\n \ndef _test():\n import doctest,difflib\n return doctest.testmod(difflib)\n \nif __name__ ==\"__main__\":\n _test()\n", ["collections", "difflib", "doctest", "heapq", "re", "types"]], "doctest": [".py", "\n\n\n\n\n\n\n\nr\"\"\"Module doctest -- a framework for running examples in docstrings.\n\nIn simplest use, end each module M to be tested with:\n\ndef _test():\n import doctest\n doctest.testmod()\n\nif __name__ == \"__main__\":\n _test()\n\nThen running the module as a script will cause the examples in the\ndocstrings to get executed and verified:\n\npython M.py\n\nThis won't display anything unless an example fails, in which case the\nfailing example(s) and the cause(s) of the failure(s) are printed to stdout\n(why not stderr? because stderr is a lame hack <0.2 wink>), and the final\nline of output is \"Test failed.\".\n\nRun it with the -v switch instead:\n\npython M.py -v\n\nand a detailed report of all examples tried is printed to stdout, along\nwith assorted summaries at the end.\n\nYou can force verbose mode by passing \"verbose=True\" to testmod, or prohibit\nit by passing \"verbose=False\". In either of those cases, sys.argv is not\nexamined by testmod.\n\nThere are a variety of other ways to run doctests, including integration\nwith the unittest framework, and support for running non-Python text\nfiles containing doctests. There are also many ways to override parts\nof doctest's default behaviors. 
See the Library Reference Manual for\ndetails.\n\"\"\"\n\n__docformat__='reStructuredText en'\n\n__all__=[\n\n'register_optionflag',\n'DONT_ACCEPT_TRUE_FOR_1',\n'DONT_ACCEPT_BLANKLINE',\n'NORMALIZE_WHITESPACE',\n'ELLIPSIS',\n'SKIP',\n'IGNORE_EXCEPTION_DETAIL',\n'COMPARISON_FLAGS',\n'REPORT_UDIFF',\n'REPORT_CDIFF',\n'REPORT_NDIFF',\n'REPORT_ONLY_FIRST_FAILURE',\n'REPORTING_FLAGS',\n'FAIL_FAST',\n\n\n'Example',\n'DocTest',\n\n'DocTestParser',\n\n'DocTestFinder',\n\n'DocTestRunner',\n'OutputChecker',\n'DocTestFailure',\n'UnexpectedException',\n'DebugRunner',\n\n'testmod',\n'testfile',\n'run_docstring_examples',\n\n'DocTestSuite',\n'DocFileSuite',\n'set_unittest_reportflags',\n\n'script_from_examples',\n'testsource',\n'debug_src',\n'debug',\n]\n\nimport __future__\nimport difflib\nimport inspect\nimport linecache\nimport os\nimport pdb\nimport re\nimport sys\nimport traceback\nimport unittest\nfrom io import StringIO\nfrom collections import namedtuple\n\nTestResults=namedtuple('TestResults','failed attempted')\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nOPTIONFLAGS_BY_NAME={}\ndef register_optionflag(name):\n\n return OPTIONFLAGS_BY_NAME.setdefault(name,1 <=2\n \n \n startpos,endpos=0,len(got)\n w=ws[0]\n if w:\n if got.startswith(w):\n startpos=len(w)\n del ws[0]\n else :\n return False\n w=ws[-1]\n if w:\n if got.endswith(w):\n endpos -=len(w)\n del ws[-1]\n else :\n return False\n \n if startpos >endpos:\n \n \n return False\n \n \n \n \n for w in ws:\n \n \n \n startpos=got.find(w,startpos,endpos)\n if startpos <0:\n return False\n startpos +=len(w)\n \n return True\n \ndef _comment_line(line):\n ''\n line=line.rstrip()\n if line:\n return '# '+line\n else :\n return '#'\n \ndef _strip_exception_details(msg):\n\n\n\n\n\n\n\n\n\n\n start,end=0,len(msg)\n \n i=msg.find(\"\\n\")\n if i >=0:\n end=i\n \n i=msg.find(':',0,end)\n if i >=0:\n end=i\n \n i=msg.rfind('.',0,end)\n if i >=0:\n start=i+1\n return msg[start:end]\n \nclass _OutputRedirectingPdb(pdb.Pdb):\n ''\n\n\n\n \n def __init__(self,out):\n self.__out=out\n self.__debugger_used=False\n \n pdb.Pdb.__init__(self,stdout=out,nosigint=True )\n \n self.use_rawinput=1\n \n def set_trace(self,frame=None ):\n self.__debugger_used=True\n if frame is None :\n frame=sys._getframe().f_back\n pdb.Pdb.set_trace(self,frame)\n \n def set_continue(self):\n \n \n if self.__debugger_used:\n pdb.Pdb.set_continue(self)\n \n def trace_dispatch(self,*args):\n \n save_stdout=sys.stdout\n sys.stdout=self.__out\n \n try :\n return pdb.Pdb.trace_dispatch(self,*args)\n finally :\n sys.stdout=save_stdout\n \n \ndef _module_relative_path(module,test_path):\n if not inspect.ismodule(module):\n raise TypeError('Expected a module: %r'%module)\n if test_path.startswith('/'):\n raise ValueError('Module-relative files may not have absolute paths')\n \n \n test_path=os.path.join(*(test_path.split('/')))\n \n \n if hasattr(module,'__file__'):\n \n basedir=os.path.split(module.__file__)[0]\n elif module.__name__ =='__main__':\n \n if len(sys.argv)>0 and sys.argv[0]!='':\n basedir=os.path.split(sys.argv[0])[0]\n else :\n basedir=os.curdir\n else :\n if hasattr(module,'__path__'):\n for directory in module.__path__:\n fullpath=os.path.join(directory,test_path)\n if os.path.exists(fullpath):\n return fullpath\n \n \n raise ValueError(\"Can't resolve paths relative to the module \"\n \"%r (it has no __file__)\"\n %module.__name__)\n \n \n return os.path.join(basedir,test_path)\n \n \n \n \n \n \n \n \n \n \n \n \n \nclass Example:\n 
''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n def __init__(self,source,want,exc_msg=None ,lineno=0,indent=0,\n options=None ):\n \n if not source.endswith('\\n'):\n source +='\\n'\n if want and not want.endswith('\\n'):\n want +='\\n'\n if exc_msg is not None and not exc_msg.endswith('\\n'):\n exc_msg +='\\n'\n \n self.source=source\n self.want=want\n self.lineno=lineno\n self.indent=indent\n if options is None :options={}\n self.options=options\n self.exc_msg=exc_msg\n \n def __eq__(self,other):\n if type(self)is not type(other):\n return NotImplemented\n \n return self.source ==other.source and\\\n self.want ==other.want and\\\n self.lineno ==other.lineno and\\\n self.indent ==other.indent and\\\n self.options ==other.options and\\\n self.exc_msg ==other.exc_msg\n \n def __hash__(self):\n return hash((self.source,self.want,self.lineno,self.indent,\n self.exc_msg))\n \nclass DocTest:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n def __init__(self,examples,globs,name,filename,lineno,docstring):\n ''\n\n\n \n assert not isinstance(examples,str),\\\n \"DocTest no longer accepts str; use DocTestParser instead\"\n self.examples=examples\n self.docstring=docstring\n self.globs=globs.copy()\n self.name=name\n self.filename=filename\n self.lineno=lineno\n \n def __repr__(self):\n if len(self.examples)==0:\n examples='no examples'\n elif len(self.examples)==1:\n examples='1 example'\n else :\n examples='%d examples'%len(self.examples)\n return ('<%s %s from %s:%s (%s)>'%\n (self.__class__.__name__,\n self.name,self.filename,self.lineno,examples))\n \n def __eq__(self,other):\n if type(self)is not type(other):\n return NotImplemented\n \n return self.examples ==other.examples and\\\n self.docstring ==other.docstring and\\\n self.globs ==other.globs and\\\n self.name ==other.name and\\\n self.filename ==other.filename and\\\n self.lineno ==other.lineno\n \n def __hash__(self):\n return hash((self.docstring,self.name,self.filename,self.lineno))\n \n \n def __lt__(self,other):\n if not isinstance(other,DocTest):\n return NotImplemented\n return ((self.name,self.filename,self.lineno,id(self))\n <\n (other.name,other.filename,other.lineno,id(other)))\n \n \n \n \n \nclass DocTestParser:\n ''\n\n \n \n \n \n \n \n _EXAMPLE_RE=re.compile(r'''\n # Source consists of a PS1 line followed by zero or more PS2 lines.\n (?P\n (?:^(?P [ ]*) >>> .*) # PS1 line\n (?:\\n [ ]* \\.\\.\\. .*)*) # PS2 lines\n \\n?\n # Want consists of any non-blank lines that do not start with PS1.\n (?P (?:(?![ ]*$) # Not a blank line\n (?![ ]*>>>) # Not a line starting with PS1\n .+$\\n? # But any other line\n )*)\n ''',re.MULTILINE |re.VERBOSE)\n \n \n \n \n \n \n \n \n \n \n _EXCEPTION_RE=re.compile(r\"\"\"\n # Grab the traceback header. Different versions of Python have\n # said different things on the first traceback line.\n ^(?P Traceback\\ \\(\n (?: most\\ recent\\ call\\ last\n | innermost\\ last\n ) \\) :\n )\n \\s* $ # toss trailing whitespace on the header.\n (?P .*?) 
# don't blink: absorb stuff until...\n ^ (?P \\w+ .*) # a line *starts* with alphanum.\n \"\"\",re.VERBOSE |re.MULTILINE |re.DOTALL)\n \n \n \n _IS_BLANK_OR_COMMENT=re.compile(r'^[ ]*(#.*)?$').match\n \n def parse(self,string,name=''):\n ''\n\n\n\n\n\n \n string=string.expandtabs()\n \n min_indent=self._min_indent(string)\n if min_indent >0:\n string='\\n'.join([l[min_indent:]for l in string.split('\\n')])\n \n output=[]\n charno,lineno=0,0\n \n for m in self._EXAMPLE_RE.finditer(string):\n \n output.append(string[charno:m.start()])\n \n lineno +=string.count('\\n',charno,m.start())\n \n (source,options,want,exc_msg)=\\\n self._parse_example(m,name,lineno)\n \n if not self._IS_BLANK_OR_COMMENT(source):\n output.append(Example(source,want,exc_msg,\n lineno=lineno,\n indent=min_indent+len(m.group('indent')),\n options=options))\n \n lineno +=string.count('\\n',m.start(),m.end())\n \n charno=m.end()\n \n output.append(string[charno:])\n return output\n \n def get_doctest(self,string,globs,name,filename,lineno):\n ''\n\n\n\n\n\n\n \n return DocTest(self.get_examples(string,name),globs,\n name,filename,lineno,string)\n \n def get_examples(self,string,name=''):\n ''\n\n\n\n\n\n\n\n\n \n return [x for x in self.parse(string,name)\n if isinstance(x,Example)]\n \n def _parse_example(self,m,name,lineno):\n ''\n\n\n\n\n\n\n\n\n \n \n indent=len(m.group('indent'))\n \n \n \n source_lines=m.group('source').split('\\n')\n self._check_prompt_blank(source_lines,indent,name,lineno)\n self._check_prefix(source_lines[1:],' '*indent+'.',name,lineno)\n source='\\n'.join([sl[indent+4:]for sl in source_lines])\n \n \n \n \n want=m.group('want')\n want_lines=want.split('\\n')\n if len(want_lines)>1 and re.match(r' *$',want_lines[-1]):\n del want_lines[-1]\n self._check_prefix(want_lines,' '*indent,name,\n lineno+len(source_lines))\n want='\\n'.join([wl[indent:]for wl in want_lines])\n \n \n m=self._EXCEPTION_RE.match(want)\n if m:\n exc_msg=m.group('msg')\n else :\n exc_msg=None\n \n \n options=self._find_options(source,name,lineno)\n \n return source,options,want,exc_msg\n \n \n \n \n \n \n \n \n _OPTION_DIRECTIVE_RE=re.compile(r'#\\s*doctest:\\s*([^\\n\\'\"]*)$',\n re.MULTILINE)\n \n def _find_options(self,source,name,lineno):\n ''\n\n\n\n\n\n \n options={}\n \n for m in self._OPTION_DIRECTIVE_RE.finditer(source):\n option_strings=m.group(1).replace(',',' ').split()\n for option in option_strings:\n if (option[0]not in '+-'or\n option[1:]not in OPTIONFLAGS_BY_NAME):\n raise ValueError('line %r of the doctest for %s '\n 'has an invalid option: %r'%\n (lineno+1,name,option))\n flag=OPTIONFLAGS_BY_NAME[option[1:]]\n options[flag]=(option[0]=='+')\n if options and self._IS_BLANK_OR_COMMENT(source):\n raise ValueError('line %r of the doctest for %s has an option '\n 'directive on a line with no example: %r'%\n (lineno,name,source))\n return options\n \n \n \n _INDENT_RE=re.compile(r'^([ ]*)(?=\\S)',re.MULTILINE)\n \n def _min_indent(self,s):\n ''\n indents=[len(indent)for indent in self._INDENT_RE.findall(s)]\n if len(indents)>0:\n return min(indents)\n else :\n return 0\n \n def _check_prompt_blank(self,lines,indent,name,lineno):\n ''\n\n\n\n\n \n for i,line in enumerate(lines):\n if len(line)>=indent+4 and line[indent+3]!=' ':\n raise ValueError('line %r of the docstring for %s '\n 'lacks blank after %s: %r'%\n (lineno+i+1,name,\n line[indent:indent+3],line))\n \n def _check_prefix(self,lines,prefix,name,lineno):\n ''\n\n\n \n for i,line in enumerate(lines):\n if line and not line.startswith(prefix):\n raise 
ValueError('line %r of the docstring for %s has '\n 'inconsistent leading whitespace: %r'%\n (lineno+i+1,name,line))\n \n \n \n \n \n \nclass DocTestFinder:\n ''\n\n\n\n\n\n \n \n def __init__(self,verbose=False ,parser=DocTestParser(),\n recurse=True ,exclude_empty=True ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n self._parser=parser\n self._verbose=verbose\n self._recurse=recurse\n self._exclude_empty=exclude_empty\n \n def find(self,obj,name=None ,module=None ,globs=None ,extraglobs=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n if name is None :\n name=getattr(obj,'__name__',None )\n if name is None :\n raise ValueError(\"DocTestFinder.find: name must be given \"\n \"when obj.__name__ doesn't exist: %r\"%\n (type(obj),))\n \n \n \n \n if module is False :\n module=None\n elif module is None :\n module=inspect.getmodule(obj)\n \n \n \n \n try :\n file=inspect.getsourcefile(obj)\n except TypeError:\n source_lines=None\n else :\n if not file:\n \n \n file=inspect.getfile(obj)\n if not file[0]+file[-2:]=='<]>':file=None\n if file is None :\n source_lines=None\n else :\n if module is not None :\n \n \n \n source_lines=linecache.getlines(file,module.__dict__)\n else :\n \n \n source_lines=linecache.getlines(file)\n if not source_lines:\n source_lines=None\n \n \n if globs is None :\n if module is None :\n globs={}\n else :\n globs=module.__dict__.copy()\n else :\n globs=globs.copy()\n if extraglobs is not None :\n globs.update(extraglobs)\n if '__name__'not in globs:\n globs['__name__']='__main__'\n \n \n tests=[]\n self._find(tests,obj,name,module,source_lines,globs,{})\n \n \n \n \n tests.sort()\n return tests\n \n def _from_module(self,module,object):\n ''\n\n\n \n if module is None :\n return True\n elif inspect.getmodule(object)is not None :\n return module is inspect.getmodule(object)\n elif inspect.isfunction(object):\n return module.__dict__ is object.__globals__\n elif inspect.ismethoddescriptor(object):\n if hasattr(object,'__objclass__'):\n obj_mod=object.__objclass__.__module__\n elif hasattr(object,'__module__'):\n obj_mod=object.__module__\n else :\n return True\n return module.__name__ ==obj_mod\n elif inspect.isclass(object):\n return module.__name__ ==object.__module__\n elif hasattr(object,'__module__'):\n return module.__name__ ==object.__module__\n elif isinstance(object,property):\n return True\n else :\n raise ValueError(\"object must be a class or function\")\n \n def _find(self,tests,obj,name,module,source_lines,globs,seen):\n ''\n\n\n \n if self._verbose:\n print('Finding tests in %s'%name)\n \n \n if id(obj)in seen:\n return\n seen[id(obj)]=1\n \n \n test=self._get_test(obj,name,module,globs,source_lines)\n if test is not None :\n tests.append(test)\n \n \n if inspect.ismodule(obj)and self._recurse:\n for valname,val in obj.__dict__.items():\n valname='%s.%s'%(name,valname)\n \n if ((inspect.isroutine(inspect.unwrap(val))\n or inspect.isclass(val))and\n self._from_module(module,val)):\n self._find(tests,val,valname,module,source_lines,\n globs,seen)\n \n \n if inspect.ismodule(obj)and self._recurse:\n for valname,val in getattr(obj,'__test__',{}).items():\n if not isinstance(valname,str):\n raise ValueError(\"DocTestFinder.find: __test__ keys \"\n \"must be strings: %r\"%\n (type(valname),))\n if not (inspect.isroutine(val)or inspect.isclass(val)or\n inspect.ismodule(val)or isinstance(val,str)):\n raise ValueError(\"DocTestFinder.find: __test__ values \"\n \"must be strings, functions, methods, \"\n \"classes, or modules: %r\"%\n 
(type(val),))\n valname='%s.__test__.%s'%(name,valname)\n self._find(tests,val,valname,module,source_lines,\n globs,seen)\n \n \n if inspect.isclass(obj)and self._recurse:\n for valname,val in obj.__dict__.items():\n \n if isinstance(val,staticmethod):\n val=getattr(obj,valname)\n if isinstance(val,classmethod):\n val=getattr(obj,valname).__func__\n \n \n if ((inspect.isroutine(val)or inspect.isclass(val)or\n isinstance(val,property))and\n self._from_module(module,val)):\n valname='%s.%s'%(name,valname)\n self._find(tests,val,valname,module,source_lines,\n globs,seen)\n \n def _get_test(self,obj,name,module,globs,source_lines):\n ''\n\n\n \n \n \n if isinstance(obj,str):\n docstring=obj\n else :\n try :\n if obj.__doc__ is None :\n docstring=''\n else :\n docstring=obj.__doc__\n if not isinstance(docstring,str):\n docstring=str(docstring)\n except (TypeError,AttributeError):\n docstring=''\n \n \n lineno=self._find_lineno(obj,source_lines)\n \n \n if self._exclude_empty and not docstring:\n return None\n \n \n if module is None :\n filename=None\n else :\n \n filename=getattr(module,'__file__',None )or module.__name__\n if filename[-4:]==\".pyc\":\n filename=filename[:-1]\n return self._parser.get_doctest(docstring,globs,name,\n filename,lineno)\n \n def _find_lineno(self,obj,source_lines):\n ''\n\n\n \n lineno=None\n \n \n if inspect.ismodule(obj):\n lineno=0\n \n \n \n \n if inspect.isclass(obj):\n if source_lines is None :\n return None\n pat=re.compile(r'^\\s*class\\s*%s\\b'%\n getattr(obj,'__name__','-'))\n for i,line in enumerate(source_lines):\n if pat.match(line):\n lineno=i\n break\n \n \n if inspect.ismethod(obj):obj=obj.__func__\n if inspect.isfunction(obj):obj=obj.__code__\n if inspect.istraceback(obj):obj=obj.tb_frame\n if inspect.isframe(obj):obj=obj.f_code\n if inspect.iscode(obj):\n lineno=getattr(obj,'co_firstlineno',None )-1\n \n \n \n \n \n \n if lineno is not None :\n if source_lines is None :\n return lineno+1\n pat=re.compile(r'(^|.*:)\\s*\\w*(\"|\\')')\n for lineno in range(lineno,len(source_lines)):\n if pat.match(source_lines[lineno]):\n return lineno\n \n \n return None\n \n \n \n \n \nclass DocTestRunner:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n \n DIVIDER=\"*\"*70\n \n def __init__(self,checker=None ,verbose=None ,optionflags=0):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n self._checker=checker or OutputChecker()\n if verbose is None :\n verbose='-v'in sys.argv\n self._verbose=verbose\n self.optionflags=optionflags\n self.original_optionflags=optionflags\n \n \n self.tries=0\n self.failures=0\n self._name2ft={}\n \n \n self._fakeout=_SpoofOut()\n \n \n \n \n \n def report_start(self,out,test,example):\n ''\n\n\n \n if self._verbose:\n if example.want:\n out('Trying:\\n'+_indent(example.source)+\n 'Expecting:\\n'+_indent(example.want))\n else :\n out('Trying:\\n'+_indent(example.source)+\n 'Expecting nothing\\n')\n \n def report_success(self,out,test,example,got):\n ''\n\n\n \n if self._verbose:\n out(\"ok\\n\")\n \n def report_failure(self,out,test,example,got):\n ''\n\n \n out(self._failure_header(test,example)+\n self._checker.output_difference(example,got,self.optionflags))\n \n def report_unexpected_exception(self,out,test,example,exc_info):\n ''\n\n \n out(self._failure_header(test,example)+\n 'Exception raised:\\n'+_indent(_exception_traceback(exc_info)))\n \n def _failure_header(self,test,example):\n out=[self.DIVIDER]\n if test.filename:\n if test.lineno is not None and 
example.lineno is not None :\n lineno=test.lineno+example.lineno+1\n else :\n lineno='?'\n out.append('File \"%s\", line %s, in %s'%\n (test.filename,lineno,test.name))\n else :\n out.append('Line %s, in %s'%(example.lineno+1,test.name))\n out.append('Failed example:')\n source=example.source\n out.append(_indent(source))\n return '\\n'.join(out)\n \n \n \n \n \n def __run(self,test,compileflags,out):\n ''\n\n\n\n\n\n\n\n \n \n failures=tries=0\n \n \n \n original_optionflags=self.optionflags\n \n SUCCESS,FAILURE,BOOM=range(3)\n \n check=self._checker.check_output\n \n \n for examplenum,example in enumerate(test.examples):\n \n \n \n quiet=(self.optionflags&REPORT_ONLY_FIRST_FAILURE and\n failures >0)\n \n \n self.optionflags=original_optionflags\n if example.options:\n for (optionflag,val)in example.options.items():\n if val:\n self.optionflags |=optionflag\n else :\n self.optionflags &=~optionflag\n \n \n if self.optionflags&SKIP:\n continue\n \n \n tries +=1\n if not quiet:\n self.report_start(out,test,example)\n \n \n \n \n filename=''%(test.name,examplenum)\n \n \n \n \n try :\n \n exec(compile(example.source,filename,\"single\",\n compileflags,True ),test.globs)\n self.debugger.set_continue()\n exception=None\n except KeyboardInterrupt:\n raise\n except :\n exception=sys.exc_info()\n self.debugger.set_continue()\n \n got=self._fakeout.getvalue()\n self._fakeout.truncate(0)\n outcome=FAILURE\n \n \n \n if exception is None :\n if check(example.want,got,self.optionflags):\n outcome=SUCCESS\n \n \n else :\n exc_msg=traceback.format_exception_only(*exception[:2])[-1]\n if not quiet:\n got +=_exception_traceback(exception)\n \n \n \n if example.exc_msg is None :\n outcome=BOOM\n \n \n elif check(example.exc_msg,exc_msg,self.optionflags):\n outcome=SUCCESS\n \n \n elif self.optionflags&IGNORE_EXCEPTION_DETAIL:\n if check(_strip_exception_details(example.exc_msg),\n _strip_exception_details(exc_msg),\n self.optionflags):\n outcome=SUCCESS\n \n \n if outcome is SUCCESS:\n if not quiet:\n self.report_success(out,test,example,got)\n elif outcome is FAILURE:\n if not quiet:\n self.report_failure(out,test,example,got)\n failures +=1\n elif outcome is BOOM:\n if not quiet:\n self.report_unexpected_exception(out,test,example,\n exception)\n failures +=1\n else :\n assert False ,(\"unknown outcome\",outcome)\n \n if failures and self.optionflags&FAIL_FAST:\n break\n \n \n self.optionflags=original_optionflags\n \n \n self.__record_outcome(test,failures,tries)\n return TestResults(failures,tries)\n \n def __record_outcome(self,test,f,t):\n ''\n\n\n \n f2,t2=self._name2ft.get(test.name,(0,0))\n self._name2ft[test.name]=(f+f2,t+t2)\n self.failures +=f\n self.tries +=t\n \n __LINECACHE_FILENAME_RE=re.compile(r'.+)'\n r'\\[(?P\\d+)\\]>$')\n def __patched_linecache_getlines(self,filename,module_globals=None ):\n m=self.__LINECACHE_FILENAME_RE.match(filename)\n if m and m.group('name')==self.test.name:\n example=self.test.examples[int(m.group('examplenum'))]\n return example.source.splitlines(keepends=True )\n else :\n return self.save_linecache_getlines(filename,module_globals)\n \n def run(self,test,compileflags=None ,out=None ,clear_globs=True ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n self.test=test\n \n if compileflags is None :\n compileflags=_extract_future_flags(test.globs)\n \n save_stdout=sys.stdout\n if out is None :\n encoding=save_stdout.encoding\n if encoding is None or encoding.lower()=='utf-8':\n out=save_stdout.write\n else :\n \n def out(s):\n 
s=str(s.encode(encoding,'backslashreplace'),encoding)\n save_stdout.write(s)\n sys.stdout=self._fakeout\n \n \n \n \n \n \n save_trace=sys.gettrace()\n save_set_trace=pdb.set_trace\n self.debugger=_OutputRedirectingPdb(save_stdout)\n self.debugger.reset()\n pdb.set_trace=self.debugger.set_trace\n \n \n \n self.save_linecache_getlines=linecache.getlines\n linecache.getlines=self.__patched_linecache_getlines\n \n \n save_displayhook=sys.displayhook\n sys.displayhook=sys.__displayhook__\n \n try :\n return self.__run(test,compileflags,out)\n finally :\n sys.stdout=save_stdout\n pdb.set_trace=save_set_trace\n sys.settrace(save_trace)\n linecache.getlines=self.save_linecache_getlines\n sys.displayhook=save_displayhook\n if clear_globs:\n test.globs.clear()\n import builtins\n builtins._=None\n \n \n \n \n def summarize(self,verbose=None ):\n ''\n\n\n\n\n\n\n\n\n \n if verbose is None :\n verbose=self._verbose\n notests=[]\n passed=[]\n failed=[]\n totalt=totalf=0\n for x in self._name2ft.items():\n name,(f,t)=x\n assert f <=t\n totalt +=t\n totalf +=f\n if t ==0:\n notests.append(name)\n elif f ==0:\n passed.append((name,t))\n else :\n failed.append(x)\n if verbose:\n if notests:\n print(len(notests),\"items had no tests:\")\n notests.sort()\n for thing in notests:\n print(\" \",thing)\n if passed:\n print(len(passed),\"items passed all tests:\")\n passed.sort()\n for thing,count in passed:\n print(\" %3d tests in %s\"%(count,thing))\n if failed:\n print(self.DIVIDER)\n print(len(failed),\"items had failures:\")\n failed.sort()\n for thing,(f,t)in failed:\n print(\" %3d of %3d in %s\"%(f,t,thing))\n if verbose:\n print(totalt,\"tests in\",len(self._name2ft),\"items.\")\n print(totalt -totalf,\"passed and\",totalf,\"failed.\")\n if totalf:\n print(\"***Test Failed***\",totalf,\"failures.\")\n elif verbose:\n print(\"Test passed.\")\n return TestResults(totalf,totalt)\n \n \n \n \n def merge(self,other):\n d=self._name2ft\n for name,(f,t)in other._name2ft.items():\n if name in d:\n \n \n \n \n f2,t2=d[name]\n f=f+f2\n t=t+t2\n d[name]=f,t\n \nclass OutputChecker:\n ''\n\n\n\n\n\n \n def _toAscii(self,s):\n ''\n\n \n return str(s.encode('ASCII','backslashreplace'),\"ASCII\")\n \n def check_output(self,want,got,optionflags):\n ''\n\n\n\n\n\n\n\n \n \n \n \n \n \n \n got=self._toAscii(got)\n want=self._toAscii(want)\n \n \n \n if got ==want:\n return True\n \n \n \n if not (optionflags&DONT_ACCEPT_TRUE_FOR_1):\n if (got,want)==(\"True\\n\",\"1\\n\"):\n return True\n if (got,want)==(\"False\\n\",\"0\\n\"):\n return True\n \n \n \n if not (optionflags&DONT_ACCEPT_BLANKLINE):\n \n want=re.sub(r'(?m)^%s\\s*?$'%re.escape(BLANKLINE_MARKER),\n '',want)\n \n \n got=re.sub(r'(?m)^[^\\S\\n]+$','',got)\n if got ==want:\n return True\n \n \n \n \n if optionflags&NORMALIZE_WHITESPACE:\n got=' '.join(got.split())\n want=' '.join(want.split())\n if got ==want:\n return True\n \n \n \n if optionflags&ELLIPSIS:\n if _ellipsis_match(want,got):\n return True\n \n \n return False\n \n \n def _do_a_fancy_diff(self,want,got,optionflags):\n \n if not optionflags&(REPORT_UDIFF |\n REPORT_CDIFF |\n REPORT_NDIFF):\n return False\n \n \n \n \n \n \n \n \n \n \n \n if optionflags&REPORT_NDIFF:\n return True\n \n \n return want.count('\\n')>2 and got.count('\\n')>2\n \n def output_difference(self,example,got,optionflags):\n ''\n\n\n\n\n \n want=example.want\n \n \n if not (optionflags&DONT_ACCEPT_BLANKLINE):\n got=re.sub('(?m)^[ ]*(?=\\n)',BLANKLINE_MARKER,got)\n \n \n if self._do_a_fancy_diff(want,got,optionflags):\n \n 
want_lines=want.splitlines(keepends=True )\n got_lines=got.splitlines(keepends=True )\n \n if optionflags&REPORT_UDIFF:\n diff=difflib.unified_diff(want_lines,got_lines,n=2)\n diff=list(diff)[2:]\n kind='unified diff with -expected +actual'\n elif optionflags&REPORT_CDIFF:\n diff=difflib.context_diff(want_lines,got_lines,n=2)\n diff=list(diff)[2:]\n kind='context diff with expected followed by actual'\n elif optionflags&REPORT_NDIFF:\n engine=difflib.Differ(charjunk=difflib.IS_CHARACTER_JUNK)\n diff=list(engine.compare(want_lines,got_lines))\n kind='ndiff with -expected +actual'\n else :\n assert 0,'Bad diff option'\n return 'Differences (%s):\\n'%kind+_indent(''.join(diff))\n \n \n \n if want and got:\n return 'Expected:\\n%sGot:\\n%s'%(_indent(want),_indent(got))\n elif want:\n return 'Expected:\\n%sGot nothing\\n'%_indent(want)\n elif got:\n return 'Expected nothing\\nGot:\\n%s'%_indent(got)\n else :\n return 'Expected nothing\\nGot nothing\\n'\n \nclass DocTestFailure(Exception):\n ''\n\n\n\n\n\n\n\n\n \n def __init__(self,test,example,got):\n self.test=test\n self.example=example\n self.got=got\n \n def __str__(self):\n return str(self.test)\n \nclass UnexpectedException(Exception):\n ''\n\n\n\n\n\n\n\n\n \n def __init__(self,test,example,exc_info):\n self.test=test\n self.example=example\n self.exc_info=exc_info\n \n def __str__(self):\n return str(self.test)\n \nclass DebugRunner(DocTestRunner):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def run(self,test,compileflags=None ,out=None ,clear_globs=True ):\n r=DocTestRunner.run(self,test,compileflags,out,False )\n if clear_globs:\n test.globs.clear()\n return r\n \n def report_unexpected_exception(self,out,test,example,exc_info):\n raise UnexpectedException(test,example,exc_info)\n \n def report_failure(self,out,test,example,got):\n raise DocTestFailure(test,example,got)\n \n \n \n \n \n \n \n \nmaster=None\n\ndef testmod(m=None ,name=None ,globs=None ,verbose=None ,\nreport=True ,optionflags=0,extraglobs=None ,\nraise_on_error=False ,exclude_empty=False ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n global master\n \n \n if m is None :\n \n \n \n m=sys.modules.get('__main__')\n \n \n if not inspect.ismodule(m):\n raise TypeError(\"testmod: module required; %r\"%(m,))\n \n \n if name is None :\n name=m.__name__\n \n \n finder=DocTestFinder(exclude_empty=exclude_empty)\n \n if raise_on_error:\n runner=DebugRunner(verbose=verbose,optionflags=optionflags)\n else :\n runner=DocTestRunner(verbose=verbose,optionflags=optionflags)\n \n for test in finder.find(m,name,globs=globs,extraglobs=extraglobs):\n runner.run(test)\n \n if report:\n runner.summarize()\n \n if master is None :\n master=runner\n else :\n master.merge(runner)\n \n return TestResults(runner.failures,runner.tries)\n \ndef testfile(filename,module_relative=True ,name=None ,package=None ,\nglobs=None ,verbose=None ,report=True ,optionflags=0,\nextraglobs=None ,raise_on_error=False ,parser=DocTestParser(),\nencoding=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n global master\n \n if package and not module_relative:\n raise ValueError(\"Package may only be specified for module-\"\n \"relative paths.\")\n \n \n 
text,filename=_load_testfile(filename,package,module_relative,\n encoding or \"utf-8\")\n \n \n if name is None :\n name=os.path.basename(filename)\n \n \n if globs is None :\n globs={}\n else :\n globs=globs.copy()\n if extraglobs is not None :\n globs.update(extraglobs)\n if '__name__'not in globs:\n globs['__name__']='__main__'\n \n if raise_on_error:\n runner=DebugRunner(verbose=verbose,optionflags=optionflags)\n else :\n runner=DocTestRunner(verbose=verbose,optionflags=optionflags)\n \n \n test=parser.get_doctest(text,globs,name,filename,0)\n runner.run(test)\n \n if report:\n runner.summarize()\n \n if master is None :\n master=runner\n else :\n master.merge(runner)\n \n return TestResults(runner.failures,runner.tries)\n \ndef run_docstring_examples(f,globs,verbose=False ,name=\"NoName\",\ncompileflags=None ,optionflags=0):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n finder=DocTestFinder(verbose=verbose,recurse=False )\n runner=DocTestRunner(verbose=verbose,optionflags=optionflags)\n for test in finder.find(f,name,globs=globs):\n runner.run(test,compileflags=compileflags)\n \n \n \n \n \n_unittest_reportflags=0\n\ndef set_unittest_reportflags(flags):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n global _unittest_reportflags\n \n if (flags&REPORTING_FLAGS)!=flags:\n raise ValueError(\"Only reporting flags allowed\",flags)\n old=_unittest_reportflags\n _unittest_reportflags=flags\n return old\n \n \nclass DocTestCase(unittest.TestCase):\n\n def __init__(self,test,optionflags=0,setUp=None ,tearDown=None ,\n checker=None ):\n \n unittest.TestCase.__init__(self)\n self._dt_optionflags=optionflags\n self._dt_checker=checker\n self._dt_test=test\n self._dt_setUp=setUp\n self._dt_tearDown=tearDown\n \n def setUp(self):\n test=self._dt_test\n \n if self._dt_setUp is not None :\n self._dt_setUp(test)\n \n def tearDown(self):\n test=self._dt_test\n \n if self._dt_tearDown is not None :\n self._dt_tearDown(test)\n \n test.globs.clear()\n \n def runTest(self):\n test=self._dt_test\n old=sys.stdout\n new=StringIO()\n optionflags=self._dt_optionflags\n \n if not (optionflags&REPORTING_FLAGS):\n \n \n optionflags |=_unittest_reportflags\n \n runner=DocTestRunner(optionflags=optionflags,\n checker=self._dt_checker,verbose=False )\n \n try :\n runner.DIVIDER=\"-\"*70\n failures,tries=runner.run(\n test,out=new.write,clear_globs=False )\n finally :\n sys.stdout=old\n \n if failures:\n raise self.failureException(self.format_failure(new.getvalue()))\n \n def format_failure(self,err):\n test=self._dt_test\n if test.lineno is None :\n lineno='unknown line number'\n else :\n lineno='%s'%test.lineno\n lname='.'.join(test.name.split('.')[-1:])\n return ('Failed doctest test for %s\\n'\n ' File \"%s\", line %s, in %s\\n\\n%s'\n %(test.name,test.filename,lineno,lname,err)\n )\n \n def debug(self):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n self.setUp()\n runner=DebugRunner(optionflags=self._dt_optionflags,\n checker=self._dt_checker,verbose=False )\n runner.run(self._dt_test,clear_globs=False )\n self.tearDown()\n \n def id(self):\n return self._dt_test.name\n \n def __eq__(self,other):\n if type(self)is not type(other):\n return NotImplemented\n \n return self._dt_test ==other._dt_test and\\\n self._dt_optionflags ==other._dt_optionflags and\\\n self._dt_setUp ==other._dt_setUp and\\\n self._dt_tearDown ==other._dt_tearDown and\\\n self._dt_checker ==other._dt_checker\n \n def __hash__(self):\n return 
hash((self._dt_optionflags,self._dt_setUp,self._dt_tearDown,\n self._dt_checker))\n \n def __repr__(self):\n name=self._dt_test.name.split('.')\n return \"%s (%s)\"%(name[-1],'.'.join(name[:-1]))\n \n __str__=object.__str__\n \n def shortDescription(self):\n return \"Doctest: \"+self._dt_test.name\n \nclass SkipDocTestCase(DocTestCase):\n def __init__(self,module):\n self.module=module\n DocTestCase.__init__(self,None )\n \n def setUp(self):\n self.skipTest(\"DocTestSuite will not work with -O2 and above\")\n \n def test_skip(self):\n pass\n \n def shortDescription(self):\n return \"Skipping tests from %s\"%self.module.__name__\n \n __str__=shortDescription\n \n \nclass _DocTestSuite(unittest.TestSuite):\n\n def _removeTestAtIndex(self,index):\n pass\n \n \ndef DocTestSuite(module=None ,globs=None ,extraglobs=None ,test_finder=None ,\n**options):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n if test_finder is None :\n test_finder=DocTestFinder()\n \n module=_normalize_module(module)\n tests=test_finder.find(module,globs=globs,extraglobs=extraglobs)\n \n if not tests and sys.flags.optimize >=2:\n \n suite=_DocTestSuite()\n suite.addTest(SkipDocTestCase(module))\n return suite\n \n tests.sort()\n suite=_DocTestSuite()\n \n for test in tests:\n if len(test.examples)==0:\n continue\n if not test.filename:\n filename=module.__file__\n if filename[-4:]==\".pyc\":\n filename=filename[:-1]\n test.filename=filename\n suite.addTest(DocTestCase(test,**options))\n \n return suite\n \nclass DocFileCase(DocTestCase):\n\n def id(self):\n return '_'.join(self._dt_test.name.split('.'))\n \n def __repr__(self):\n return self._dt_test.filename\n \n def format_failure(self,err):\n return ('Failed doctest test for %s\\n File \"%s\", line 0\\n\\n%s'\n %(self._dt_test.name,self._dt_test.filename,err)\n )\n \ndef DocFileTest(path,module_relative=True ,package=None ,\nglobs=None ,parser=DocTestParser(),\nencoding=None ,**options):\n if globs is None :\n globs={}\n else :\n globs=globs.copy()\n \n if package and not module_relative:\n raise ValueError(\"Package may only be specified for module-\"\n \"relative paths.\")\n \n \n doc,path=_load_testfile(path,package,module_relative,\n encoding or \"utf-8\")\n \n if \"__file__\"not in globs:\n globs[\"__file__\"]=path\n \n \n name=os.path.basename(path)\n \n \n test=parser.get_doctest(doc,globs,name,path,0)\n return DocFileCase(test,**options)\n \ndef DocFileSuite(*paths,**kw):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n suite=_DocTestSuite()\n \n \n \n \n if kw.get('module_relative',True ):\n kw['package']=_normalize_module(kw.get('package'))\n \n for path in paths:\n suite.addTest(DocFileTest(path,**kw))\n \n return suite\n \n \n \n \n \ndef script_from_examples(s):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n output=[]\n for piece in DocTestParser().parse(s):\n if isinstance(piece,Example):\n \n output.append(piece.source[:-1])\n \n want=piece.want\n if want:\n output.append('# Expected:')\n output +=['## '+l for l in want.split('\\n')[:-1]]\n else :\n \n output +=[_comment_line(l)\n for l in piece.split('\\n')[:-1]]\n \n \n while output and output[-1]=='#':\n output.pop()\n while output and output[0]=='#':\n output.pop(0)\n \n \n return '\\n'.join(output)+'\\n'\n \ndef testsource(module,name):\n ''\n\n\n\n\n \n module=_normalize_module(module)\n tests=DocTestFinder().find(module)\n test=[t 
for t in tests if t.name ==name]\n if not test:\n raise ValueError(name,\"not found in tests\")\n test=test[0]\n testsrc=script_from_examples(test.docstring)\n return testsrc\n \ndef debug_src(src,pm=False ,globs=None ):\n ''\n testsrc=script_from_examples(src)\n debug_script(testsrc,pm,globs)\n \ndef debug_script(src,pm=False ,globs=None ):\n ''\n import pdb\n \n if globs:\n globs=globs.copy()\n else :\n globs={}\n \n if pm:\n try :\n exec(src,globs,globs)\n except :\n print(sys.exc_info()[1])\n p=pdb.Pdb(nosigint=True )\n p.reset()\n p.interaction(None ,sys.exc_info()[2])\n else :\n pdb.Pdb(nosigint=True ).run(\"exec(%r)\"%src,globs,globs)\n \ndef debug(module,name,pm=False ):\n ''\n\n\n\n\n \n module=_normalize_module(module)\n testsrc=testsource(module,name)\n debug_script(testsrc,pm,module.__dict__)\n \n \n \n \nclass _TestClass:\n ''\n\n\n\n\n\n\n\n\n\n\n \n \n def __init__(self,val):\n ''\n\n\n\n\n \n \n self.val=val\n \n def square(self):\n ''\n\n\n\n \n \n self.val=self.val **2\n return self\n \n def get(self):\n ''\n\n\n\n\n \n \n return self.val\n \n__test__={\"_TestClass\":_TestClass,\n\"string\":r\"\"\"\n Example of a string object, searched as-is.\n >>> x = 1; y = 2\n >>> x + y, x * y\n (3, 2)\n \"\"\",\n\n\"bool-int equivalence\":r\"\"\"\n In 2.2, boolean expressions displayed\n 0 or 1. By default, we still accept\n them. This can be disabled by passing\n DONT_ACCEPT_TRUE_FOR_1 to the new\n optionflags argument.\n >>> 4 == 4\n 1\n >>> 4 == 4\n True\n >>> 4 > 4\n 0\n >>> 4 > 4\n False\n \"\"\",\n\n\"blank lines\":r\"\"\"\n Blank lines can be marked with :\n >>> print('foo\\n\\nbar\\n')\n foo\n \n bar\n \n \"\"\",\n\n\"ellipsis\":r\"\"\"\n If the ellipsis flag is used, then '...' can be used to\n elide substrings in the desired output:\n >>> print(list(range(1000))) #doctest: +ELLIPSIS\n [0, 1, 2, ..., 999]\n \"\"\",\n\n\"whitespace normalization\":r\"\"\"\n If the whitespace normalization flag is used, then\n differences in whitespace are ignored.\n >>> print(list(range(30))) #doctest: +NORMALIZE_WHITESPACE\n [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14,\n 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26,\n 27, 28, 29]\n \"\"\",\n}\n\n\ndef _test():\n import argparse\n \n parser=argparse.ArgumentParser(description=\"doctest runner\")\n parser.add_argument('-v','--verbose',action='store_true',default=False ,\n help='print very verbose output for all tests')\n parser.add_argument('-o','--option',action='append',\n choices=OPTIONFLAGS_BY_NAME.keys(),default=[],\n help=('specify a doctest option flag to apply'\n ' to the test run; may be specified more'\n ' than once to apply multiple options'))\n parser.add_argument('-f','--fail-fast',action='store_true',\n help=('stop running tests after first failure (this'\n ' is a shorthand for -o FAIL_FAST, and is'\n ' in addition to any other -o options)'))\n parser.add_argument('file',nargs='+',\n help='file containing the tests to run')\n args=parser.parse_args()\n testfiles=args.file\n \n \n verbose=args.verbose\n options=0\n for option in args.option:\n options |=OPTIONFLAGS_BY_NAME[option]\n if args.fail_fast:\n options |=FAIL_FAST\n for filename in testfiles:\n if filename.endswith(\".py\"):\n \n \n \n dirname,filename=os.path.split(filename)\n sys.path.insert(0,dirname)\n m=__import__(filename[:-3])\n del sys.path[0]\n failures,_=testmod(m,verbose=verbose,optionflags=options)\n else :\n failures,_=testfile(filename,module_relative=False ,\n verbose=verbose,optionflags=options)\n if failures:\n return 1\n return 0\n \n \nif 
__name__ ==\"__main__\":\n sys.exit(_test())\n", ["__future__", "argparse", "builtins", "collections", "difflib", "inspect", "io", "linecache", "os", "pdb", "re", "sys", "traceback", "unittest"]], "enum": [".py", "import sys\nfrom types import MappingProxyType,DynamicClassAttribute\n\n\n__all__=[\n'EnumMeta',\n'Enum','IntEnum','Flag','IntFlag',\n'auto','unique',\n]\n\n\ndef _is_descriptor(obj):\n ''\n return (\n hasattr(obj,'__get__')or\n hasattr(obj,'__set__')or\n hasattr(obj,'__delete__'))\n \n \ndef _is_dunder(name):\n ''\n return (len(name)>4 and\n name[:2]==name[-2:]=='__'and\n name[2]!='_'and\n name[-3]!='_')\n \n \ndef _is_sunder(name):\n ''\n return (len(name)>2 and\n name[0]==name[-1]=='_'and\n name[1:2]!='_'and\n name[-2:-1]!='_')\n \n \ndef _make_class_unpicklable(cls):\n ''\n def _break_on_call_reduce(self,proto):\n raise TypeError('%r cannot be pickled'%self)\n cls.__reduce_ex__=_break_on_call_reduce\n cls.__module__=''\n \n_auto_null=object()\nclass auto:\n ''\n\n \n value=_auto_null\n \n \nclass _EnumDict(dict):\n ''\n\n\n\n\n \n def __init__(self):\n super().__init__()\n self._member_names=[]\n self._last_values=[]\n self._ignore=[]\n self._auto_called=False\n \n def __setitem__(self,key,value):\n ''\n\n\n\n\n\n\n \n if _is_sunder(key):\n if key not in (\n '_order_','_create_pseudo_member_',\n '_generate_next_value_','_missing_','_ignore_',\n ):\n raise ValueError('_names_ are reserved for future Enum use')\n if key =='_generate_next_value_':\n \n if self._auto_called:\n raise TypeError(\"_generate_next_value_ must be defined before members\")\n setattr(self,'_generate_next_value',value)\n elif key =='_ignore_':\n if isinstance(value,str):\n value=value.replace(',',' ').split()\n else :\n value=list(value)\n self._ignore=value\n already=set(value)&set(self._member_names)\n if already:\n raise ValueError('_ignore_ cannot specify already set names: %r'%(already,))\n elif _is_dunder(key):\n if key =='__order__':\n key='_order_'\n elif key in self._member_names:\n \n raise TypeError('Attempted to reuse key: %r'%key)\n elif key in self._ignore:\n pass\n elif not _is_descriptor(value):\n if key in self:\n \n raise TypeError('%r already defined as: %r'%(key,self[key]))\n if isinstance(value,auto):\n if value.value ==_auto_null:\n value.value=self._generate_next_value(key,1,len(self._member_names),self._last_values[:])\n self._auto_called=True\n value=value.value\n self._member_names.append(key)\n self._last_values.append(value)\n super().__setitem__(key,value)\n \n \n \n \n \nEnum=None\n\n\nclass EnumMeta(type):\n ''\n @classmethod\n def __prepare__(metacls,cls,bases):\n \n metacls._check_for_existing_members(cls,bases)\n \n enum_dict=_EnumDict()\n \n member_type,first_enum=metacls._get_mixins_(cls,bases)\n if first_enum is not None :\n enum_dict['_generate_next_value_']=getattr(first_enum,'_generate_next_value_',None )\n return enum_dict\n \n def __new__(metacls,cls,bases,classdict):\n \n \n \n \n \n \n classdict.setdefault('_ignore_',[]).append('_ignore_')\n ignore=classdict['_ignore_']\n for key in ignore:\n classdict.pop(key,None )\n member_type,first_enum=metacls._get_mixins_(cls,bases)\n __new__,save_new,use_args=metacls._find_new_(classdict,member_type,\n first_enum)\n \n \n \n enum_members={k:classdict[k]for k in classdict._member_names}\n for name in classdict._member_names:\n del classdict[name]\n \n \n _order_=classdict.pop('_order_',None )\n \n \n invalid_names=set(enum_members)&{'mro',''}\n if invalid_names:\n raise ValueError('Invalid enum member name: {0}'.format(\n 
','.join(invalid_names)))\n \n \n if '__doc__'not in classdict:\n classdict['__doc__']='An enumeration.'\n \n \n enum_class=super().__new__(metacls,cls,bases,classdict)\n enum_class._member_names_=[]\n enum_class._member_map_={}\n enum_class._member_type_=member_type\n \n \n \n dynamic_attributes={k for c in enum_class.mro()\n for k,v in c.__dict__.items()\n if isinstance(v,DynamicClassAttribute)}\n \n \n enum_class._value2member_map_={}\n \n \n \n \n \n \n \n \n \n \n \n if '__reduce_ex__'not in classdict:\n if member_type is not object:\n methods=('__getnewargs_ex__','__getnewargs__',\n '__reduce_ex__','__reduce__')\n if not any(m in member_type.__dict__ for m in methods):\n _make_class_unpicklable(enum_class)\n \n \n \n \n \n for member_name in classdict._member_names:\n value=enum_members[member_name]\n if not isinstance(value,tuple):\n args=(value,)\n else :\n args=value\n if member_type is tuple:\n args=(args,)\n if not use_args:\n enum_member=__new__(enum_class)\n if not hasattr(enum_member,'_value_'):\n enum_member._value_=value\n else :\n enum_member=__new__(enum_class,*args)\n if not hasattr(enum_member,'_value_'):\n if member_type is object:\n enum_member._value_=value\n else :\n enum_member._value_=member_type(*args)\n value=enum_member._value_\n enum_member._name_=member_name\n enum_member.__objclass__=enum_class\n enum_member.__init__(*args)\n \n \n for name,canonical_member in enum_class._member_map_.items():\n if canonical_member._value_ ==enum_member._value_:\n enum_member=canonical_member\n break\n else :\n \n enum_class._member_names_.append(member_name)\n \n \n if member_name not in dynamic_attributes:\n setattr(enum_class,member_name,enum_member)\n \n enum_class._member_map_[member_name]=enum_member\n try :\n \n \n \n enum_class._value2member_map_[value]=enum_member\n except TypeError:\n pass\n \n \n \n \n \n for name in ('__repr__','__str__','__format__','__reduce_ex__'):\n if name in classdict:\n continue\n class_method=getattr(enum_class,name)\n obj_method=getattr(member_type,name,None )\n enum_method=getattr(first_enum,name,None )\n if obj_method is not None and obj_method is class_method:\n setattr(enum_class,name,enum_method)\n \n \n \n if Enum is not None :\n \n \n if save_new:\n enum_class.__new_member__=__new__\n enum_class.__new__=Enum.__new__\n \n \n if _order_ is not None :\n if isinstance(_order_,str):\n _order_=_order_.replace(',',' ').split()\n if _order_ !=enum_class._member_names_:\n raise TypeError('member order does not match _order_')\n \n return enum_class\n \n def __bool__(self):\n ''\n\n \n return True\n \n def __call__(cls,value,names=None ,*,module=None ,qualname=None ,type=None ,start=1):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if names is None :\n return cls.__new__(cls,value)\n \n return cls._create_(value,names,module=module,qualname=qualname,type=type,start=start)\n \n def __contains__(cls,member):\n if not isinstance(member,Enum):\n raise TypeError(\n \"unsupported operand type(s) for 'in': '%s' and '%s'\"%(\n type(member).__qualname__,cls.__class__.__qualname__))\n return isinstance(member,cls)and member._name_ in cls._member_map_\n \n def __delattr__(cls,attr):\n \n \n if attr in cls._member_map_:\n raise AttributeError(\n \"%s: cannot delete Enum member.\"%cls.__name__)\n super().__delattr__(attr)\n \n def __dir__(self):\n return (['__class__','__doc__','__members__','__module__']+\n self._member_names_)\n \n def __getattr__(cls,name):\n ''\n\n\n\n\n\n\n \n if _is_dunder(name):\n raise AttributeError(name)\n try :\n 
return cls._member_map_[name]\n except KeyError:\n raise AttributeError(name)from None\n \n def __getitem__(cls,name):\n return cls._member_map_[name]\n \n def __iter__(cls):\n return (cls._member_map_[name]for name in cls._member_names_)\n \n def __len__(cls):\n return len(cls._member_names_)\n \n @property\n def __members__(cls):\n ''\n\n\n\n\n \n return MappingProxyType(cls._member_map_)\n \n def __repr__(cls):\n return \"\"%cls.__name__\n \n def __reversed__(cls):\n return (cls._member_map_[name]for name in reversed(cls._member_names_))\n \n def __setattr__(cls,name,value):\n ''\n\n\n\n\n\n \n member_map=cls.__dict__.get('_member_map_',{})\n if name in member_map:\n raise AttributeError('Cannot reassign members.')\n super().__setattr__(name,value)\n \n def _create_(cls,class_name,names,*,module=None ,qualname=None ,type=None ,start=1):\n ''\n\n\n\n\n\n\n\n\n\n \n metacls=cls.__class__\n bases=(cls,)if type is None else (type,cls)\n _,first_enum=cls._get_mixins_(cls,bases)\n classdict=metacls.__prepare__(class_name,bases)\n \n \n if isinstance(names,str):\n names=names.replace(',',' ').split()\n if isinstance(names,(tuple,list))and names and isinstance(names[0],str):\n original_names,names=names,[]\n last_values=[]\n for count,name in enumerate(original_names):\n value=first_enum._generate_next_value_(name,start,count,last_values[:])\n last_values.append(value)\n names.append((name,value))\n \n \n for item in names:\n if isinstance(item,str):\n member_name,member_value=item,names[item]\n else :\n member_name,member_value=item\n classdict[member_name]=member_value\n enum_class=metacls.__new__(metacls,class_name,bases,classdict)\n \n \n \n if module is None :\n try :\n module=sys._getframe(2).f_globals['__name__']\n except (AttributeError,ValueError,KeyError):\n pass\n if module is None :\n _make_class_unpicklable(enum_class)\n else :\n enum_class.__module__=module\n if qualname is not None :\n enum_class.__qualname__=qualname\n \n return enum_class\n \n def _convert_(cls,name,module,filter,source=None ):\n ''\n\n \n \n \n \n \n \n module_globals=vars(sys.modules[module])\n if source:\n source=vars(source)\n else :\n source=module_globals\n \n \n \n members=[\n (name,value)\n for name,value in source.items()\n if filter(name)]\n try :\n \n members.sort(key=lambda t:(t[1],t[0]))\n except TypeError:\n \n members.sort(key=lambda t:t[0])\n cls=cls(name,members,module=module)\n cls.__reduce_ex__=_reduce_ex_by_name\n module_globals.update(cls.__members__)\n module_globals[name]=cls\n return cls\n \n @staticmethod\n def _check_for_existing_members(class_name,bases):\n for chain in bases:\n for base in chain.__mro__:\n if issubclass(base,Enum)and base._member_names_:\n raise TypeError(\"%s: cannot extend enumeration %r\"%(class_name,base.__name__))\n \n @staticmethod\n def _get_mixins_(class_name,bases):\n ''\n\n\n\n\n \n if not bases:\n return object,Enum\n \n def _find_data_type(bases):\n data_types=[]\n for chain in bases:\n candidate=None\n for base in chain.__mro__:\n if base is object:\n continue\n elif '__new__'in base.__dict__:\n if issubclass(base,Enum):\n continue\n data_types.append(candidate or base)\n break\n elif not issubclass(base,Enum):\n candidate=base\n if len(data_types)>1:\n raise TypeError('%r: too many data types: %r'%(class_name,data_types))\n elif data_types:\n return data_types[0]\n else :\n return None\n \n \n \n first_enum=bases[-1]\n if not issubclass(first_enum,Enum):\n raise TypeError(\"new enumerations should be created as \"\n \"`EnumName([mixin_type, ...] 
[data_type,] enum_type)`\")\n member_type=_find_data_type(bases)or object\n if first_enum._member_names_:\n raise TypeError(\"Cannot extend enumerations\")\n return member_type,first_enum\n \n @staticmethod\n def _find_new_(classdict,member_type,first_enum):\n ''\n\n\n\n\n\n \n \n \n \n __new__=classdict.get('__new__',None )\n \n \n save_new=__new__ is not None\n \n if __new__ is None :\n \n \n for method in ('__new_member__','__new__'):\n for possible in (member_type,first_enum):\n target=getattr(possible,method,None )\n if target not in {\n None ,\n None .__new__,\n object.__new__,\n Enum.__new__,\n }:\n __new__=target\n break\n if __new__ is not None :\n break\n else :\n __new__=object.__new__\n \n \n \n \n if __new__ is object.__new__:\n use_args=False\n else :\n use_args=True\n return __new__,save_new,use_args\n \n \nclass Enum(metaclass=EnumMeta):\n ''\n\n\n\n \n def __new__(cls,value):\n \n \n \n if type(value)is cls:\n \n return value\n \n \n try :\n return cls._value2member_map_[value]\n except KeyError:\n \n pass\n except TypeError:\n \n for member in cls._member_map_.values():\n if member._value_ ==value:\n return member\n \n try :\n exc=None\n result=cls._missing_(value)\n except Exception as e:\n exc=e\n result=None\n if isinstance(result,cls):\n return result\n else :\n ve_exc=ValueError(\"%r is not a valid %s\"%(value,cls.__qualname__))\n if result is None and exc is None :\n raise ve_exc\n elif exc is None :\n exc=TypeError(\n 'error in %s._missing_: returned %r instead of None or a valid member'\n %(cls.__name__,result)\n )\n exc.__context__=ve_exc\n raise exc\n \n def _generate_next_value_(name,start,count,last_values):\n for last_value in reversed(last_values):\n try :\n return last_value+1\n except TypeError:\n pass\n else :\n return start\n \n @classmethod\n def _missing_(cls,value):\n return None\n \n def __repr__(self):\n return \"<%s.%s: %r>\"%(\n self.__class__.__name__,self._name_,self._value_)\n \n def __str__(self):\n return \"%s.%s\"%(self.__class__.__name__,self._name_)\n \n def __dir__(self):\n added_behavior=[\n m\n for cls in self.__class__.mro()\n for m in cls.__dict__\n if m[0]!='_'and m not in self._member_map_\n ]\n return (['__class__','__doc__','__module__']+added_behavior)\n \n def __format__(self,format_spec):\n \n \n \n \n \n str_overridden=type(self).__str__ !=Enum.__str__\n if self._member_type_ is object or str_overridden:\n cls=str\n val=str(self)\n \n else :\n cls=self._member_type_\n val=self._value_\n return cls.__format__(val,format_spec)\n \n def __hash__(self):\n return hash(self._name_)\n \n def __reduce_ex__(self,proto):\n return self.__class__,(self._value_,)\n \n \n \n \n \n \n \n \n @DynamicClassAttribute\n def name(self):\n ''\n return self._name_\n \n @DynamicClassAttribute\n def value(self):\n ''\n return self._value_\n \n \nclass IntEnum(int,Enum):\n ''\n \n \ndef _reduce_ex_by_name(self,proto):\n return self.name\n \nclass Flag(Enum):\n ''\n \n def _generate_next_value_(name,start,count,last_values):\n ''\n\n\n\n\n\n\n \n if not count:\n return start if start is not None else 1\n for last_value in reversed(last_values):\n try :\n high_bit=_high_bit(last_value)\n break\n except Exception:\n raise TypeError('Invalid Flag value: %r'%last_value)from None\n return 2 **(high_bit+1)\n \n @classmethod\n def _missing_(cls,value):\n original_value=value\n if value <0:\n value=~value\n possible_member=cls._create_pseudo_member_(value)\n if original_value <0:\n possible_member=~possible_member\n return possible_member\n \n @classmethod\n 
def _create_pseudo_member_(cls,value):\n ''\n\n \n pseudo_member=cls._value2member_map_.get(value,None )\n if pseudo_member is None :\n \n _,extra_flags=_decompose(cls,value)\n if extra_flags:\n raise ValueError(\"%r is not a valid %s\"%(value,cls.__qualname__))\n \n pseudo_member=object.__new__(cls)\n pseudo_member._name_=None\n pseudo_member._value_=value\n \n \n pseudo_member=cls._value2member_map_.setdefault(value,pseudo_member)\n return pseudo_member\n \n def __contains__(self,other):\n if not isinstance(other,self.__class__):\n raise TypeError(\n \"unsupported operand type(s) for 'in': '%s' and '%s'\"%(\n type(other).__qualname__,self.__class__.__qualname__))\n return other._value_&self._value_ ==other._value_\n \n def __repr__(self):\n cls=self.__class__\n if self._name_ is not None :\n return '<%s.%s: %r>'%(cls.__name__,self._name_,self._value_)\n members,uncovered=_decompose(cls,self._value_)\n return '<%s.%s: %r>'%(\n cls.__name__,\n '|'.join([str(m._name_ or m._value_)for m in members]),\n self._value_,\n )\n \n def __str__(self):\n cls=self.__class__\n if self._name_ is not None :\n return '%s.%s'%(cls.__name__,self._name_)\n members,uncovered=_decompose(cls,self._value_)\n if len(members)==1 and members[0]._name_ is None :\n return '%s.%r'%(cls.__name__,members[0]._value_)\n else :\n return '%s.%s'%(\n cls.__name__,\n '|'.join([str(m._name_ or m._value_)for m in members]),\n )\n \n def __bool__(self):\n return bool(self._value_)\n \n def __or__(self,other):\n if not isinstance(other,self.__class__):\n return NotImplemented\n return self.__class__(self._value_ |other._value_)\n \n def __and__(self,other):\n if not isinstance(other,self.__class__):\n return NotImplemented\n return self.__class__(self._value_&other._value_)\n \n def __xor__(self,other):\n if not isinstance(other,self.__class__):\n return NotImplemented\n return self.__class__(self._value_ ^other._value_)\n \n def __invert__(self):\n members,uncovered=_decompose(self.__class__,self._value_)\n inverted=self.__class__(0)\n for m in self.__class__:\n if m not in members and not (m._value_&self._value_):\n inverted=inverted |m\n return self.__class__(inverted)\n \n \nclass IntFlag(int,Flag):\n ''\n \n @classmethod\n def _missing_(cls,value):\n if not isinstance(value,int):\n raise ValueError(\"%r is not a valid %s\"%(value,cls.__qualname__))\n new_member=cls._create_pseudo_member_(value)\n return new_member\n \n @classmethod\n def _create_pseudo_member_(cls,value):\n pseudo_member=cls._value2member_map_.get(value,None )\n if pseudo_member is None :\n need_to_create=[value]\n \n _,extra_flags=_decompose(cls,value)\n \n while extra_flags:\n \n bit=_high_bit(extra_flags)\n flag_value=2 **bit\n if (flag_value not in cls._value2member_map_ and\n flag_value not in need_to_create\n ):\n need_to_create.append(flag_value)\n if extra_flags ==-flag_value:\n extra_flags=0\n else :\n extra_flags ^=flag_value\n for value in reversed(need_to_create):\n \n pseudo_member=int.__new__(cls,value)\n pseudo_member._name_=None\n pseudo_member._value_=value\n \n \n pseudo_member=cls._value2member_map_.setdefault(value,pseudo_member)\n return pseudo_member\n \n def __or__(self,other):\n if not isinstance(other,(self.__class__,int)):\n return NotImplemented\n result=self.__class__(self._value_ |self.__class__(other)._value_)\n return result\n \n def __and__(self,other):\n if not isinstance(other,(self.__class__,int)):\n return NotImplemented\n return self.__class__(self._value_&self.__class__(other)._value_)\n \n def __xor__(self,other):\n if 
not isinstance(other,(self.__class__,int)):\n return NotImplemented\n return self.__class__(self._value_ ^self.__class__(other)._value_)\n \n __ror__=__or__\n __rand__=__and__\n __rxor__=__xor__\n \n def __invert__(self):\n result=self.__class__(~self._value_)\n return result\n \n \ndef _high_bit(value):\n ''\n return value.bit_length()-1\n \ndef unique(enumeration):\n ''\n duplicates=[]\n for name,member in enumeration.__members__.items():\n if name !=member.name:\n duplicates.append((name,member.name))\n if duplicates:\n alias_details=', '.join(\n [\"%s -> %s\"%(alias,name)for (alias,name)in duplicates])\n raise ValueError('duplicate values found in %r: %s'%\n (enumeration,alias_details))\n return enumeration\n \ndef _decompose(flag,value):\n ''\n \n not_covered=value\n negative=value <0\n members=[]\n for member in flag:\n member_value=member.value\n if member_value and member_value&value ==member_value:\n members.append(member)\n not_covered &=~member_value\n if not negative:\n tmp=not_covered\n while tmp:\n flag_value=2 **_high_bit(tmp)\n if flag_value in flag._value2member_map_:\n members.append(flag._value2member_map_[flag_value])\n not_covered &=~flag_value\n tmp &=~flag_value\n if not members and value in flag._value2member_map_:\n members.append(flag._value2member_map_[value])\n members.sort(key=lambda m:m._value_,reverse=True )\n if len(members)>1 and members[0].value ==value:\n \n members.pop(0)\n return members,not_covered\n", ["sys", "types"]], "errno": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\nE2BIG=7\n\nEACCES=13\n\nEADDRINUSE=10048\n\nEADDRNOTAVAIL=10049\n\nEAFNOSUPPORT=10047\n\nEAGAIN=11\n\nEALREADY=10037\n\nEBADF=9\n\nEBADMSG=104\n\nEBUSY=16\n\nECANCELED=105\n\nECHILD=10\n\nECONNABORTED=10053\n\nECONNREFUSED=10061\n\nECONNRESET=10054\n\nEDEADLK=36\n\nEDEADLOCK=36\n\nEDESTADDRREQ=10039\n\nEDOM=33\n\nEDQUOT=10069\n\nEEXIST=17\n\nEFAULT=14\n\nEFBIG=27\n\nEHOSTDOWN=10064\n\nEHOSTUNREACH=10065\n\nEIDRM=111\n\nEILSEQ=42\n\nEINPROGRESS=10036\n\nEINTR=4\n\nEINVAL=22\n\nEIO=5\n\nEISCONN=10056\n\nEISDIR=21\n\nELOOP=10062\n\nEMFILE=24\n\nEMLINK=31\n\nEMSGSIZE=10040\n\nENAMETOOLONG=38\n\nENETDOWN=10050\n\nENETRESET=10052\n\nENETUNREACH=10051\n\nENFILE=23\n\nENOBUFS=10055\n\nENODATA=120\n\nENODEV=19\n\nENOENT=2\n\nENOEXEC=8\n\nENOLCK=39\n\nENOLINK=121\n\nENOMEM=12\n\nENOMSG=122\n\nENOPROTOOPT=10042\n\nENOSPC=28\n\nENOSR=124\n\nENOSTR=125\n\nENOSYS=40\n\nENOTCONN=10057\n\nENOTDIR=20\n\nENOTEMPTY=41\n\nENOTRECOVERABLE=127\n\nENOTSOCK=10038\n\nENOTSUP=129\n\nENOTTY=25\n\nENXIO=6\n\nEOPNOTSUPP=10045\n\nEOVERFLOW=132\n\nEOWNERDEAD=133\n\nEPERM=1\n\nEPFNOSUPPORT=10046\n\nEPIPE=32\n\nEPROTO=134\n\nEPROTONOSUPPORT=10043\n\nEPROTOTYPE=10041\n\nERANGE=34\n\nEREMOTE=10071\n\nEROFS=30\n\nESHUTDOWN=10058\n\nESOCKTNOSUPPORT=10044\n\nESPIPE=29\n\nESRCH=3\n\nESTALE=10070\n\nETIME=137\n\nETIMEDOUT=10060\n\nETOOMANYREFS=10059\n\nETXTBSY=139\n\nEUSERS=10068\n\nEWOULDBLOCK=10035\n\nEXDEV=18\n\nWSABASEERR=10000\n\nWSAEACCES=10013\n\nWSAEADDRINUSE=10048\n\nWSAEADDRNOTAVAIL=10049\n\nWSAEAFNOSUPPORT=10047\n\nWSAEALREADY=10037\n\nWSAEBADF=10009\n\nWSAECONNABORTED=10053\n\nWSAECONNREFUSED=10061\n\nWSAECONNRESET=10054\n\nWSAEDESTADDRREQ=10039\n\nWSAEDISCON=10101\n\nWSAEDQUOT=10069\n\nWSAEFAULT=10014\n\nWSAEHOSTDOWN=10064\n\nWSAEHOSTUNREACH=10065\n\nWSAEINPROGRESS=10036\n\nWSAEINTR=10004\n\nWSAEINVAL=10022\n\nWSAEISCONN=10056\n\nWSAELOOP=10062\n\nWSAEMFILE=10024\n\nWSAEMSGSIZE=10040\n\nWSAENAMETOOLONG=10063\n\nWSAENETDOWN=10050\n\nWSAENETRESET=10052\n\nWSAENETUNREACH=10051\n\nWSAENOBUFS=10055\n\nWSAENOPROTOOPT=1004
2\n\nWSAENOTCONN=10057\n\nWSAENOTEMPTY=10066\n\nWSAENOTSOCK=10038\n\nWSAEOPNOTSUPP=10045\n\nWSAEPFNOSUPPORT=10046\n\nWSAEPROCLIM=10067\n\nWSAEPROTONOSUPPORT=10043\n\nWSAEPROTOTYPE=10041\n\nWSAEREMOTE=10071\n\nWSAESHUTDOWN=10058\n\nWSAESOCKTNOSUPPORT=10044\n\nWSAESTALE=10070\n\nWSAETIMEDOUT=10060\n\nWSAETOOMANYREFS=10059\n\nWSAEUSERS=10068\n\nWSAEWOULDBLOCK=10035\n\nWSANOTINITIALISED=10093\n\nWSASYSNOTREADY=10091\n\nWSAVERNOTSUPPORTED=10092\n\nerrorcode={v:k for (k,v)in globals().items()if k ==k.upper()}\n", []], "external_import": [".py", "import os\nfrom browser import doc\nimport urllib.request\n\n\n\n\n\nclass ModuleFinder:\n def __init__(self,path_entry):\n print(\"external_import here..\")\n \n self._module=None\n if path_entry.startswith('http://'):\n self.path_entry=path_entry\n else :\n raise ImportError()\n \n def __str__(self):\n return '<%s for \"%s\">'%(self.__class__.__name__,self.path_entry)\n \n def find_module(self,fullname,path=None ):\n path=path or self.path_entry\n \n for _ext in ['js','pyj','py']:\n _fp,_url,_headers=urllib.request.urlopen(path+'/'+'%s.%s'%(fullname,_ext))\n self._module=_fp.read()\n _fp.close()\n if self._module is not None :\n print(\"module found at %s:%s\"%(path,fullname))\n return ModuleLoader(path,fullname,self._module)\n \n print('module %s not found'%fullname)\n raise ImportError()\n return None\n \nclass ModuleLoader:\n ''\n \n def __init__(self,filepath,name,module_source):\n self._filepath=filepath\n self._name=name\n self._module_source=module_source\n \n def get_source(self):\n return self._module_source\n \n def is_package(self):\n return '.'in self._name\n \n def load_module(self):\n if self._name in sys.modules:\n \n mod=sys.modules[self._name]\n return mod\n \n _src=self.get_source()\n if self._filepath.endswith('.js'):\n mod=JSObject(import_js_module(_src,self._filepath,self._name))\n elif self._filepath.endswith('.py'):\n mod=JSObject(import_py_module(_src,self._filepath,self._name))\n elif self._filepath.endswith('.pyj'):\n mod=JSObject(import_pyj_module(_src,self._filepath,self._name))\n else :\n raise ImportError('Invalid Module: %s'%self._filepath)\n \n \n mod.__file__=self._filepath\n mod.__name__=self._name\n mod.__path__=os.path.abspath(self._filepath)\n mod.__loader__=self\n mod.__package__='.'.join(self._name.split('.')[:-1])\n \n if self.is_package():\n print('adding path for package')\n \n \n mod.__path__=[self._filepath]\n else :\n print('imported as regular module')\n \n print('creating a new module object for \"%s\"'%self._name)\n sys.modules.setdefault(self._name,mod)\n JSObject(__BRYTHON__.imported)[self._name]=mod\n \n return mod\n", ["browser", "os", "urllib.request"]], "faulthandler": [".py", "''\n\n\n_EXCEPTION_ACCESS_VIOLATION=-1073741819\n\n_EXCEPTION_INT_DIVIDE_BY_ZERO=-1073741676\n\n_EXCEPTION_NONCONTINUABLE=1\n\n_EXCEPTION_NONCONTINUABLE_EXCEPTION=-1073741787\n\n_EXCEPTION_STACK_OVERFLOW=-1073741571\n\nclass __loader__(object):\n ''\n\n\n\n\n \n \n \n __delattr__=\"\"\n \n __dict__=\"{'__module__': '_frozen_importlib', '__doc__': 'Meta path import for built-in modules.\\n\\n All methods are either class or static methods to avoid the need to\\n instantiate the class.\\n\\n ', 'module_repr': , 'find_spec': , 'find_module': , 'create_module': , 'exec_module': , 'get_code': , 'get_source': , 'is_package': , 'load_module': , '__dict__': , '__weakref__': }\"\n \n __dir__=\"\"\n \n __eq__=\"\"\n \n __format__=\"\"\n \n __ge__=\"\"\n \n __getattribute__=\"\"\n \n __gt__=\"\"\n \n __hash__=\"\"\n \n __init__=\"\"\n 
\n def __init_subclass__(*args,**kw):\n ''\n\n\n \n pass\n \n __le__=\"\"\n \n __lt__=\"\"\n \n __module__=\"\"\"_frozen_importlib\"\"\"\n \n __ne__=\"\"\n \n def __new__(*args,**kw):\n ''\n pass\n \n __reduce__=\"\"\n \n __reduce_ex__=\"\"\n \n __repr__=\"\"\n \n __setattr__=\"\"\n \n __sizeof__=\"\"\n \n __str__=\"\"\n \n def __subclasshook__(*args,**kw):\n ''\n\n\n\n\n \n pass\n \n __weakref__=\"\"\n \n create_module=\">\"\n \n exec_module=\">\"\n \n find_module=\">\"\n \n find_spec=\">\"\n \n get_code=\">\"\n \n get_source=\">\"\n \n is_package=\">\"\n \n load_module=\">\"\n \n def module_repr(*args,**kw):\n ''\n\n\n \n pass\n__spec__=\"ModuleSpec(name='faulthandler', loader=, origin='built-in')\"\n\ndef _fatal_error(*args,**kw):\n ''\n pass\n \ndef _fatal_error_c_thread(*args,**kw):\n ''\n pass\n \ndef _raise_exception(*args,**kw):\n ''\n pass\n \ndef _read_null(*args,**kw):\n ''\n pass\n \ndef _sigabrt(*args,**kw):\n ''\n pass\n \ndef _sigfpe(*args,**kw):\n ''\n pass\n \ndef _sigsegv(*args,**kw):\n ''\n pass\n \ndef cancel_dump_traceback_later(*args,**kw):\n ''\n pass\n \ndef disable(*args,**kw):\n ''\n pass\n \ndef dump_traceback(*args,**kw):\n ''\n pass\n \ndef dump_traceback_later(*args,**kw):\n ''\n \n pass\n \ndef enable(*args,**kw):\n ''\n pass\n \ndef is_enabled(*args,**kw):\n ''\n pass\n", []], "fnmatch": [".py", "''\n\n\n\n\n\n\n\n\n\n\nimport os\nimport posixpath\nimport re\nimport functools\n\n__all__=[\"filter\",\"fnmatch\",\"fnmatchcase\",\"translate\"]\n\n\n\nfrom itertools import count\n_nextgroupnum=count().__next__\ndel count\n\ndef fnmatch(name,pat):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n name=os.path.normcase(name)\n pat=os.path.normcase(pat)\n return fnmatchcase(name,pat)\n \n@functools.lru_cache(maxsize=256,typed=True )\ndef _compile_pattern(pat):\n if isinstance(pat,bytes):\n pat_str=str(pat,'ISO-8859-1')\n res_str=translate(pat_str)\n res=bytes(res_str,'ISO-8859-1')\n else :\n res=translate(pat)\n return re.compile(res).match\n \ndef filter(names,pat):\n ''\n result=[]\n pat=os.path.normcase(pat)\n match=_compile_pattern(pat)\n if os.path is posixpath:\n \n for name in names:\n if match(name):\n result.append(name)\n else :\n for name in names:\n if match(os.path.normcase(name)):\n result.append(name)\n return result\n \ndef fnmatchcase(name,pat):\n ''\n\n\n\n \n match=_compile_pattern(pat)\n return match(name)is not None\n \n \ndef translate(pat):\n ''\n\n\n \n \n STAR=object()\n res=[]\n add=res.append\n i,n=0,len(pat)\n while i =n:\n add('\\\\[')\n else :\n stuff=pat[i:j]\n if '--'not in stuff:\n stuff=stuff.replace('\\\\',r'\\\\')\n else :\n chunks=[]\n k=i+2 if pat[i]=='!'else i+1\n while True :\n k=pat.find('-',k,j)\n if k <0:\n break\n chunks.append(pat[i:k])\n i=k+1\n k=k+3\n chunks.append(pat[i:j])\n \n \n stuff='-'.join(s.replace('\\\\',r'\\\\').replace('-',r'\\-')\n for s in chunks)\n \n stuff=re.sub(r'([&~|])',r'\\\\\\1',stuff)\n i=j+1\n if stuff[0]=='!':\n stuff='^'+stuff[1:]\n elif stuff[0]in ('^','['):\n stuff='\\\\'+stuff\n add(f'[{stuff}]')\n else :\n add(re.escape(c))\n assert i ==n\n \n \n inp=res\n res=[]\n add=res.append\n i,n=0,len(inp)\n \n while i .*?{fixed}))(?P=g{groupnum})\")\n assert i ==n\n res=\"\".join(res)\n return fr'(?s:{res})\\Z'\n", ["functools", "itertools", "os", "posixpath", "re"]], "formatter": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nimport sys\nimport warnings\nwarnings.warn('the formatter module is deprecated',DeprecationWarning,\nstacklevel=2)\n\n\nAS_IS=None\n\n\nclass NullFormatter:\n ''\n\n\n\n\n\n\n\n \n \n 
def __init__(self,writer=None ):\n if writer is None :\n writer=NullWriter()\n self.writer=writer\n def end_paragraph(self,blankline):pass\n def add_line_break(self):pass\n def add_hor_rule(self,*args,**kw):pass\n def add_label_data(self,format,counter,blankline=None ):pass\n def add_flowing_data(self,data):pass\n def add_literal_data(self,data):pass\n def flush_softspace(self):pass\n def push_alignment(self,align):pass\n def pop_alignment(self):pass\n def push_font(self,x):pass\n def pop_font(self):pass\n def push_margin(self,margin):pass\n def pop_margin(self):pass\n def set_spacing(self,spacing):pass\n def push_style(self,*styles):pass\n def pop_style(self,n=1):pass\n def assert_line_data(self,flag=1):pass\n \n \nclass AbstractFormatter:\n ''\n\n\n\n\n\n \n \n \n \n \n \n \n def __init__(self,writer):\n self.writer=writer\n self.align=None\n self.align_stack=[]\n self.font_stack=[]\n self.margin_stack=[]\n self.spacing=None\n self.style_stack=[]\n self.nospace=1\n self.softspace=0\n self.para_end=1\n self.parskip=0\n self.hard_break=1\n self.have_label=0\n \n def end_paragraph(self,blankline):\n if not self.hard_break:\n self.writer.send_line_break()\n self.have_label=0\n if self.parskip 0:\n label=label+self.format_letter(c,counter)\n elif c in 'iI':\n if counter >0:\n label=label+self.format_roman(c,counter)\n else :\n label=label+c\n return label\n \n def format_letter(self,case,counter):\n label=''\n while counter >0:\n counter,x=divmod(counter -1,26)\n \n \n \n s=chr(ord(case)+x)\n label=s+label\n return label\n \n def format_roman(self,case,counter):\n ones=['i','x','c','m']\n fives=['v','l','d']\n label,index='',0\n \n while counter >0:\n counter,x=divmod(counter,10)\n if x ==9:\n label=ones[index]+ones[index+1]+label\n elif x ==4:\n label=ones[index]+fives[index]+label\n else :\n if x >=5:\n s=fives[index]\n x=x -5\n else :\n s=''\n s=s+ones[index]*x\n label=s+label\n index=index+1\n if case =='I':\n return label.upper()\n return label\n \n def add_flowing_data(self,data):\n if not data:return\n prespace=data[:1].isspace()\n postspace=data[-1:].isspace()\n data=\" \".join(data.split())\n if self.nospace and not data:\n return\n elif prespace or self.softspace:\n if not data:\n if not self.nospace:\n self.softspace=1\n self.parskip=0\n return\n if not self.nospace:\n data=' '+data\n self.hard_break=self.nospace=self.para_end=\\\n self.parskip=self.have_label=0\n self.softspace=postspace\n self.writer.send_flowing_data(data)\n \n def add_literal_data(self,data):\n if not data:return\n if self.softspace:\n self.writer.send_flowing_data(\" \")\n self.hard_break=data[-1:]=='\\n'\n self.nospace=self.para_end=self.softspace=\\\n self.parskip=self.have_label=0\n self.writer.send_literal_data(data)\n \n def flush_softspace(self):\n if self.softspace:\n self.hard_break=self.para_end=self.parskip=\\\n self.have_label=self.softspace=0\n self.nospace=1\n self.writer.send_flowing_data(' ')\n \n def push_alignment(self,align):\n if align and align !=self.align:\n self.writer.new_alignment(align)\n self.align=align\n self.align_stack.append(align)\n else :\n self.align_stack.append(self.align)\n \n def pop_alignment(self):\n if self.align_stack:\n del self.align_stack[-1]\n if self.align_stack:\n self.align=align=self.align_stack[-1]\n self.writer.new_alignment(align)\n else :\n self.align=None\n self.writer.new_alignment(None )\n \n def push_font(self,font):\n size,i,b,tt=font\n if self.softspace:\n self.hard_break=self.para_end=self.softspace=0\n self.nospace=1\n 
self.writer.send_flowing_data(' ')\n if self.font_stack:\n csize,ci,cb,ctt=self.font_stack[-1]\n if size is AS_IS:size=csize\n if i is AS_IS:i=ci\n if b is AS_IS:b=cb\n if tt is AS_IS:tt=ctt\n font=(size,i,b,tt)\n self.font_stack.append(font)\n self.writer.new_font(font)\n \n def pop_font(self):\n if self.font_stack:\n del self.font_stack[-1]\n if self.font_stack:\n font=self.font_stack[-1]\n else :\n font=None\n self.writer.new_font(font)\n \n def push_margin(self,margin):\n self.margin_stack.append(margin)\n fstack=[m for m in self.margin_stack if m]\n if not margin and fstack:\n margin=fstack[-1]\n self.writer.new_margin(margin,len(fstack))\n \n def pop_margin(self):\n if self.margin_stack:\n del self.margin_stack[-1]\n fstack=[m for m in self.margin_stack if m]\n if fstack:\n margin=fstack[-1]\n else :\n margin=None\n self.writer.new_margin(margin,len(fstack))\n \n def set_spacing(self,spacing):\n self.spacing=spacing\n self.writer.new_spacing(spacing)\n \n def push_style(self,*styles):\n if self.softspace:\n self.hard_break=self.para_end=self.softspace=0\n self.nospace=1\n self.writer.send_flowing_data(' ')\n for style in styles:\n self.style_stack.append(style)\n self.writer.new_styles(tuple(self.style_stack))\n \n def pop_style(self,n=1):\n del self.style_stack[-n:]\n self.writer.new_styles(tuple(self.style_stack))\n \n def assert_line_data(self,flag=1):\n self.nospace=self.hard_break=not flag\n self.para_end=self.parskip=self.have_label=0\n \n \nclass NullWriter:\n ''\n\n\n\n\n\n \n def __init__(self):pass\n def flush(self):pass\n def new_alignment(self,align):pass\n def new_font(self,font):pass\n def new_margin(self,margin,level):pass\n def new_spacing(self,spacing):pass\n def new_styles(self,styles):pass\n def send_paragraph(self,blankline):pass\n def send_line_break(self):pass\n def send_hor_rule(self,*args,**kw):pass\n def send_label_data(self,data):pass\n def send_flowing_data(self,data):pass\n def send_literal_data(self,data):pass\n \n \nclass AbstractWriter(NullWriter):\n ''\n\n\n\n\n \n \n def new_alignment(self,align):\n print(\"new_alignment(%r)\"%(align,))\n \n def new_font(self,font):\n print(\"new_font(%r)\"%(font,))\n \n def new_margin(self,margin,level):\n print(\"new_margin(%r, %d)\"%(margin,level))\n \n def new_spacing(self,spacing):\n print(\"new_spacing(%r)\"%(spacing,))\n \n def new_styles(self,styles):\n print(\"new_styles(%r)\"%(styles,))\n \n def send_paragraph(self,blankline):\n print(\"send_paragraph(%r)\"%(blankline,))\n \n def send_line_break(self):\n print(\"send_line_break()\")\n \n def send_hor_rule(self,*args,**kw):\n print(\"send_hor_rule()\")\n \n def send_label_data(self,data):\n print(\"send_label_data(%r)\"%(data,))\n \n def send_flowing_data(self,data):\n print(\"send_flowing_data(%r)\"%(data,))\n \n def send_literal_data(self,data):\n print(\"send_literal_data(%r)\"%(data,))\n \n \nclass DumbWriter(NullWriter):\n ''\n\n\n\n\n\n \n \n def __init__(self,file=None ,maxcol=72):\n self.file=file or sys.stdout\n self.maxcol=maxcol\n NullWriter.__init__(self)\n self.reset()\n \n def reset(self):\n self.col=0\n self.atbreak=0\n \n def send_paragraph(self,blankline):\n self.file.write('\\n'*blankline)\n self.col=0\n self.atbreak=0\n \n def send_line_break(self):\n self.file.write('\\n')\n self.col=0\n self.atbreak=0\n \n def send_hor_rule(self,*args,**kw):\n self.file.write('\\n')\n self.file.write('-'*self.maxcol)\n self.file.write('\\n')\n self.col=0\n self.atbreak=0\n \n def send_literal_data(self,data):\n self.file.write(data)\n 
i=data.rfind('\\n')\n if i >=0:\n self.col=0\n data=data[i+1:]\n data=data.expandtabs()\n self.col=self.col+len(data)\n self.atbreak=0\n \n def send_flowing_data(self,data):\n if not data:return\n atbreak=self.atbreak or data[0].isspace()\n col=self.col\n maxcol=self.maxcol\n write=self.file.write\n for word in data.split():\n if atbreak:\n if col+len(word)>=maxcol:\n write('\\n')\n col=0\n else :\n write(' ')\n col=col+1\n write(word)\n col=col+len(word)\n atbreak=1\n self.col=col\n self.atbreak=data[-1].isspace()\n \n \ndef test(file=None ):\n w=DumbWriter()\n f=AbstractFormatter(w)\n if file is not None :\n fp=open(file)\n elif sys.argv[1:]:\n fp=open(sys.argv[1])\n else :\n fp=sys.stdin\n try :\n for line in fp:\n if line =='\\n':\n f.end_paragraph(1)\n else :\n f.add_flowing_data(line)\n finally :\n if fp is not sys.stdin:\n fp.close()\n f.end_paragraph(0)\n \n \nif __name__ =='__main__':\n test()\n", ["sys", "warnings"]], "fractions": [".py", "\n\n\n\"\"\"Fraction, infinite-precision, real numbers.\"\"\"\n\nfrom decimal import Decimal\nimport math\nimport numbers\nimport operator\nimport re\nimport sys\n\n__all__=['Fraction']\n\n\n\n\n_PyHASH_MODULUS=sys.hash_info.modulus\n\n\n_PyHASH_INF=sys.hash_info.inf\n\n_RATIONAL_FORMAT=re.compile(r\"\"\"\n \\A\\s* # optional whitespace at the start, then\n (?P[-+]?) # an optional sign, then\n (?=\\d|\\.\\d) # lookahead for digit or .digit\n (?P\\d*) # numerator (possibly empty)\n (?: # followed by\n (?:/(?P\\d+))? # an optional denominator\n | # or\n (?:\\.(?P\\d*))? # an optional fractional part\n (?:E(?P[-+]?\\d+))? # and optional exponent\n )\n \\s*\\Z # and optional whitespace to finish\n\"\"\",re.VERBOSE |re.IGNORECASE)\n\n\nclass Fraction(numbers.Rational):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n __slots__=('_numerator','_denominator')\n \n \n def __new__(cls,numerator=0,denominator=None ,*,_normalize=True ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n self=super(Fraction,cls).__new__(cls)\n \n if denominator is None :\n if type(numerator)is int:\n self._numerator=numerator\n self._denominator=1\n return self\n \n elif isinstance(numerator,numbers.Rational):\n self._numerator=numerator.numerator\n self._denominator=numerator.denominator\n return self\n \n elif isinstance(numerator,(float,Decimal)):\n \n self._numerator,self._denominator=numerator.as_integer_ratio()\n return self\n \n elif isinstance(numerator,str):\n \n m=_RATIONAL_FORMAT.match(numerator)\n if m is None :\n raise ValueError('Invalid literal for Fraction: %r'%\n numerator)\n numerator=int(m.group('num')or '0')\n denom=m.group('denom')\n if denom:\n denominator=int(denom)\n else :\n denominator=1\n decimal=m.group('decimal')\n if decimal:\n scale=10 **len(decimal)\n numerator=numerator *scale+int(decimal)\n denominator *=scale\n exp=m.group('exp')\n if exp:\n exp=int(exp)\n if exp >=0:\n numerator *=10 **exp\n else :\n denominator *=10 **-exp\n if m.group('sign')=='-':\n numerator=-numerator\n \n else :\n raise TypeError(\"argument should be a string \"\n \"or a Rational instance\")\n \n elif type(numerator)is int is type(denominator):\n pass\n \n elif (isinstance(numerator,numbers.Rational)and\n isinstance(denominator,numbers.Rational)):\n numerator,denominator=(\n numerator.numerator *denominator.denominator,\n denominator.numerator *numerator.denominator\n )\n else :\n raise TypeError(\"both arguments should be \"\n \"Rational instances\")\n \n if denominator ==0:\n raise ZeroDivisionError('Fraction(%s, 0)'%numerator)\n if _normalize:\n 
g=math.gcd(numerator,denominator)\n if denominator <0:\n g=-g\n numerator //=g\n denominator //=g\n self._numerator=numerator\n self._denominator=denominator\n return self\n \n @classmethod\n def from_float(cls,f):\n ''\n\n\n\n \n if isinstance(f,numbers.Integral):\n return cls(f)\n elif not isinstance(f,float):\n raise TypeError(\"%s.from_float() only takes floats, not %r (%s)\"%\n (cls.__name__,f,type(f).__name__))\n return cls(*f.as_integer_ratio())\n \n @classmethod\n def from_decimal(cls,dec):\n ''\n from decimal import Decimal\n if isinstance(dec,numbers.Integral):\n dec=Decimal(int(dec))\n elif not isinstance(dec,Decimal):\n raise TypeError(\n \"%s.from_decimal() only takes Decimals, not %r (%s)\"%\n (cls.__name__,dec,type(dec).__name__))\n return cls(*dec.as_integer_ratio())\n \n def as_integer_ratio(self):\n ''\n\n\n\n \n return (self._numerator,self._denominator)\n \n def limit_denominator(self,max_denominator=1000000):\n ''\n\n\n\n\n\n\n\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n if max_denominator <1:\n raise ValueError(\"max_denominator should be at least 1\")\n if self._denominator <=max_denominator:\n return Fraction(self)\n \n p0,q0,p1,q1=0,1,1,0\n n,d=self._numerator,self._denominator\n while True :\n a=n //d\n q2=q0+a *q1\n if q2 >max_denominator:\n break\n p0,q0,p1,q1=p1,q1,p0+a *p1,q2\n n,d=d,n -a *d\n \n k=(max_denominator -q0)//q1\n bound1=Fraction(p0+k *p1,q0+k *q1)\n bound2=Fraction(p1,q1)\n if abs(bound2 -self)<=abs(bound1 -self):\n return bound2\n else :\n return bound1\n \n @property\n def numerator(a):\n return a._numerator\n \n @property\n def denominator(a):\n return a._denominator\n \n def __repr__(self):\n ''\n return '%s(%s, %s)'%(self.__class__.__name__,\n self._numerator,self._denominator)\n \n def __str__(self):\n ''\n if self._denominator ==1:\n return str(self._numerator)\n else :\n return '%s/%s'%(self._numerator,self._denominator)\n \n def _operator_fallbacks(monomorphic_operator,fallback_operator):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n def forward(a,b):\n if isinstance(b,(int,Fraction)):\n return monomorphic_operator(a,b)\n elif isinstance(b,float):\n return fallback_operator(float(a),b)\n elif isinstance(b,complex):\n return fallback_operator(complex(a),b)\n else :\n return NotImplemented\n forward.__name__='__'+fallback_operator.__name__+'__'\n forward.__doc__=monomorphic_operator.__doc__\n \n def reverse(b,a):\n if isinstance(a,numbers.Rational):\n \n return monomorphic_operator(a,b)\n elif isinstance(a,numbers.Real):\n return fallback_operator(float(a),float(b))\n elif isinstance(a,numbers.Complex):\n return fallback_operator(complex(a),complex(b))\n else :\n return NotImplemented\n reverse.__name__='__r'+fallback_operator.__name__+'__'\n reverse.__doc__=monomorphic_operator.__doc__\n \n return forward,reverse\n \n def _add(a,b):\n ''\n da,db=a.denominator,b.denominator\n return Fraction(a.numerator *db+b.numerator *da,\n da *db)\n \n __add__,__radd__=_operator_fallbacks(_add,operator.add)\n \n def _sub(a,b):\n ''\n da,db=a.denominator,b.denominator\n return Fraction(a.numerator *db -b.numerator *da,\n da *db)\n \n __sub__,__rsub__=_operator_fallbacks(_sub,operator.sub)\n \n def _mul(a,b):\n ''\n return Fraction(a.numerator *b.numerator,a.denominator *b.denominator)\n \n __mul__,__rmul__=_operator_fallbacks(_mul,operator.mul)\n \n def _div(a,b):\n ''\n return Fraction(a.numerator 
*b.denominator,\n a.denominator *b.numerator)\n \n __truediv__,__rtruediv__=_operator_fallbacks(_div,operator.truediv)\n \n def _floordiv(a,b):\n ''\n return (a.numerator *b.denominator)//(a.denominator *b.numerator)\n \n __floordiv__,__rfloordiv__=_operator_fallbacks(_floordiv,operator.floordiv)\n \n def _divmod(a,b):\n ''\n da,db=a.denominator,b.denominator\n div,n_mod=divmod(a.numerator *db,da *b.numerator)\n return div,Fraction(n_mod,da *db)\n \n __divmod__,__rdivmod__=_operator_fallbacks(_divmod,divmod)\n \n def _mod(a,b):\n ''\n da,db=a.denominator,b.denominator\n return Fraction((a.numerator *db)%(b.numerator *da),da *db)\n \n __mod__,__rmod__=_operator_fallbacks(_mod,operator.mod)\n \n def __pow__(a,b):\n ''\n\n\n\n\n\n \n if isinstance(b,numbers.Rational):\n if b.denominator ==1:\n power=b.numerator\n if power >=0:\n return Fraction(a._numerator **power,\n a._denominator **power,\n _normalize=False )\n elif a._numerator >=0:\n return Fraction(a._denominator **-power,\n a._numerator **-power,\n _normalize=False )\n else :\n return Fraction((-a._denominator)**-power,\n (-a._numerator)**-power,\n _normalize=False )\n else :\n \n \n return float(a)**float(b)\n else :\n return float(a)**b\n \n def __rpow__(b,a):\n ''\n if b._denominator ==1 and b._numerator >=0:\n \n return a **b._numerator\n \n if isinstance(a,numbers.Rational):\n return Fraction(a.numerator,a.denominator)**b\n \n if b._denominator ==1:\n return a **b._numerator\n \n return a **float(b)\n \n def __pos__(a):\n ''\n return Fraction(a._numerator,a._denominator,_normalize=False )\n \n def __neg__(a):\n ''\n return Fraction(-a._numerator,a._denominator,_normalize=False )\n \n def __abs__(a):\n ''\n return Fraction(abs(a._numerator),a._denominator,_normalize=False )\n \n def __trunc__(a):\n ''\n if a._numerator <0:\n return -(-a._numerator //a._denominator)\n else :\n return a._numerator //a._denominator\n \n def __floor__(a):\n ''\n return a.numerator //a.denominator\n \n def __ceil__(a):\n ''\n \n return -(-a.numerator //a.denominator)\n \n def __round__(self,ndigits=None ):\n ''\n\n\n \n if ndigits is None :\n floor,remainder=divmod(self.numerator,self.denominator)\n if remainder *2 self.denominator:\n return floor+1\n \n elif floor %2 ==0:\n return floor\n else :\n return floor+1\n shift=10 **abs(ndigits)\n \n \n \n if ndigits >0:\n return Fraction(round(self *shift),shift)\n else :\n return Fraction(round(self /shift)*shift)\n \n def __hash__(self):\n ''\n \n \n \n \n \n \n try :\n dinv=pow(self._denominator,-1,_PyHASH_MODULUS)\n except ValueError:\n \n hash_=_PyHASH_INF\n else :\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n hash_=hash(hash(abs(self._numerator))*dinv)\n result=hash_ if self._numerator >=0 else -hash_\n return -2 if result ==-1 else result\n \n def __eq__(a,b):\n ''\n if type(b)is int:\n return a._numerator ==b and a._denominator ==1\n if isinstance(b,numbers.Rational):\n return (a._numerator ==b.numerator and\n a._denominator ==b.denominator)\n if isinstance(b,numbers.Complex)and b.imag ==0:\n b=b.real\n if isinstance(b,float):\n if math.isnan(b)or math.isinf(b):\n \n \n return 0.0 ==b\n else :\n return a ==a.from_float(b)\n else :\n \n \n return NotImplemented\n \n def _richcmp(self,other,op):\n ''\n\n\n\n\n\n\n\n \n \n if isinstance(other,numbers.Rational):\n return op(self._numerator *other.denominator,\n self._denominator *other.numerator)\n if isinstance(other,float):\n if math.isnan(other)or math.isinf(other):\n return op(0.0,other)\n else :\n return op(self,self.from_float(other))\n else :\n 
return NotImplemented\n \n def __lt__(a,b):\n ''\n return a._richcmp(b,operator.lt)\n \n def __gt__(a,b):\n ''\n return a._richcmp(b,operator.gt)\n \n def __le__(a,b):\n ''\n return a._richcmp(b,operator.le)\n \n def __ge__(a,b):\n ''\n return a._richcmp(b,operator.ge)\n \n def __bool__(a):\n ''\n \n \n return bool(a._numerator)\n \n \n \n def __reduce__(self):\n return (self.__class__,(str(self),))\n \n def __copy__(self):\n if type(self)==Fraction:\n return self\n return self.__class__(self._numerator,self._denominator)\n \n def __deepcopy__(self,memo):\n if type(self)==Fraction:\n return self\n return self.__class__(self._numerator,self._denominator)\n", ["decimal", "math", "numbers", "operator", "re", "sys"]], "functools": [".py", "''\n\n\n\n\n\n\n\n\n\n\n__all__=['update_wrapper','wraps','WRAPPER_ASSIGNMENTS','WRAPPER_UPDATES',\n'total_ordering','cache','cmp_to_key','lru_cache','reduce',\n'partial','partialmethod','singledispatch','singledispatchmethod',\n'cached_property']\n\nfrom abc import get_cache_token\nfrom collections import namedtuple\n\nfrom reprlib import recursive_repr\nfrom _thread import RLock\nfrom types import GenericAlias\n\n\n\n\n\n\n\n\n\nWRAPPER_ASSIGNMENTS=('__module__','__name__','__qualname__','__doc__',\n'__annotations__')\nWRAPPER_UPDATES=('__dict__',)\ndef update_wrapper(wrapper,\nwrapped,\nassigned=WRAPPER_ASSIGNMENTS,\nupdated=WRAPPER_UPDATES):\n ''\n\n\n\n\n\n\n\n\n\n \n for attr in assigned:\n try :\n value=getattr(wrapped,attr)\n except AttributeError:\n pass\n else :\n setattr(wrapper,attr,value)\n for attr in updated:\n getattr(wrapper,attr).update(getattr(wrapped,attr,{}))\n \n \n wrapper.__wrapped__=wrapped\n \n return wrapper\n \ndef wraps(wrapped,\nassigned=WRAPPER_ASSIGNMENTS,\nupdated=WRAPPER_UPDATES):\n ''\n\n\n\n\n\n\n \n return partial(update_wrapper,wrapped=wrapped,\n assigned=assigned,updated=updated)\n \n \n \n \n \n \n \n \n \n \n \ndef _gt_from_lt(self,other,NotImplemented=NotImplemented):\n ''\n op_result=self.__lt__(other)\n if op_result is NotImplemented:\n return op_result\n return not op_result and self !=other\n \ndef _le_from_lt(self,other,NotImplemented=NotImplemented):\n ''\n op_result=self.__lt__(other)\n if op_result is NotImplemented:\n return op_result\n return op_result or self ==other\n \ndef _ge_from_lt(self,other,NotImplemented=NotImplemented):\n ''\n op_result=self.__lt__(other)\n if op_result is NotImplemented:\n return op_result\n return not op_result\n \ndef _ge_from_le(self,other,NotImplemented=NotImplemented):\n ''\n op_result=self.__le__(other)\n if op_result is NotImplemented:\n return op_result\n return not op_result or self ==other\n \ndef _lt_from_le(self,other,NotImplemented=NotImplemented):\n ''\n op_result=self.__le__(other)\n if op_result is NotImplemented:\n return op_result\n return op_result and self !=other\n \ndef _gt_from_le(self,other,NotImplemented=NotImplemented):\n ''\n op_result=self.__le__(other)\n if op_result is NotImplemented:\n return op_result\n return not op_result\n \ndef _lt_from_gt(self,other,NotImplemented=NotImplemented):\n ''\n op_result=self.__gt__(other)\n if op_result is NotImplemented:\n return op_result\n return not op_result and self !=other\n \ndef _ge_from_gt(self,other,NotImplemented=NotImplemented):\n ''\n op_result=self.__gt__(other)\n if op_result is NotImplemented:\n return op_result\n return op_result or self ==other\n \ndef _le_from_gt(self,other,NotImplemented=NotImplemented):\n ''\n op_result=self.__gt__(other)\n if op_result is NotImplemented:\n return op_result\n 
return not op_result\n \ndef _le_from_ge(self,other,NotImplemented=NotImplemented):\n ''\n op_result=self.__ge__(other)\n if op_result is NotImplemented:\n return op_result\n return not op_result or self ==other\n \ndef _gt_from_ge(self,other,NotImplemented=NotImplemented):\n ''\n op_result=self.__ge__(other)\n if op_result is NotImplemented:\n return op_result\n return op_result and self !=other\n \ndef _lt_from_ge(self,other,NotImplemented=NotImplemented):\n ''\n op_result=self.__ge__(other)\n if op_result is NotImplemented:\n return op_result\n return not op_result\n \n_convert={\n'__lt__':[('__gt__',_gt_from_lt),\n('__le__',_le_from_lt),\n('__ge__',_ge_from_lt)],\n'__le__':[('__ge__',_ge_from_le),\n('__lt__',_lt_from_le),\n('__gt__',_gt_from_le)],\n'__gt__':[('__lt__',_lt_from_gt),\n('__ge__',_ge_from_gt),\n('__le__',_le_from_gt)],\n'__ge__':[('__le__',_le_from_ge),\n('__gt__',_gt_from_ge),\n('__lt__',_lt_from_ge)]\n}\n\ndef total_ordering(cls):\n ''\n \n roots={op for op in _convert if getattr(cls,op,None )is not getattr(object,op,None )}\n if not roots:\n raise ValueError('must define at least one ordering operation: < > <= >=')\n root=max(roots)\n for opname,opfunc in _convert[root]:\n if opname not in roots:\n opfunc.__name__=opname\n setattr(cls,opname,opfunc)\n return cls\n \n \n \n \n \n \ndef cmp_to_key(mycmp):\n ''\n class K(object):\n __slots__=['obj']\n def __init__(self,obj):\n self.obj=obj\n def __lt__(self,other):\n return mycmp(self.obj,other.obj)<0\n def __gt__(self,other):\n return mycmp(self.obj,other.obj)>0\n def __eq__(self,other):\n return mycmp(self.obj,other.obj)==0\n def __le__(self,other):\n return mycmp(self.obj,other.obj)<=0\n def __ge__(self,other):\n return mycmp(self.obj,other.obj)>=0\n __hash__=None\n return K\n \ntry :\n from _functools import cmp_to_key\nexcept ImportError:\n pass\n \n \n \n \n \n \n_initial_missing=object()\n\ndef reduce(function,sequence,initial=_initial_missing):\n ''\n\n\n\n\n\n\n\n\n \n \n it=iter(sequence)\n \n if initial is _initial_missing:\n try :\n value=next(it)\n except StopIteration:\n raise TypeError(\"reduce() of empty sequence with no initial value\")from None\n else :\n value=initial\n \n for element in it:\n value=function(value,element)\n \n return value\n \ntry :\n from _functools import reduce\nexcept ImportError:\n pass\n \n \n \n \n \n \n \nclass partial:\n ''\n\n \n \n __slots__=\"func\",\"args\",\"keywords\",\"__dict__\",\"__weakref__\"\n \n def __new__(cls,func,/,*args,**keywords):\n if not callable(func):\n raise TypeError(\"the first argument must be callable\")\n \n if hasattr(func,\"func\"):\n args=func.args+args\n keywords={**func.keywords,**keywords}\n func=func.func\n \n self=super(partial,cls).__new__(cls)\n \n self.func=func\n self.args=args\n self.keywords=keywords\n return self\n \n def __call__(self,/,*args,**keywords):\n keywords={**self.keywords,**keywords}\n return self.func(*self.args,*args,**keywords)\n \n @recursive_repr()\n def __repr__(self):\n qualname=type(self).__qualname__\n args=[repr(self.func)]\n args.extend(repr(x)for x in self.args)\n args.extend(f\"{k}={v!r}\"for (k,v)in self.keywords.items())\n if type(self).__module__ ==\"functools\":\n return f\"functools.{qualname}({', '.join(args)})\"\n return f\"{qualname}({', '.join(args)})\"\n \n def __reduce__(self):\n return type(self),(self.func,),(self.func,self.args,\n self.keywords or None ,self.__dict__ or None )\n \n def __setstate__(self,state):\n if not isinstance(state,tuple):\n raise TypeError(\"argument to __setstate__ must be 
a tuple\")\n if len(state)!=4:\n raise TypeError(f\"expected 4 items in state, got {len(state)}\")\n func,args,kwds,namespace=state\n if (not callable(func)or not isinstance(args,tuple)or\n (kwds is not None and not isinstance(kwds,dict))or\n (namespace is not None and not isinstance(namespace,dict))):\n raise TypeError(\"invalid partial state\")\n \n args=tuple(args)\n if kwds is None :\n kwds={}\n elif type(kwds)is not dict:\n kwds=dict(kwds)\n if namespace is None :\n namespace={}\n \n self.__dict__=namespace\n self.func=func\n self.args=args\n self.keywords=kwds\n \ntry :\n from _functools import partial\nexcept ImportError:\n pass\n \n \nclass partialmethod(object):\n ''\n\n\n\n\n \n \n def __init__(self,func,/,*args,**keywords):\n if not callable(func)and not hasattr(func,\"__get__\"):\n raise TypeError(\"{!r} is not callable or a descriptor\"\n .format(func))\n \n \n \n if isinstance(func,partialmethod):\n \n \n \n self.func=func.func\n self.args=func.args+args\n self.keywords={**func.keywords,**keywords}\n else :\n self.func=func\n self.args=args\n self.keywords=keywords\n \n def __repr__(self):\n args=\", \".join(map(repr,self.args))\n keywords=\", \".join(\"{}={!r}\".format(k,v)\n for k,v in self.keywords.items())\n format_string=\"{module}.{cls}({func}, {args}, {keywords})\"\n return format_string.format(module=self.__class__.__module__,\n cls=self.__class__.__qualname__,\n func=self.func,\n args=args,\n keywords=keywords)\n \n def _make_unbound_method(self):\n def _method(cls_or_self,/,*args,**keywords):\n keywords={**self.keywords,**keywords}\n return self.func(cls_or_self,*self.args,*args,**keywords)\n _method.__isabstractmethod__=self.__isabstractmethod__\n _method._partialmethod=self\n return _method\n \n def __get__(self,obj,cls=None ):\n get=getattr(self.func,\"__get__\",None )\n result=None\n if get is not None :\n new_func=get(obj,cls)\n if new_func is not self.func:\n \n \n result=partial(new_func,*self.args,**self.keywords)\n try :\n result.__self__=new_func.__self__\n except AttributeError:\n pass\n if result is None :\n \n \n result=self._make_unbound_method().__get__(obj,cls)\n return result\n \n @property\n def __isabstractmethod__(self):\n return getattr(self.func,\"__isabstractmethod__\",False )\n \n __class_getitem__=classmethod(GenericAlias)\n \n \n \n \ndef _unwrap_partial(func):\n while isinstance(func,partial):\n func=func.func\n return func\n \n \n \n \n \n_CacheInfo=namedtuple(\"CacheInfo\",[\"hits\",\"misses\",\"maxsize\",\"currsize\"])\n\nclass _HashedSeq(list):\n ''\n\n\n\n \n \n __slots__='hashvalue'\n \n def __init__(self,tup,hash=hash):\n self[:]=tup\n self.hashvalue=hash(tup)\n \n def __hash__(self):\n return self.hashvalue\n \ndef _make_key(args,kwds,typed,\nkwd_mark=(object(),),\nfasttypes={int,str},\ntuple=tuple,type=type,len=len):\n ''\n\n\n\n\n\n\n\n\n \n \n \n \n \n key=args\n if kwds:\n key +=kwd_mark\n for item in kwds.items():\n key +=item\n if typed:\n key +=tuple(type(v)for v in args)\n if kwds:\n key +=tuple(type(v)for v in kwds.values())\n elif len(key)==1 and type(key[0])in fasttypes:\n return key[0]\n return _HashedSeq(key)\n \ndef lru_cache(maxsize=128,typed=False ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n \n \n \n \n \n if isinstance(maxsize,int):\n \n if maxsize <0:\n maxsize=0\n elif callable(maxsize)and isinstance(typed,bool):\n \n user_function,maxsize=maxsize,128\n wrapper=_lru_cache_wrapper(user_function,maxsize,typed,_CacheInfo)\n wrapper.cache_parameters=lambda :{'maxsize':maxsize,'typed':typed}\n return 
update_wrapper(wrapper,user_function)\n elif maxsize is not None :\n raise TypeError(\n 'Expected first argument to be an integer, a callable, or None')\n \n def decorating_function(user_function):\n wrapper=_lru_cache_wrapper(user_function,maxsize,typed,_CacheInfo)\n wrapper.cache_parameters=lambda :{'maxsize':maxsize,'typed':typed}\n return update_wrapper(wrapper,user_function)\n \n return decorating_function\n \ndef _lru_cache_wrapper(user_function,maxsize,typed,_CacheInfo):\n\n sentinel=object()\n make_key=_make_key\n PREV,NEXT,KEY,RESULT=0,1,2,3\n \n cache={}\n hits=misses=0\n full=False\n cache_get=cache.get\n cache_len=cache.__len__\n lock=RLock()\n root=[]\n root[:]=[root,root,None ,None ]\n \n if maxsize ==0:\n \n def wrapper(*args,**kwds):\n \n nonlocal misses\n misses +=1\n result=user_function(*args,**kwds)\n return result\n \n elif maxsize is None :\n \n def wrapper(*args,**kwds):\n \n nonlocal hits,misses\n key=make_key(args,kwds,typed)\n result=cache_get(key,sentinel)\n if result is not sentinel:\n hits +=1\n return result\n misses +=1\n result=user_function(*args,**kwds)\n cache[key]=result\n return result\n \n else :\n \n def wrapper(*args,**kwds):\n \n nonlocal root,hits,misses,full\n key=make_key(args,kwds,typed)\n with lock:\n link=cache_get(key)\n if link is not None :\n \n link_prev,link_next,_key,result=link\n link_prev[NEXT]=link_next\n link_next[PREV]=link_prev\n last=root[PREV]\n last[NEXT]=root[PREV]=link\n link[PREV]=last\n link[NEXT]=root\n hits +=1\n return result\n misses +=1\n result=user_function(*args,**kwds)\n with lock:\n if key in cache:\n \n \n \n \n pass\n elif full:\n \n oldroot=root\n oldroot[KEY]=key\n oldroot[RESULT]=result\n \n \n \n \n \n \n root=oldroot[NEXT]\n oldkey=root[KEY]\n oldresult=root[RESULT]\n root[KEY]=root[RESULT]=None\n \n del cache[oldkey]\n \n \n \n cache[key]=oldroot\n else :\n \n last=root[PREV]\n link=[last,root,key,result]\n last[NEXT]=root[PREV]=cache[key]=link\n \n \n full=(cache_len()>=maxsize)\n return result\n \n def cache_info():\n ''\n with lock:\n return _CacheInfo(hits,misses,maxsize,cache_len())\n \n def cache_clear():\n ''\n nonlocal hits,misses,full\n with lock:\n cache.clear()\n root[:]=[root,root,None ,None ]\n hits=misses=0\n full=False\n \n wrapper.cache_info=cache_info\n wrapper.cache_clear=cache_clear\n return wrapper\n \ntry :\n from _functools import _lru_cache_wrapper\nexcept ImportError:\n pass\n \n \n \n \n \n \ndef cache(user_function,/):\n ''\n return lru_cache(maxsize=None )(user_function)\n \n \n \n \n \n \ndef _c3_merge(sequences):\n ''\n\n\n\n \n result=[]\n while True :\n sequences=[s for s in sequences if s]\n if not sequences:\n return result\n for s1 in sequences:\n candidate=s1[0]\n for s2 in sequences:\n if candidate in s2[1:]:\n candidate=None\n break\n else :\n break\n if candidate is None :\n raise RuntimeError(\"Inconsistent hierarchy\")\n result.append(candidate)\n \n for seq in sequences:\n if seq[0]==candidate:\n del seq[0]\n \ndef _c3_mro(cls,abcs=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n for i,base in enumerate(reversed(cls.__bases__)):\n if hasattr(base,'__abstractmethods__'):\n boundary=len(cls.__bases__)-i\n break\n else :\n boundary=0\n abcs=list(abcs)if abcs else []\n explicit_bases=list(cls.__bases__[:boundary])\n abstract_bases=[]\n other_bases=list(cls.__bases__[boundary:])\n for base in abcs:\n if issubclass(cls,base)and not any(\n issubclass(b,base)for b in cls.__bases__\n ):\n \n \n abstract_bases.append(base)\n for base in abstract_bases:\n abcs.remove(base)\n 
explicit_c3_mros=[_c3_mro(base,abcs=abcs)for base in explicit_bases]\n abstract_c3_mros=[_c3_mro(base,abcs=abcs)for base in abstract_bases]\n other_c3_mros=[_c3_mro(base,abcs=abcs)for base in other_bases]\n return _c3_merge(\n [[cls]]+\n explicit_c3_mros+abstract_c3_mros+other_c3_mros+\n [explicit_bases]+[abstract_bases]+[other_bases]\n )\n \ndef _compose_mro(cls,types):\n ''\n\n\n\n\n \n bases=set(cls.__mro__)\n \n def is_related(typ):\n return (typ not in bases and hasattr(typ,'__mro__')\n and issubclass(cls,typ))\n types=[n for n in types if is_related(n)]\n \n \n def is_strict_base(typ):\n for other in types:\n if typ !=other and typ in other.__mro__:\n return True\n return False\n types=[n for n in types if not is_strict_base(n)]\n \n \n type_set=set(types)\n mro=[]\n for typ in types:\n found=[]\n for sub in typ.__subclasses__():\n if sub not in bases and issubclass(cls,sub):\n found.append([s for s in sub.__mro__ if s in type_set])\n if not found:\n mro.append(typ)\n continue\n \n found.sort(key=len,reverse=True )\n for sub in found:\n for subcls in sub:\n if subcls not in mro:\n mro.append(subcls)\n return _c3_mro(cls,abcs=mro)\n \ndef _find_impl(cls,registry):\n ''\n\n\n\n\n\n\n\n \n mro=_compose_mro(cls,registry.keys())\n match=None\n for t in mro:\n if match is not None :\n \n \n if (t in registry and t not in cls.__mro__\n and match not in cls.__mro__\n and not issubclass(match,t)):\n raise RuntimeError(\"Ambiguous dispatch: {} or {}\".format(\n match,t))\n break\n if t in registry:\n match=t\n return registry.get(match)\n \ndef singledispatch(func):\n ''\n\n\n\n\n\n\n \n \n \n \n import types,weakref\n \n registry={}\n dispatch_cache=weakref.WeakKeyDictionary()\n cache_token=None\n \n def dispatch(cls):\n ''\n\n\n\n\n \n nonlocal cache_token\n if cache_token is not None :\n current_token=get_cache_token()\n if cache_token !=current_token:\n dispatch_cache.clear()\n cache_token=current_token\n try :\n impl=dispatch_cache[cls]\n except KeyError:\n try :\n impl=registry[cls]\n except KeyError:\n impl=_find_impl(cls,registry)\n dispatch_cache[cls]=impl\n return impl\n \n def register(cls,func=None ):\n ''\n\n\n\n \n nonlocal cache_token\n if func is None :\n if isinstance(cls,type):\n return lambda f:register(cls,f)\n ann=getattr(cls,'__annotations__',{})\n if not ann:\n raise TypeError(\n f\"Invalid first argument to `register()`: {cls!r}. \"\n f\"Use either `@register(some_class)` or plain `@register` \"\n f\"on an annotated function.\"\n )\n func=cls\n \n \n from typing import get_type_hints\n argname,cls=next(iter(get_type_hints(func).items()))\n if not isinstance(cls,type):\n raise TypeError(\n f\"Invalid annotation for {argname!r}. 
\"\n f\"{cls!r} is not a class.\"\n )\n registry[cls]=func\n if cache_token is None and hasattr(cls,'__abstractmethods__'):\n cache_token=get_cache_token()\n dispatch_cache.clear()\n return func\n \n def wrapper(*args,**kw):\n if not args:\n raise TypeError(f'{funcname} requires at least '\n '1 positional argument')\n \n return dispatch(args[0].__class__)(*args,**kw)\n \n funcname=getattr(func,'__name__','singledispatch function')\n registry[object]=func\n wrapper.register=register\n wrapper.dispatch=dispatch\n wrapper.registry=types.MappingProxyType(registry)\n wrapper._clear_cache=dispatch_cache.clear\n update_wrapper(wrapper,func)\n return wrapper\n \n \n \nclass singledispatchmethod:\n ''\n\n\n\n \n \n def __init__(self,func):\n if not callable(func)and not hasattr(func,\"__get__\"):\n raise TypeError(f\"{func!r} is not callable or a descriptor\")\n \n self.dispatcher=singledispatch(func)\n self.func=func\n \n def register(self,cls,method=None ):\n ''\n\n\n \n return self.dispatcher.register(cls,func=method)\n \n def __get__(self,obj,cls=None ):\n def _method(*args,**kwargs):\n method=self.dispatcher.dispatch(args[0].__class__)\n return method.__get__(obj,cls)(*args,**kwargs)\n \n _method.__isabstractmethod__=self.__isabstractmethod__\n _method.register=self.register\n update_wrapper(_method,self.func)\n return _method\n \n @property\n def __isabstractmethod__(self):\n return getattr(self.func,'__isabstractmethod__',False )\n \n \n \n \n \n \n_NOT_FOUND=object()\n\n\nclass cached_property:\n def __init__(self,func):\n self.func=func\n self.attrname=None\n self.__doc__=func.__doc__\n self.lock=RLock()\n \n def __set_name__(self,owner,name):\n if self.attrname is None :\n self.attrname=name\n elif name !=self.attrname:\n raise TypeError(\n \"Cannot assign the same cached_property to two different names \"\n f\"({self.attrname!r} and {name!r}).\"\n )\n \n def __get__(self,instance,owner=None ):\n if instance is None :\n return self\n if self.attrname is None :\n raise TypeError(\n \"Cannot use cached_property instance without calling __set_name__ on it.\")\n try :\n cache=instance.__dict__\n except AttributeError:\n msg=(\n f\"No '__dict__' attribute on {type(instance).__name__!r} \"\n f\"instance to cache {self.attrname!r} property.\"\n )\n raise TypeError(msg)from None\n val=cache.get(self.attrname,_NOT_FOUND)\n if val is _NOT_FOUND:\n with self.lock:\n \n val=cache.get(self.attrname,_NOT_FOUND)\n if val is _NOT_FOUND:\n val=self.func(instance)\n try :\n cache[self.attrname]=val\n except TypeError:\n msg=(\n f\"The '__dict__' attribute on {type(instance).__name__!r} instance \"\n f\"does not support item assignment for caching {self.attrname!r} property.\"\n )\n raise TypeError(msg)from None\n return val\n \n __class_getitem__=classmethod(GenericAlias)\n", ["_functools", "_thread", "abc", "collections", "reprlib", "types", "typing", "weakref"]], "gc": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nDEBUG_COLLECTABLE=2\n\nDEBUG_LEAK=38\n\nDEBUG_SAVEALL=32\n\nDEBUG_STATS=1\n\nDEBUG_UNCOLLECTABLE=4\n\nclass __loader__:\n pass\n \ncallbacks=[]\n\ndef collect(*args,**kw):\n ''\n\n\n\n\n\n \n pass\n \ndef disable(*args,**kw):\n ''\n\n \n pass\n \ndef enable(*args,**kw):\n ''\n\n \n pass\n \ngarbage=[]\n\ndef get_count(*args,**kw):\n ''\n\n \n pass\n \ndef get_debug(*args,**kw):\n ''\n\n \n pass\n \ndef get_objects(*args,**kw):\n ''\n\n\n \n pass\n \ndef get_referents(*args,**kw):\n ''\n pass\n \ndef get_referrers(*args,**kw):\n ''\n pass\n \ndef get_threshold(*args,**kw):\n ''\n\n \n pass\n 
\ndef is_tracked(*args,**kw):\n ''\n\n\n \n pass\n \ndef isenabled(*args,**kw):\n ''\n\n \n pass\n \ndef set_debug(*args,**kw):\n ''\n\n\n\n\n\n\n\n\n\n\n \n pass\n \ndef set_threshold(*args,**kw):\n ''\n\n\n \n pass\n", []], "genericpath": [".py", "''\n\n\n\n\nimport os\nimport stat\n\n__all__=['commonprefix','exists','getatime','getctime','getmtime',\n'getsize','isdir','isfile','samefile','sameopenfile',\n'samestat']\n\n\n\n\ndef exists(path):\n ''\n try :\n os.stat(path)\n except (OSError,ValueError):\n return False\n return True\n \n \n \n \ndef isfile(path):\n ''\n try :\n st=os.stat(path)\n except (OSError,ValueError):\n return False\n return stat.S_ISREG(st.st_mode)\n \n \n \n \n \ndef isdir(s):\n ''\n try :\n st=os.stat(s)\n except (OSError,ValueError):\n return False\n return stat.S_ISDIR(st.st_mode)\n \n \ndef getsize(filename):\n ''\n return os.stat(filename).st_size\n \n \ndef getmtime(filename):\n ''\n return os.stat(filename).st_mtime\n \n \ndef getatime(filename):\n ''\n return os.stat(filename).st_atime\n \n \ndef getctime(filename):\n ''\n return os.stat(filename).st_ctime\n \n \n \ndef commonprefix(m):\n ''\n if not m:return ''\n \n \n \n \n if not isinstance(m[0],(list,tuple)):\n m=tuple(map(os.fspath,m))\n s1=min(m)\n s2=max(m)\n for i,c in enumerate(s1):\n if c !=s2[i]:\n return s1[:i]\n return s1\n \n \n \ndef samestat(s1,s2):\n ''\n return (s1.st_ino ==s2.st_ino and\n s1.st_dev ==s2.st_dev)\n \n \n \ndef samefile(f1,f2):\n ''\n\n\n\n \n s1=os.stat(f1)\n s2=os.stat(f2)\n return samestat(s1,s2)\n \n \n \n \ndef sameopenfile(fp1,fp2):\n ''\n s1=os.fstat(fp1)\n s2=os.fstat(fp2)\n return samestat(s1,s2)\n \n \n \n \n \n \n \n \n \ndef _splitext(p,sep,altsep,extsep):\n ''\n\n\n \n \n \n sepIndex=p.rfind(sep)\n if altsep:\n altsepIndex=p.rfind(altsep)\n sepIndex=max(sepIndex,altsepIndex)\n \n dotIndex=p.rfind(extsep)\n if dotIndex >sepIndex:\n \n filenameIndex=sepIndex+1\n while filenameIndex [ \\t]+) | # spaces and horizontal tabs\n (?P[0-9]+\\b) | # decimal integer\n (?Pn\\b) | # only n is allowed\n (?P[()]) |\n (?P[-*/%+?:]|[>,\n # <=, >=, ==, !=, &&, ||,\n # ? :\n # unary and bitwise ops\n # not allowed\n (?P\\w+|.) 
# invalid token\n \"\"\",re.VERBOSE |re.DOTALL)\n\ndef _tokenize(plural):\n for mo in re.finditer(_token_pattern,plural):\n kind=mo.lastgroup\n if kind =='WHITESPACES':\n continue\n value=mo.group(kind)\n if kind =='INVALID':\n raise ValueError('invalid token in plural form: %s'%value)\n yield value\n yield ''\n \ndef _error(value):\n if value:\n return ValueError('unexpected token in plural form: %s'%value)\n else :\n return ValueError('unexpected end of plural form')\n \n_binary_ops=(\n('||',),\n('&&',),\n('==','!='),\n('<','>','<=','>='),\n('+','-'),\n('*','/','%'),\n)\n_binary_ops={op:i for i,ops in enumerate(_binary_ops,1)for op in ops}\n_c2py_ops={'||':'or','&&':'and','/':'//'}\n\ndef _parse(tokens,priority=-1):\n result=''\n nexttok=next(tokens)\n while nexttok =='!':\n result +='not '\n nexttok=next(tokens)\n \n if nexttok =='(':\n sub,nexttok=_parse(tokens)\n result='%s(%s)'%(result,sub)\n if nexttok !=')':\n raise ValueError('unbalanced parenthesis in plural form')\n elif nexttok =='n':\n result='%s%s'%(result,nexttok)\n else :\n try :\n value=int(nexttok,10)\n except ValueError:\n raise _error(nexttok)from None\n result='%s%d'%(result,value)\n nexttok=next(tokens)\n \n j=100\n while nexttok in _binary_ops:\n i=_binary_ops[nexttok]\n if i 1000:\n raise ValueError('plural form expression is too long')\n try :\n result,nexttok=_parse(_tokenize(plural))\n if nexttok:\n raise _error(nexttok)\n \n depth=0\n for c in result:\n if c =='(':\n depth +=1\n if depth >20:\n \n \n raise ValueError('plural form expression is too complex')\n elif c ==')':\n depth -=1\n \n ns={'_as_int':_as_int}\n exec('''if True:\n def func(n):\n if not isinstance(n, int):\n n = _as_int(n)\n return int(%s)\n '''%result,ns)\n return ns['func']\n except RecursionError:\n \n raise ValueError('plural form expression is too complex')\n \n \ndef _expand_lang(loc):\n import locale\n loc=locale.normalize(loc)\n COMPONENT_CODESET=1 <<0\n COMPONENT_TERRITORY=1 <<1\n COMPONENT_MODIFIER=1 <<2\n \n mask=0\n pos=loc.find('@')\n if pos >=0:\n modifier=loc[pos:]\n loc=loc[:pos]\n mask |=COMPONENT_MODIFIER\n else :\n modifier=''\n pos=loc.find('.')\n if pos >=0:\n codeset=loc[pos:]\n loc=loc[:pos]\n mask |=COMPONENT_CODESET\n else :\n codeset=''\n pos=loc.find('_')\n if pos >=0:\n territory=loc[pos:]\n loc=loc[:pos]\n mask |=COMPONENT_TERRITORY\n else :\n territory=''\n language=loc\n ret=[]\n for i in range(mask+1):\n if not (i&~mask):\n val=language\n if i&COMPONENT_TERRITORY:val +=territory\n if i&COMPONENT_CODESET:val +=codeset\n if i&COMPONENT_MODIFIER:val +=modifier\n ret.append(val)\n ret.reverse()\n return ret\n \n \n \nclass NullTranslations:\n def __init__(self,fp=None ):\n self._info={}\n self._charset=None\n self._output_charset=None\n self._fallback=None\n if fp is not None :\n self._parse(fp)\n \n def _parse(self,fp):\n pass\n \n def add_fallback(self,fallback):\n if self._fallback:\n self._fallback.add_fallback(fallback)\n else :\n self._fallback=fallback\n \n def gettext(self,message):\n if self._fallback:\n return self._fallback.gettext(message)\n return message\n \n def lgettext(self,message):\n import warnings\n warnings.warn('lgettext() is deprecated, use gettext() instead',\n DeprecationWarning,2)\n import locale\n if self._fallback:\n with warnings.catch_warnings():\n warnings.filterwarnings('ignore',r'.*\\blgettext\\b.*',\n DeprecationWarning)\n return self._fallback.lgettext(message)\n if self._output_charset:\n return message.encode(self._output_charset)\n return 
message.encode(locale.getpreferredencoding())\n \n def ngettext(self,msgid1,msgid2,n):\n if self._fallback:\n return self._fallback.ngettext(msgid1,msgid2,n)\n if n ==1:\n return msgid1\n else :\n return msgid2\n \n def lngettext(self,msgid1,msgid2,n):\n import warnings\n warnings.warn('lngettext() is deprecated, use ngettext() instead',\n DeprecationWarning,2)\n import locale\n if self._fallback:\n with warnings.catch_warnings():\n warnings.filterwarnings('ignore',r'.*\\blngettext\\b.*',\n DeprecationWarning)\n return self._fallback.lngettext(msgid1,msgid2,n)\n if n ==1:\n tmsg=msgid1\n else :\n tmsg=msgid2\n if self._output_charset:\n return tmsg.encode(self._output_charset)\n return tmsg.encode(locale.getpreferredencoding())\n \n def pgettext(self,context,message):\n if self._fallback:\n return self._fallback.pgettext(context,message)\n return message\n \n def npgettext(self,context,msgid1,msgid2,n):\n if self._fallback:\n return self._fallback.npgettext(context,msgid1,msgid2,n)\n if n ==1:\n return msgid1\n else :\n return msgid2\n \n def info(self):\n return self._info\n \n def charset(self):\n return self._charset\n \n def output_charset(self):\n import warnings\n warnings.warn('output_charset() is deprecated',\n DeprecationWarning,2)\n return self._output_charset\n \n def set_output_charset(self,charset):\n import warnings\n warnings.warn('set_output_charset() is deprecated',\n DeprecationWarning,2)\n self._output_charset=charset\n \n def install(self,names=None ):\n import builtins\n builtins.__dict__['_']=self.gettext\n if names is not None :\n allowed={'gettext','lgettext','lngettext',\n 'ngettext','npgettext','pgettext'}\n for name in allowed&set(names):\n builtins.__dict__[name]=getattr(self,name)\n \n \nclass GNUTranslations(NullTranslations):\n\n LE_MAGIC=0x950412de\n BE_MAGIC=0xde120495\n \n \n \n CONTEXT=\"%s\\x04%s\"\n \n \n VERSIONS=(0,1)\n \n def _get_versions(self,version):\n ''\n return (version >>16,version&0xffff)\n \n def _parse(self,fp):\n ''\n \n \n from struct import unpack\n filename=getattr(fp,'name','')\n \n \n self._catalog=catalog={}\n self.plural=lambda n:int(n !=1)\n buf=fp.read()\n buflen=len(buf)\n \n magic=unpack('4I',buf[4:20])\n ii='>II'\n else :\n raise OSError(0,'Bad magic number',filename)\n \n major_version,minor_version=self._get_versions(version)\n \n if major_version not in self.VERSIONS:\n raise OSError(0,'Bad version number '+str(major_version),filename)\n \n \n \n for i in range(0,msgcount):\n mlen,moff=unpack(ii,buf[masteridx:masteridx+8])\n mend=moff+mlen\n tlen,toff=unpack(ii,buf[transidx:transidx+8])\n tend=toff+tlen\n if mend startpos:\n parentpos=(pos -1)>>1\n parent=heap[parentpos]\n if newitem startpos:\n parentpos=(pos -1)>>1\n parent=heap[parentpos]\n if parent 1:\n try :\n while True :\n value,order,next=s=h[0]\n yield value\n s[0]=next()\n _heapreplace(h,s)\n except StopIteration:\n _heappop(h)\n if h:\n \n value,order,next=h[0]\n yield value\n yield from next.__self__\n return\n \n for order,it in enumerate(map(iter,iterables)):\n try :\n next=it.__next__\n value=next()\n h_append([key(value),order *direction,value,next])\n except StopIteration:\n pass\n _heapify(h)\n while len(h)>1:\n try :\n while True :\n key_value,order,value,next=s=h[0]\n yield value\n value=next()\n s[0]=key(value)\n s[2]=value\n _heapreplace(h,s)\n except StopIteration:\n _heappop(h)\n if h:\n key_value,order,value,next=h[0]\n yield value\n yield from next.__self__\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n 
\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \ndef nsmallest(n,iterable,key=None ):\n ''\n\n\n \n \n \n if n ==1:\n it=iter(iterable)\n sentinel=object()\n result=min(it,default=sentinel,key=key)\n return []if result is sentinel else [result]\n \n \n try :\n size=len(iterable)\n except (TypeError,AttributeError):\n pass\n else :\n if n >=size:\n return sorted(iterable,key=key)[:n]\n \n \n if key is None :\n it=iter(iterable)\n \n \n result=[(elem,i)for i,elem in zip(range(n),it)]\n if not result:\n return result\n _heapify_max(result)\n top=result[0][0]\n order=n\n _heapreplace=_heapreplace_max\n for elem in it:\n if elem =size:\n return sorted(iterable,key=key,reverse=True )[:n]\n \n \n if key is None :\n it=iter(iterable)\n result=[(elem,i)for i,elem in zip(range(0,-n,-1),it)]\n if not result:\n return result\n heapify(result)\n top=result[0][0]\n order=-n\n _heapreplace=heapreplace\n for elem in it:\n if top blocksize:\n key=self._digest_cons(key).digest()\n \n key=key.ljust(blocksize,b'\\0')\n self._outer.update(key.translate(trans_5C))\n self._inner.update(key.translate(trans_36))\n if msg is not None :\n self.update(msg)\n \n @property\n def name(self):\n return \"hmac-\"+self._inner.name\n \n @property\n def digest_cons(self):\n return self._digest_cons\n \n @property\n def inner(self):\n return self._inner\n \n @property\n def outer(self):\n return self._outer\n \n def update(self,msg):\n ''\n self._inner.update(msg)\n \n def copy(self):\n ''\n\n\n \n \n other=self.__class__.__new__(self.__class__)\n other._digest_cons=self._digest_cons\n other.digest_size=self.digest_size\n other._inner=self._inner.copy()\n other._outer=self._outer.copy()\n return other\n \n def _current(self):\n ''\n\n\n \n h=self._outer.copy()\n h.update(self._inner.digest())\n return h\n \n def digest(self):\n ''\n\n\n\n\n \n h=self._current()\n return h.digest()\n \n def hexdigest(self):\n ''\n \n h=self._current()\n return h.hexdigest()\n \ndef new(key,msg=None ,digestmod=''):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n return HMAC(key,msg,digestmod)\n \n \ndef digest(key,msg,digest):\n ''\n\n\n\n\n\n\n \n if (_hashopenssl is not None and\n isinstance(digest,str)and digest in _openssl_md_meths):\n return _hashopenssl.hmac_digest(key,msg,digest)\n \n if callable(digest):\n digest_cons=digest\n elif isinstance(digest,str):\n digest_cons=lambda d=b'':_hashlib.new(digest,d)\n else :\n digest_cons=lambda d=b'':digest.new(d)\n \n inner=digest_cons()\n outer=digest_cons()\n blocksize=getattr(inner,'block_size',64)\n if len(key)>blocksize:\n key=digest_cons(key).digest()\n key=key+b'\\x00'*(blocksize -len(key))\n inner.update(key.translate(trans_36))\n outer.update(key.translate(trans_5C))\n inner.update(msg)\n outer.update(inner.digest())\n return outer.digest()\n", ["_hashlib", "_operator", "hashlib", "warnings"]], "imp": [".py", "''\n\n\n\n\n\n\n\nfrom _imp import (lock_held,acquire_lock,release_lock,\nget_frozen_object,is_frozen_package,\ninit_frozen,is_builtin,is_frozen,\n_fix_co_filename)\ntry :\n from _imp import create_dynamic\nexcept ImportError:\n\n create_dynamic=None\n \nfrom importlib._bootstrap import _ERR_MSG,_exec,_load,_builtin_from_name\nfrom importlib._bootstrap_external import SourcelessFileLoader\n\nfrom importlib import machinery\nfrom importlib import util\nimport importlib\nimport os\nimport sys\nimport tokenize\nimport types\nimport warnings\n\nwarnings.warn(\"the imp module is deprecated in favour of importlib; \"\n\"see the 
module's documentation for alternative uses\",\nDeprecationWarning,stacklevel=2)\n\n\nSEARCH_ERROR=0\nPY_SOURCE=1\nPY_COMPILED=2\nC_EXTENSION=3\nPY_RESOURCE=4\nPKG_DIRECTORY=5\nC_BUILTIN=6\nPY_FROZEN=7\nPY_CODERESOURCE=8\nIMP_HOOK=9\n\n\ndef new_module(name):\n ''\n\n\n\n\n\n \n return types.ModuleType(name)\n \n \ndef get_magic():\n ''\n\n\n \n return util.MAGIC_NUMBER\n \n \ndef get_tag():\n ''\n return sys.implementation.cache_tag\n \n \ndef cache_from_source(path,debug_override=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n with warnings.catch_warnings():\n warnings.simplefilter('ignore')\n return util.cache_from_source(path,debug_override)\n \n \ndef source_from_cache(path):\n ''\n\n\n\n\n\n\n\n\n \n return util.source_from_cache(path)\n \n \ndef get_suffixes():\n ''\n extensions=[(s,'rb',C_EXTENSION)for s in machinery.EXTENSION_SUFFIXES]\n source=[(s,'r',PY_SOURCE)for s in machinery.SOURCE_SUFFIXES]\n bytecode=[(s,'rb',PY_COMPILED)for s in machinery.BYTECODE_SUFFIXES]\n \n return extensions+source+bytecode\n \n \nclass NullImporter:\n\n ''\n\n\n\n \n \n def __init__(self,path):\n if path =='':\n raise ImportError('empty pathname',path='')\n elif os.path.isdir(path):\n raise ImportError('existing directory',path=path)\n \n def find_module(self,fullname):\n ''\n return None\n \n \nclass _HackedGetData:\n\n ''\n \n \n def __init__(self,fullname,path,file=None ):\n super().__init__(fullname,path)\n self.file=file\n \n def get_data(self,path):\n ''\n if self.file and path ==self.path:\n \n \n if not self.file.closed:\n file=self.file\n if 'b'not in file.mode:\n file.close()\n if self.file.closed:\n self.file=file=open(self.path,'rb')\n \n with file:\n return file.read()\n else :\n return super().get_data(path)\n \n \nclass _LoadSourceCompatibility(_HackedGetData,machinery.SourceFileLoader):\n\n ''\n \n \ndef load_source(name,pathname,file=None ):\n loader=_LoadSourceCompatibility(name,pathname,file)\n spec=util.spec_from_file_location(name,pathname,loader=loader)\n if name in sys.modules:\n module=_exec(spec,sys.modules[name])\n else :\n module=_load(spec)\n \n \n module.__loader__=machinery.SourceFileLoader(name,pathname)\n module.__spec__.loader=module.__loader__\n return module\n \n \nclass _LoadCompiledCompatibility(_HackedGetData,SourcelessFileLoader):\n\n ''\n \n \ndef load_compiled(name,pathname,file=None ):\n ''\n loader=_LoadCompiledCompatibility(name,pathname,file)\n spec=util.spec_from_file_location(name,pathname,loader=loader)\n if name in sys.modules:\n module=_exec(spec,sys.modules[name])\n else :\n module=_load(spec)\n \n \n module.__loader__=SourcelessFileLoader(name,pathname)\n module.__spec__.loader=module.__loader__\n return module\n \n \ndef load_package(name,path):\n ''\n if os.path.isdir(path):\n extensions=(machinery.SOURCE_SUFFIXES[:]+\n machinery.BYTECODE_SUFFIXES[:])\n for extension in extensions:\n init_path=os.path.join(path,'__init__'+extension)\n if os.path.exists(init_path):\n path=init_path\n break\n else :\n raise ValueError('{!r} is not a package'.format(path))\n spec=util.spec_from_file_location(name,path,\n submodule_search_locations=[])\n if name in sys.modules:\n return _exec(spec,sys.modules[name])\n else :\n return _load(spec)\n \n \ndef load_module(name,file,filename,details):\n ''\n\n\n\n\n\n \n suffix,mode,type_=details\n if mode and (not mode.startswith(('r','U'))or '+'in mode):\n raise ValueError('invalid file open mode {!r}'.format(mode))\n elif file is None and type_ in {PY_SOURCE,PY_COMPILED}:\n msg='file object required for import (type code 
{})'.format(type_)\n raise ValueError(msg)\n elif type_ ==PY_SOURCE:\n return load_source(name,filename,file)\n elif type_ ==PY_COMPILED:\n return load_compiled(name,filename,file)\n elif type_ ==C_EXTENSION and load_dynamic is not None :\n if file is None :\n with open(filename,'rb')as opened_file:\n return load_dynamic(name,filename,opened_file)\n else :\n return load_dynamic(name,filename,file)\n elif type_ ==PKG_DIRECTORY:\n return load_package(name,filename)\n elif type_ ==C_BUILTIN:\n return init_builtin(name)\n elif type_ ==PY_FROZEN:\n return init_frozen(name)\n else :\n msg=\"Don't know how to import {} (type code {})\".format(name,type_)\n raise ImportError(msg,name=name)\n \n \ndef find_module(name,path=None ):\n ''\n\n\n\n\n\n\n\n\n \n if not isinstance(name,str):\n raise TypeError(\"'name' must be a str, not {}\".format(type(name)))\n elif not isinstance(path,(type(None ),list)):\n \n raise RuntimeError(\"'path' must be None or a list, \"\n \"not {}\".format(type(path)))\n \n if path is None :\n if is_builtin(name):\n return None ,None ,('','',C_BUILTIN)\n elif is_frozen(name):\n return None ,None ,('','',PY_FROZEN)\n else :\n path=sys.path\n \n for entry in path:\n package_directory=os.path.join(entry,name)\n for suffix in ['.py',machinery.BYTECODE_SUFFIXES[0]]:\n package_file_name='__init__'+suffix\n file_path=os.path.join(package_directory,package_file_name)\n if os.path.isfile(file_path):\n return None ,package_directory,('','',PKG_DIRECTORY)\n for suffix,mode,type_ in get_suffixes():\n file_name=name+suffix\n file_path=os.path.join(entry,file_name)\n if os.path.isfile(file_path):\n break\n else :\n continue\n break\n else :\n raise ImportError(_ERR_MSG.format(name),name=name)\n \n encoding=None\n if 'b'not in mode:\n with open(file_path,'rb')as file:\n encoding=tokenize.detect_encoding(file.readline)[0]\n file=open(file_path,mode,encoding=encoding)\n return file,file_path,(suffix,mode,type_)\n \n \ndef reload(module):\n ''\n\n\n\n\n\n \n return importlib.reload(module)\n \n \ndef init_builtin(name):\n ''\n\n\n\n \n try :\n return _builtin_from_name(name)\n except ImportError:\n return None\n \n \nif create_dynamic:\n def load_dynamic(name,path,file=None ):\n ''\n\n\n \n import importlib.machinery\n loader=importlib.machinery.ExtensionFileLoader(name,path)\n \n \n \n spec=importlib.machinery.ModuleSpec(\n name=name,loader=loader,origin=path)\n return _load(spec)\n \nelse :\n load_dynamic=None\n", ["_imp", "importlib", "importlib._bootstrap", "importlib._bootstrap_external", "importlib.machinery", "importlib.util", "os", "sys", "tokenize", "types", "warnings"]], "inspect": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n__author__=('Ka-Ping Yee ',\n'Yury Selivanov ')\n\nimport abc\nimport dis\nimport collections.abc\nimport enum\nimport importlib.machinery\nimport itertools\nimport linecache\nimport os\nimport re\nimport sys\nimport tokenize\nimport token\nimport types\nimport warnings\nimport functools\nimport builtins\nfrom operator import attrgetter\nfrom collections import namedtuple,OrderedDict\n\n\n\nmod_dict=globals()\nfor k,v in dis.COMPILER_FLAG_NAMES.items():\n mod_dict[\"CO_\"+v]=k\n \n \nTPFLAGS_IS_ABSTRACT=1 <<20\n\n\ndef ismodule(object):\n ''\n\n\n\n\n \n return isinstance(object,types.ModuleType)\n \ndef isclass(object):\n ''\n\n\n\n \n return isinstance(object,type)\n \ndef ismethod(object):\n ''\n\n\n\n\n\n \n return isinstance(object,types.MethodType)\n \ndef ismethoddescriptor(object):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n if 
isclass(object)or ismethod(object)or isfunction(object):\n \n return False\n tp=type(object)\n return hasattr(tp,\"__get__\")and not hasattr(tp,\"__set__\")\n \ndef isdatadescriptor(object):\n ''\n\n\n\n\n\n \n if isclass(object)or ismethod(object)or isfunction(object):\n \n return False\n tp=type(object)\n return hasattr(tp,\"__set__\")or hasattr(tp,\"__delete__\")\n \nif hasattr(types,'MemberDescriptorType'):\n\n def ismemberdescriptor(object):\n ''\n\n\n \n return isinstance(object,types.MemberDescriptorType)\nelse :\n\n def ismemberdescriptor(object):\n ''\n\n\n \n return False\n \nif hasattr(types,'GetSetDescriptorType'):\n\n def isgetsetdescriptor(object):\n ''\n\n\n \n return isinstance(object,types.GetSetDescriptorType)\nelse :\n\n def isgetsetdescriptor(object):\n ''\n\n\n \n return False\n \ndef isfunction(object):\n ''\n\n\n\n\n\n\n\n\n \n return isinstance(object,types.FunctionType)\n \ndef _has_code_flag(f,flag):\n ''\n\n \n while ismethod(f):\n f=f.__func__\n f=functools._unwrap_partial(f)\n if not isfunction(f):\n return False\n return bool(f.__code__.co_flags&flag)\n \ndef isgeneratorfunction(obj):\n ''\n\n\n \n return _has_code_flag(obj,CO_GENERATOR)\n \ndef iscoroutinefunction(obj):\n ''\n\n\n \n return _has_code_flag(obj,CO_COROUTINE)\n \ndef isasyncgenfunction(obj):\n ''\n\n\n\n \n return _has_code_flag(obj,CO_ASYNC_GENERATOR)\n \ndef isasyncgen(object):\n ''\n return isinstance(object,types.AsyncGeneratorType)\n \ndef isgenerator(object):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n return isinstance(object,types.GeneratorType)\n \ndef iscoroutine(object):\n ''\n return isinstance(object,types.CoroutineType)\n \ndef isawaitable(object):\n ''\n return (isinstance(object,types.CoroutineType)or\n isinstance(object,types.GeneratorType)and\n bool(object.gi_code.co_flags&CO_ITERABLE_COROUTINE)or\n isinstance(object,collections.abc.Awaitable))\n \ndef istraceback(object):\n ''\n\n\n\n\n\n \n return isinstance(object,types.TracebackType)\n \ndef isframe(object):\n ''\n\n\n\n\n\n\n\n\n\n \n return isinstance(object,types.FrameType)\n \ndef iscode(object):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n return isinstance(object,types.CodeType)\n \ndef isbuiltin(object):\n ''\n\n\n\n\n \n return isinstance(object,types.BuiltinFunctionType)\n \ndef isroutine(object):\n ''\n return (isbuiltin(object)\n or isfunction(object)\n or ismethod(object)\n or ismethoddescriptor(object))\n \ndef isabstract(object):\n ''\n if not isinstance(object,type):\n return False\n if object.__flags__&TPFLAGS_IS_ABSTRACT:\n return True\n if not issubclass(type(object),abc.ABCMeta):\n return False\n if hasattr(object,'__abstractmethods__'):\n \n \n return False\n \n \n for name,value in object.__dict__.items():\n if getattr(value,\"__isabstractmethod__\",False ):\n return True\n for base in object.__bases__:\n for name in getattr(base,\"__abstractmethods__\",()):\n value=getattr(object,name,None )\n if getattr(value,\"__isabstractmethod__\",False ):\n return True\n return False\n \ndef getmembers(object,predicate=None ):\n ''\n \n if isclass(object):\n mro=(object,)+getmro(object)\n else :\n mro=()\n results=[]\n processed=set()\n names=dir(object)\n \n \n \n try :\n for base in object.__bases__:\n for k,v in base.__dict__.items():\n if isinstance(v,types.DynamicClassAttribute):\n names.append(k)\n except AttributeError:\n pass\n for key in names:\n \n \n \n try :\n value=getattr(object,key)\n \n if key in processed:\n raise AttributeError\n except AttributeError:\n for base in mro:\n if key in base.__dict__:\n 
value=base.__dict__[key]\n break\n else :\n \n \n continue\n if not predicate or predicate(value):\n results.append((key,value))\n processed.add(key)\n results.sort(key=lambda pair:pair[0])\n return results\n \nAttribute=namedtuple('Attribute','name kind defining_class object')\n\ndef classify_class_attrs(cls):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n mro=getmro(cls)\n metamro=getmro(type(cls))\n metamro=tuple(cls for cls in metamro if cls not in (type,object))\n class_bases=(cls,)+mro\n all_bases=class_bases+metamro\n names=dir(cls)\n \n \n \n for base in mro:\n for k,v in base.__dict__.items():\n if isinstance(v,types.DynamicClassAttribute):\n names.append(k)\n result=[]\n processed=set()\n \n for name in names:\n \n \n \n \n \n \n \n \n \n homecls=None\n get_obj=None\n dict_obj=None\n if name not in processed:\n try :\n if name =='__dict__':\n raise Exception(\"__dict__ is special, don't want the proxy\")\n get_obj=getattr(cls,name)\n except Exception as exc:\n pass\n else :\n homecls=getattr(get_obj,\"__objclass__\",homecls)\n if homecls not in class_bases:\n \n \n homecls=None\n last_cls=None\n \n for srch_cls in class_bases:\n srch_obj=getattr(srch_cls,name,None )\n if srch_obj is get_obj:\n last_cls=srch_cls\n \n for srch_cls in metamro:\n try :\n srch_obj=srch_cls.__getattr__(cls,name)\n except AttributeError:\n continue\n if srch_obj is get_obj:\n last_cls=srch_cls\n if last_cls is not None :\n homecls=last_cls\n for base in all_bases:\n if name in base.__dict__:\n dict_obj=base.__dict__[name]\n if homecls not in metamro:\n homecls=base\n break\n if homecls is None :\n \n \n continue\n obj=get_obj if get_obj is not None else dict_obj\n \n if isinstance(dict_obj,(staticmethod,types.BuiltinMethodType)):\n kind=\"static method\"\n obj=dict_obj\n elif isinstance(dict_obj,(classmethod,types.ClassMethodDescriptorType)):\n kind=\"class method\"\n obj=dict_obj\n elif isinstance(dict_obj,property):\n kind=\"property\"\n obj=dict_obj\n elif isroutine(obj):\n kind=\"method\"\n else :\n kind=\"data\"\n result.append(Attribute(name,kind,homecls,obj))\n processed.add(name)\n return result\n \n \n \ndef getmro(cls):\n ''\n return cls.__mro__\n \n \n \ndef unwrap(func,*,stop=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if stop is None :\n def _is_wrapper(f):\n return hasattr(f,'__wrapped__')\n else :\n def _is_wrapper(f):\n return hasattr(f,'__wrapped__')and not stop(f)\n f=func\n \n \n memo={id(f):f}\n recursion_limit=sys.getrecursionlimit()\n while _is_wrapper(func):\n func=func.__wrapped__\n id_func=id(func)\n if (id_func in memo)or (len(memo)>=recursion_limit):\n raise ValueError('wrapper loop when unwrapping {!r}'.format(f))\n memo[id_func]=func\n return func\n \n \ndef indentsize(line):\n ''\n expline=line.expandtabs()\n return len(expline)-len(expline.lstrip())\n \ndef _findclass(func):\n cls=sys.modules.get(func.__module__)\n if cls is None :\n return None\n for name in func.__qualname__.split('.')[:-1]:\n cls=getattr(cls,name)\n if not isclass(cls):\n return None\n return cls\n \ndef _finddoc(obj):\n if isclass(obj):\n for base in obj.__mro__:\n if base is not object:\n try :\n doc=base.__doc__\n except AttributeError:\n continue\n if doc is not None :\n return doc\n return None\n \n if ismethod(obj):\n name=obj.__func__.__name__\n self=obj.__self__\n if (isclass(self)and\n getattr(getattr(self,name,None ),'__func__')is obj.__func__):\n \n cls=self\n else :\n cls=self.__class__\n elif isfunction(obj):\n name=obj.__name__\n cls=_findclass(obj)\n if cls is None or 
getattr(cls,name)is not obj:\n return None\n elif isbuiltin(obj):\n name=obj.__name__\n self=obj.__self__\n if (isclass(self)and\n self.__qualname__+'.'+name ==obj.__qualname__):\n \n cls=self\n else :\n cls=self.__class__\n \n elif isinstance(obj,property):\n func=obj.fget\n name=func.__name__\n cls=_findclass(func)\n if cls is None or getattr(cls,name)is not obj:\n return None\n elif ismethoddescriptor(obj)or isdatadescriptor(obj):\n name=obj.__name__\n cls=obj.__objclass__\n if getattr(cls,name)is not obj:\n return None\n if ismemberdescriptor(obj):\n slots=getattr(cls,'__slots__',None )\n if isinstance(slots,dict)and name in slots:\n return slots[name]\n else :\n return None\n for base in cls.__mro__:\n try :\n doc=getattr(base,name).__doc__\n except AttributeError:\n continue\n if doc is not None :\n return doc\n return None\n \ndef getdoc(object):\n ''\n\n\n\n \n try :\n doc=object.__doc__\n except AttributeError:\n return None\n if doc is None :\n try :\n doc=_finddoc(object)\n except (AttributeError,TypeError):\n return None\n if not isinstance(doc,str):\n return None\n return cleandoc(doc)\n \ndef cleandoc(doc):\n ''\n\n\n \n try :\n lines=doc.expandtabs().split('\\n')\n except UnicodeError:\n return None\n else :\n \n margin=sys.maxsize\n for line in lines[1:]:\n content=len(line.lstrip())\n if content:\n indent=len(line)-content\n margin=min(margin,indent)\n \n if lines:\n lines[0]=lines[0].lstrip()\n if margin ')):\n raise OSError('source code not available')\n \n module=getmodule(object,file)\n if module:\n lines=linecache.getlines(file,module.__dict__)\n else :\n lines=linecache.getlines(file)\n if not lines:\n raise OSError('could not get source code')\n \n if ismodule(object):\n return lines,0\n \n if isclass(object):\n name=object.__name__\n pat=re.compile(r'^(\\s*)class\\s*'+name+r'\\b')\n \n \n \n candidates=[]\n for i in range(len(lines)):\n match=pat.match(lines[i])\n if match:\n \n if lines[i][0]=='c':\n return lines,i\n \n candidates.append((match.group(1),i))\n if candidates:\n \n \n candidates.sort()\n return lines,candidates[0][1]\n else :\n raise OSError('could not find class definition')\n \n if ismethod(object):\n object=object.__func__\n if isfunction(object):\n object=object.__code__\n if istraceback(object):\n object=object.tb_frame\n if isframe(object):\n object=object.f_code\n if iscode(object):\n if not hasattr(object,'co_firstlineno'):\n raise OSError('could not find function definition')\n lnum=object.co_firstlineno -1\n pat=re.compile(r'^(\\s*def\\s)|(\\s*async\\s+def\\s)|(.*(?0:\n if pat.match(lines[lnum]):break\n lnum=lnum -1\n return lines,lnum\n raise OSError('could not find code object')\n \ndef getcomments(object):\n ''\n\n\n \n try :\n lines,lnum=findsource(object)\n except (OSError,TypeError):\n return None\n \n if ismodule(object):\n \n start=0\n if lines and lines[0][:2]=='#!':start=1\n while start 0:\n indent=indentsize(lines[lnum])\n end=lnum -1\n if end >=0 and lines[end].lstrip()[:1]=='#'and\\\n indentsize(lines[end])==indent:\n comments=[lines[end].expandtabs().lstrip()]\n if end >0:\n end=end -1\n comment=lines[end].expandtabs().lstrip()\n while comment[:1]=='#'and indentsize(lines[end])==indent:\n comments[:0]=[comment]\n end=end -1\n if end <0:break\n comment=lines[end].expandtabs().lstrip()\n while comments and comments[0].strip()=='#':\n comments[:1]=[]\n while comments and comments[-1].strip()=='#':\n comments[-1:]=[]\n return ''.join(comments)\n \nclass EndOfBlock(Exception):pass\n\nclass BlockFinder:\n ''\n def __init__(self):\n 
self.indent=0\n self.islambda=False\n self.started=False\n self.passline=False\n self.indecorator=False\n self.decoratorhasargs=False\n self.last=1\n \n def tokeneater(self,type,token,srowcol,erowcol,line):\n if not self.started and not self.indecorator:\n \n if token ==\"@\":\n self.indecorator=True\n \n elif token in (\"def\",\"class\",\"lambda\"):\n if token ==\"lambda\":\n self.islambda=True\n self.started=True\n self.passline=True\n elif token ==\"(\":\n if self.indecorator:\n self.decoratorhasargs=True\n elif token ==\")\":\n if self.indecorator:\n self.indecorator=False\n self.decoratorhasargs=False\n elif type ==tokenize.NEWLINE:\n self.passline=False\n self.last=srowcol[0]\n if self.islambda:\n raise EndOfBlock\n \n \n if self.indecorator and not self.decoratorhasargs:\n self.indecorator=False\n elif self.passline:\n pass\n elif type ==tokenize.INDENT:\n self.indent=self.indent+1\n self.passline=True\n elif type ==tokenize.DEDENT:\n self.indent=self.indent -1\n \n \n \n if self.indent <=0:\n raise EndOfBlock\n elif self.indent ==0 and type not in (tokenize.COMMENT,tokenize.NL):\n \n \n raise EndOfBlock\n \ndef getblock(lines):\n ''\n blockfinder=BlockFinder()\n try :\n tokens=tokenize.generate_tokens(iter(lines).__next__)\n for _token in tokens:\n blockfinder.tokeneater(*_token)\n except (EndOfBlock,IndentationError):\n pass\n return lines[:blockfinder.last]\n \ndef getsourcelines(object):\n ''\n\n\n\n\n\n \n object=unwrap(object)\n lines,lnum=findsource(object)\n \n if istraceback(object):\n object=object.tb_frame\n \n \n if (ismodule(object)or\n (isframe(object)and object.f_code.co_name ==\"\")):\n return lines,0\n else :\n return getblock(lines[lnum:]),lnum+1\n \ndef getsource(object):\n ''\n\n\n\n \n lines,lnum=getsourcelines(object)\n return ''.join(lines)\n \n \ndef walktree(classes,children,parent):\n ''\n results=[]\n classes.sort(key=attrgetter('__module__','__name__'))\n for c in classes:\n results.append((c,c.__bases__))\n if c in children:\n results.append(walktree(children[c],children,c))\n return results\n \ndef getclasstree(classes,unique=False ):\n ''\n\n\n\n\n\n\n \n children={}\n roots=[]\n for c in classes:\n if c.__bases__:\n for parent in c.__bases__:\n if parent not in children:\n children[parent]=[]\n if c not in children[parent]:\n children[parent].append(c)\n if unique and parent in classes:break\n elif c not in roots:\n roots.append(c)\n for parent in children:\n if parent not in classes:\n roots.append(parent)\n return walktree(roots,children,None )\n \n \nArguments=namedtuple('Arguments','args, varargs, varkw')\n\ndef getargs(co):\n ''\n\n\n\n\n \n if not iscode(co):\n raise TypeError('{!r} is not a code object'.format(co))\n \n names=co.co_varnames\n nargs=co.co_argcount\n nkwargs=co.co_kwonlyargcount\n args=list(names[:nargs])\n kwonlyargs=list(names[nargs:nargs+nkwargs])\n step=0\n \n nargs +=nkwargs\n varargs=None\n if co.co_flags&CO_VARARGS:\n varargs=co.co_varnames[nargs]\n nargs=nargs+1\n varkw=None\n if co.co_flags&CO_VARKEYWORDS:\n varkw=co.co_varnames[nargs]\n return Arguments(args+kwonlyargs,varargs,varkw)\n \nArgSpec=namedtuple('ArgSpec','args varargs keywords defaults')\n\ndef getargspec(func):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n warnings.warn(\"inspect.getargspec() is deprecated since Python 3.0, \"\n \"use inspect.signature() or inspect.getfullargspec()\",\n DeprecationWarning,stacklevel=2)\n args,varargs,varkw,defaults,kwonlyargs,kwonlydefaults,ann=\\\n getfullargspec(func)\n if kwonlyargs or ann:\n raise ValueError(\"Function 
has keyword-only parameters or annotations\"\n \", use inspect.signature() API which can support them\")\n return ArgSpec(args,varargs,varkw,defaults)\n \nFullArgSpec=namedtuple('FullArgSpec',\n'args, varargs, varkw, defaults, kwonlyargs, kwonlydefaults, annotations')\n\ndef getfullargspec(func):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n try :\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n sig=_signature_from_callable(func,\n follow_wrapper_chains=False ,\n skip_bound_arg=False ,\n sigcls=Signature)\n except Exception as ex:\n \n \n \n \n raise TypeError('unsupported callable')from ex\n \n args=[]\n varargs=None\n varkw=None\n posonlyargs=[]\n kwonlyargs=[]\n defaults=()\n annotations={}\n defaults=()\n kwdefaults={}\n \n if sig.return_annotation is not sig.empty:\n annotations['return']=sig.return_annotation\n \n for param in sig.parameters.values():\n kind=param.kind\n name=param.name\n \n if kind is _POSITIONAL_ONLY:\n posonlyargs.append(name)\n if param.default is not param.empty:\n defaults +=(param.default,)\n elif kind is _POSITIONAL_OR_KEYWORD:\n args.append(name)\n if param.default is not param.empty:\n defaults +=(param.default,)\n elif kind is _VAR_POSITIONAL:\n varargs=name\n elif kind is _KEYWORD_ONLY:\n kwonlyargs.append(name)\n if param.default is not param.empty:\n kwdefaults[name]=param.default\n elif kind is _VAR_KEYWORD:\n varkw=name\n \n if param.annotation is not param.empty:\n annotations[name]=param.annotation\n \n if not kwdefaults:\n \n kwdefaults=None\n \n if not defaults:\n \n defaults=None\n \n return FullArgSpec(posonlyargs+args,varargs,varkw,defaults,\n kwonlyargs,kwdefaults,annotations)\n \n \nArgInfo=namedtuple('ArgInfo','args varargs keywords locals')\n\ndef getargvalues(frame):\n ''\n\n\n\n\n \n args,varargs,varkw=getargs(frame.f_code)\n return ArgInfo(args,varargs,varkw,frame.f_locals)\n \ndef formatannotation(annotation,base_module=None ):\n if getattr(annotation,'__module__',None )=='typing':\n return repr(annotation).replace('typing.','')\n if isinstance(annotation,type):\n if annotation.__module__ in ('builtins',base_module):\n return annotation.__qualname__\n return annotation.__module__+'.'+annotation.__qualname__\n return repr(annotation)\n \ndef formatannotationrelativeto(object):\n module=getattr(object,'__module__',None )\n def _formatannotation(annotation):\n return formatannotation(annotation,module)\n return _formatannotation\n \ndef formatargspec(args,varargs=None ,varkw=None ,defaults=None ,\nkwonlyargs=(),kwonlydefaults={},annotations={},\nformatarg=str,\nformatvarargs=lambda name:'*'+name,\nformatvarkw=lambda name:'**'+name,\nformatvalue=lambda value:'='+repr(value),\nformatreturns=lambda text:' -> '+text,\nformatannotation=formatannotation):\n ''\n\n\n\n\n\n\n\n\n\n \n \n from warnings import warn\n \n warn(\"`formatargspec` is deprecated since Python 3.5. 
Use `signature` and \"\n \"the `Signature` object directly\",\n DeprecationWarning,\n stacklevel=2)\n \n def formatargandannotation(arg):\n result=formatarg(arg)\n if arg in annotations:\n result +=': '+formatannotation(annotations[arg])\n return result\n specs=[]\n if defaults:\n firstdefault=len(args)-len(defaults)\n for i,arg in enumerate(args):\n spec=formatargandannotation(arg)\n if defaults and i >=firstdefault:\n spec=spec+formatvalue(defaults[i -firstdefault])\n specs.append(spec)\n if varargs is not None :\n specs.append(formatvarargs(formatargandannotation(varargs)))\n else :\n if kwonlyargs:\n specs.append('*')\n if kwonlyargs:\n for kwonlyarg in kwonlyargs:\n spec=formatargandannotation(kwonlyarg)\n if kwonlydefaults and kwonlyarg in kwonlydefaults:\n spec +=formatvalue(kwonlydefaults[kwonlyarg])\n specs.append(spec)\n if varkw is not None :\n specs.append(formatvarkw(formatargandannotation(varkw)))\n result='('+', '.join(specs)+')'\n if 'return'in annotations:\n result +=formatreturns(formatannotation(annotations['return']))\n return result\n \ndef formatargvalues(args,varargs,varkw,locals,\nformatarg=str,\nformatvarargs=lambda name:'*'+name,\nformatvarkw=lambda name:'**'+name,\nformatvalue=lambda value:'='+repr(value)):\n ''\n\n\n\n\n \n def convert(name,locals=locals,\n formatarg=formatarg,formatvalue=formatvalue):\n return formatarg(name)+formatvalue(locals[name])\n specs=[]\n for i in range(len(args)):\n specs.append(convert(args[i]))\n if varargs:\n specs.append(formatvarargs(varargs)+formatvalue(locals[varargs]))\n if varkw:\n specs.append(formatvarkw(varkw)+formatvalue(locals[varkw]))\n return '('+', '.join(specs)+')'\n \ndef _missing_arguments(f_name,argnames,pos,values):\n names=[repr(name)for name in argnames if name not in values]\n missing=len(names)\n if missing ==1:\n s=names[0]\n elif missing ==2:\n s=\"{} and {}\".format(*names)\n else :\n tail=\", {} and {}\".format(*names[-2:])\n del names[-2:]\n s=\", \".join(names)+tail\n raise TypeError(\"%s() missing %i required %s argument%s: %s\"%\n (f_name,missing,\n \"positional\"if pos else \"keyword-only\",\n \"\"if missing ==1 else \"s\",s))\n \ndef _too_many(f_name,args,kwonly,varargs,defcount,given,values):\n atleast=len(args)-defcount\n kwonly_given=len([arg for arg in kwonly if arg in values])\n if varargs:\n plural=atleast !=1\n sig=\"at least %d\"%(atleast,)\n elif defcount:\n plural=True\n sig=\"from %d to %d\"%(atleast,len(args))\n else :\n plural=len(args)!=1\n sig=str(len(args))\n kwonly_sig=\"\"\n if kwonly_given:\n msg=\" positional argument%s (and %d keyword-only argument%s)\"\n kwonly_sig=(msg %(\"s\"if given !=1 else \"\",kwonly_given,\n \"s\"if kwonly_given !=1 else \"\"))\n raise TypeError(\"%s() takes %s positional argument%s but %d%s %s given\"%\n (f_name,sig,\"s\"if plural else \"\",given,kwonly_sig,\n \"was\"if given ==1 and not kwonly_given else \"were\"))\n \ndef getcallargs(func,/,*positional,**named):\n ''\n\n\n\n \n spec=getfullargspec(func)\n args,varargs,varkw,defaults,kwonlyargs,kwonlydefaults,ann=spec\n f_name=func.__name__\n arg2value={}\n \n \n if ismethod(func)and func.__self__ is not None :\n \n positional=(func.__self__,)+positional\n num_pos=len(positional)\n num_args=len(args)\n num_defaults=len(defaults)if defaults else 0\n \n n=min(num_pos,num_args)\n for i in range(n):\n arg2value[args[i]]=positional[i]\n if varargs:\n arg2value[varargs]=tuple(positional[n:])\n possible_kwargs=set(args+kwonlyargs)\n if varkw:\n arg2value[varkw]={}\n for kw,value in named.items():\n if kw not 
in possible_kwargs:\n if not varkw:\n raise TypeError(\"%s() got an unexpected keyword argument %r\"%\n (f_name,kw))\n arg2value[varkw][kw]=value\n continue\n if kw in arg2value:\n raise TypeError(\"%s() got multiple values for argument %r\"%\n (f_name,kw))\n arg2value[kw]=value\n if num_pos >num_args and not varargs:\n _too_many(f_name,args,kwonlyargs,varargs,num_defaults,\n num_pos,arg2value)\n if num_pos 0:\n start=lineno -1 -context //2\n try :\n lines,lnum=findsource(frame)\n except OSError:\n lines=index=None\n else :\n start=max(0,min(start,len(lines)-context))\n lines=lines[start:start+context]\n index=lineno -1 -start\n else :\n lines=index=None\n \n return Traceback(filename,lineno,frame.f_code.co_name,lines,index)\n \ndef getlineno(frame):\n ''\n \n return frame.f_lineno\n \nFrameInfo=namedtuple('FrameInfo',('frame',)+Traceback._fields)\n\ndef getouterframes(frame,context=1):\n ''\n\n\n \n framelist=[]\n while frame:\n frameinfo=(frame,)+getframeinfo(frame,context)\n framelist.append(FrameInfo(*frameinfo))\n frame=frame.f_back\n return framelist\n \ndef getinnerframes(tb,context=1):\n ''\n\n\n \n framelist=[]\n while tb:\n frameinfo=(tb.tb_frame,)+getframeinfo(tb,context)\n framelist.append(FrameInfo(*frameinfo))\n tb=tb.tb_next\n return framelist\n \ndef currentframe():\n ''\n return sys._getframe(1)if hasattr(sys,\"_getframe\")else None\n \ndef stack(context=1):\n ''\n return getouterframes(sys._getframe(1),context)\n \ndef trace(context=1):\n ''\n return getinnerframes(sys.exc_info()[2],context)\n \n \n \n \n_sentinel=object()\n\ndef _static_getmro(klass):\n return type.__dict__['__mro__'].__get__(klass)\n \ndef _check_instance(obj,attr):\n instance_dict={}\n try :\n instance_dict=object.__getattribute__(obj,\"__dict__\")\n except AttributeError:\n pass\n return dict.get(instance_dict,attr,_sentinel)\n \n \ndef _check_class(klass,attr):\n for entry in _static_getmro(klass):\n if _shadowed_dict(type(entry))is _sentinel:\n try :\n return entry.__dict__[attr]\n except KeyError:\n pass\n return _sentinel\n \ndef _is_type(obj):\n try :\n _static_getmro(obj)\n except TypeError:\n return False\n return True\n \ndef _shadowed_dict(klass):\n dict_attr=type.__dict__[\"__dict__\"]\n for entry in _static_getmro(klass):\n try :\n class_dict=dict_attr.__get__(entry)[\"__dict__\"]\n except KeyError:\n pass\n else :\n if not (type(class_dict)is types.GetSetDescriptorType and\n class_dict.__name__ ==\"__dict__\"and\n class_dict.__objclass__ is entry):\n return class_dict\n return _sentinel\n \ndef getattr_static(obj,attr,default=_sentinel):\n ''\n\n\n\n\n\n\n\n\n \n instance_result=_sentinel\n if not _is_type(obj):\n klass=type(obj)\n dict_attr=_shadowed_dict(klass)\n if (dict_attr is _sentinel or\n type(dict_attr)is types.MemberDescriptorType):\n instance_result=_check_instance(obj,attr)\n else :\n klass=obj\n \n klass_result=_check_class(klass,attr)\n \n if instance_result is not _sentinel and klass_result is not _sentinel:\n if (_check_class(type(klass_result),'__get__')is not _sentinel and\n _check_class(type(klass_result),'__set__')is not _sentinel):\n return klass_result\n \n if instance_result is not _sentinel:\n return instance_result\n if klass_result is not _sentinel:\n return klass_result\n \n if obj is klass:\n \n for entry in _static_getmro(type(klass)):\n if _shadowed_dict(type(entry))is _sentinel:\n try :\n return entry.__dict__[attr]\n except KeyError:\n pass\n if default is not _sentinel:\n return default\n raise AttributeError(attr)\n \n \n \n 
\nGEN_CREATED='GEN_CREATED'\nGEN_RUNNING='GEN_RUNNING'\nGEN_SUSPENDED='GEN_SUSPENDED'\nGEN_CLOSED='GEN_CLOSED'\n\ndef getgeneratorstate(generator):\n ''\n\n\n\n\n\n\n \n if generator.gi_running:\n return GEN_RUNNING\n if generator.gi_frame is None :\n return GEN_CLOSED\n if generator.gi_frame.f_lasti ==-1:\n return GEN_CREATED\n return GEN_SUSPENDED\n \n \ndef getgeneratorlocals(generator):\n ''\n\n\n\n \n \n if not isgenerator(generator):\n raise TypeError(\"{!r} is not a Python generator\".format(generator))\n \n frame=getattr(generator,\"gi_frame\",None )\n if frame is not None :\n return generator.gi_frame.f_locals\n else :\n return {}\n \n \n \n \nCORO_CREATED='CORO_CREATED'\nCORO_RUNNING='CORO_RUNNING'\nCORO_SUSPENDED='CORO_SUSPENDED'\nCORO_CLOSED='CORO_CLOSED'\n\ndef getcoroutinestate(coroutine):\n ''\n\n\n\n\n\n\n \n if coroutine.cr_running:\n return CORO_RUNNING\n if coroutine.cr_frame is None :\n return CORO_CLOSED\n if coroutine.cr_frame.f_lasti ==-1:\n return CORO_CREATED\n return CORO_SUSPENDED\n \n \ndef getcoroutinelocals(coroutine):\n ''\n\n\n\n \n frame=getattr(coroutine,\"cr_frame\",None )\n if frame is not None :\n return frame.f_locals\n else :\n return {}\n \n \n \n \n \n \n \n_WrapperDescriptor=type(type.__call__)\n_MethodWrapper=type(all.__call__)\n_ClassMethodWrapper=type(int.__dict__['from_bytes'])\n\n_NonUserDefinedCallables=(_WrapperDescriptor,\n_MethodWrapper,\n_ClassMethodWrapper,\ntypes.BuiltinFunctionType)\n\n\ndef _signature_get_user_defined_method(cls,method_name):\n ''\n\n\n \n try :\n meth=getattr(cls,method_name)\n except AttributeError:\n return\n else :\n if not isinstance(meth,_NonUserDefinedCallables):\n \n \n return meth\n \n \ndef _signature_get_partial(wrapped_sig,partial,extra_args=()):\n ''\n\n\n \n \n old_params=wrapped_sig.parameters\n new_params=OrderedDict(old_params.items())\n \n partial_args=partial.args or ()\n partial_keywords=partial.keywords or {}\n \n if extra_args:\n partial_args=extra_args+partial_args\n \n try :\n ba=wrapped_sig.bind_partial(*partial_args,**partial_keywords)\n except TypeError as ex:\n msg='partial object {!r} has incorrect arguments'.format(partial)\n raise ValueError(msg)from ex\n \n \n transform_to_kwonly=False\n for param_name,param in old_params.items():\n try :\n arg_value=ba.arguments[param_name]\n except KeyError:\n pass\n else :\n if param.kind is _POSITIONAL_ONLY:\n \n \n new_params.pop(param_name)\n continue\n \n if param.kind is _POSITIONAL_OR_KEYWORD:\n if param_name in partial_keywords:\n \n \n \n \n \n \n \n \n \n \n \n \n transform_to_kwonly=True\n \n new_params[param_name]=param.replace(default=arg_value)\n else :\n \n new_params.pop(param.name)\n continue\n \n if param.kind is _KEYWORD_ONLY:\n \n new_params[param_name]=param.replace(default=arg_value)\n \n if transform_to_kwonly:\n assert param.kind is not _POSITIONAL_ONLY\n \n if param.kind is _POSITIONAL_OR_KEYWORD:\n new_param=new_params[param_name].replace(kind=_KEYWORD_ONLY)\n new_params[param_name]=new_param\n new_params.move_to_end(param_name)\n elif param.kind in (_KEYWORD_ONLY,_VAR_KEYWORD):\n new_params.move_to_end(param_name)\n elif param.kind is _VAR_POSITIONAL:\n new_params.pop(param.name)\n \n return wrapped_sig.replace(parameters=new_params.values())\n \n \ndef _signature_bound_method(sig):\n ''\n\n \n \n params=tuple(sig.parameters.values())\n \n if not params or params[0].kind in (_VAR_KEYWORD,_KEYWORD_ONLY):\n raise ValueError('invalid method signature')\n \n kind=params[0].kind\n if kind in 
(_POSITIONAL_OR_KEYWORD,_POSITIONAL_ONLY):\n \n \n params=params[1:]\n else :\n if kind is not _VAR_POSITIONAL:\n \n \n raise ValueError('invalid argument type')\n \n \n \n return sig.replace(parameters=params)\n \n \ndef _signature_is_builtin(obj):\n ''\n\n \n return (isbuiltin(obj)or\n ismethoddescriptor(obj)or\n isinstance(obj,_NonUserDefinedCallables)or\n \n \n obj in (type,object))\n \n \ndef _signature_is_functionlike(obj):\n ''\n\n\n\n \n \n if not callable(obj)or isclass(obj):\n \n \n return False\n \n name=getattr(obj,'__name__',None )\n code=getattr(obj,'__code__',None )\n defaults=getattr(obj,'__defaults__',_void)\n kwdefaults=getattr(obj,'__kwdefaults__',_void)\n annotations=getattr(obj,'__annotations__',None )\n \n return (isinstance(code,types.CodeType)and\n isinstance(name,str)and\n (defaults is None or isinstance(defaults,tuple))and\n (kwdefaults is None or isinstance(kwdefaults,dict))and\n isinstance(annotations,dict))\n \n \ndef _signature_get_bound_param(spec):\n ''\n\n\n\n\n \n \n assert spec.startswith('($')\n \n pos=spec.find(',')\n if pos ==-1:\n pos=spec.find(')')\n \n cpos=spec.find(':')\n assert cpos ==-1 or cpos >pos\n \n cpos=spec.find('=')\n assert cpos ==-1 or cpos >pos\n \n return spec[2:pos]\n \n \ndef _signature_strip_non_python_syntax(signature):\n ''\n\n\n\n\n\n\n\n\n\n \n \n if not signature:\n return signature,None ,None\n \n self_parameter=None\n last_positional_only=None\n \n lines=[l.encode('ascii')for l in signature.split('\\n')]\n generator=iter(lines).__next__\n token_stream=tokenize.tokenize(generator)\n \n delayed_comma=False\n skip_next_comma=False\n text=[]\n add=text.append\n \n current_parameter=0\n OP=token.OP\n ERRORTOKEN=token.ERRORTOKEN\n \n \n t=next(token_stream)\n assert t.type ==tokenize.ENCODING\n \n for t in token_stream:\n type,string=t.type,t.string\n \n if type ==OP:\n if string ==',':\n if skip_next_comma:\n skip_next_comma=False\n else :\n assert not delayed_comma\n delayed_comma=True\n current_parameter +=1\n continue\n \n if string =='/':\n assert not skip_next_comma\n assert last_positional_only is None\n skip_next_comma=True\n last_positional_only=current_parameter -1\n continue\n \n if (type ==ERRORTOKEN)and (string =='$'):\n assert self_parameter is None\n self_parameter=current_parameter\n continue\n \n if delayed_comma:\n delayed_comma=False\n if not ((type ==OP)and (string ==')')):\n add(', ')\n add(string)\n if (string ==','):\n add(' ')\n clean_signature=''.join(text)\n return clean_signature,self_parameter,last_positional_only\n \n \ndef _signature_fromstr(cls,obj,s,skip_bound_arg=True ):\n ''\n\n \n \n \n import ast\n \n Parameter=cls._parameter_cls\n \n clean_signature,self_parameter,last_positional_only=\\\n _signature_strip_non_python_syntax(s)\n \n program=\"def foo\"+clean_signature+\": pass\"\n \n try :\n module=ast.parse(program)\n except SyntaxError:\n module=None\n \n if not isinstance(module,ast.Module):\n raise ValueError(\"{!r} builtin has invalid signature\".format(obj))\n \n f=module.body[0]\n \n parameters=[]\n empty=Parameter.empty\n invalid=object()\n \n module=None\n module_dict={}\n module_name=getattr(obj,'__module__',None )\n if module_name:\n module=sys.modules.get(module_name,None )\n if module:\n module_dict=module.__dict__\n sys_module_dict=sys.modules.copy()\n \n def parse_name(node):\n assert isinstance(node,ast.arg)\n if node.annotation is not None :\n raise ValueError(\"Annotations are not currently supported\")\n return node.arg\n \n def wrap_value(s):\n try :\n 
value=eval(s,module_dict)\n except NameError:\n try :\n value=eval(s,sys_module_dict)\n except NameError:\n raise RuntimeError()\n \n if isinstance(value,(str,int,float,bytes,bool,type(None ))):\n return ast.Constant(value)\n raise RuntimeError()\n \n class RewriteSymbolics(ast.NodeTransformer):\n def visit_Attribute(self,node):\n a=[]\n n=node\n while isinstance(n,ast.Attribute):\n a.append(n.attr)\n n=n.value\n if not isinstance(n,ast.Name):\n raise RuntimeError()\n a.append(n.id)\n value=\".\".join(reversed(a))\n return wrap_value(value)\n \n def visit_Name(self,node):\n if not isinstance(node.ctx,ast.Load):\n raise ValueError()\n return wrap_value(node.id)\n \n def p(name_node,default_node,default=empty):\n name=parse_name(name_node)\n if name is invalid:\n return None\n if default_node and default_node is not _empty:\n try :\n default_node=RewriteSymbolics().visit(default_node)\n o=ast.literal_eval(default_node)\n except ValueError:\n o=invalid\n if o is invalid:\n return None\n default=o if o is not invalid else default\n parameters.append(Parameter(name,kind,default=default,annotation=empty))\n \n \n args=reversed(f.args.args)\n defaults=reversed(f.args.defaults)\n iter=itertools.zip_longest(args,defaults,fillvalue=None )\n if last_positional_only is not None :\n kind=Parameter.POSITIONAL_ONLY\n else :\n kind=Parameter.POSITIONAL_OR_KEYWORD\n for i,(name,default)in enumerate(reversed(list(iter))):\n p(name,default)\n if i ==last_positional_only:\n kind=Parameter.POSITIONAL_OR_KEYWORD\n \n \n if f.args.vararg:\n kind=Parameter.VAR_POSITIONAL\n p(f.args.vararg,empty)\n \n \n kind=Parameter.KEYWORD_ONLY\n for name,default in zip(f.args.kwonlyargs,f.args.kw_defaults):\n p(name,default)\n \n \n if f.args.kwarg:\n kind=Parameter.VAR_KEYWORD\n p(f.args.kwarg,empty)\n \n if self_parameter is not None :\n \n \n \n \n \n assert parameters\n _self=getattr(obj,'__self__',None )\n self_isbound=_self is not None\n self_ismodule=ismodule(_self)\n if self_isbound and (self_ismodule or skip_bound_arg):\n parameters.pop(0)\n else :\n \n p=parameters[0].replace(kind=Parameter.POSITIONAL_ONLY)\n parameters[0]=p\n \n return cls(parameters,return_annotation=cls.empty)\n \n \ndef _signature_from_builtin(cls,func,skip_bound_arg=True ):\n ''\n\n \n \n if not _signature_is_builtin(func):\n raise TypeError(\"{!r} is not a Python builtin \"\n \"function\".format(func))\n \n s=getattr(func,\"__text_signature__\",None )\n if not s:\n raise ValueError(\"no signature found for builtin {!r}\".format(func))\n \n return _signature_fromstr(cls,func,s,skip_bound_arg)\n \n \ndef _signature_from_function(cls,func,skip_bound_arg=True ):\n ''\n \n is_duck_function=False\n if not isfunction(func):\n if _signature_is_functionlike(func):\n is_duck_function=True\n else :\n \n \n raise TypeError('{!r} is not a Python function'.format(func))\n \n s=getattr(func,\"__text_signature__\",None )\n if s:\n return _signature_fromstr(cls,func,s,skip_bound_arg)\n \n Parameter=cls._parameter_cls\n \n \n func_code=func.__code__\n pos_count=func_code.co_argcount\n arg_names=func_code.co_varnames\n posonly_count=func_code.co_posonlyargcount\n positional=arg_names[:pos_count]\n keyword_only_count=func_code.co_kwonlyargcount\n keyword_only=arg_names[pos_count:pos_count+keyword_only_count]\n annotations=func.__annotations__\n defaults=func.__defaults__\n kwdefaults=func.__kwdefaults__\n \n if defaults:\n pos_default_count=len(defaults)\n else :\n pos_default_count=0\n \n parameters=[]\n \n non_default_count=pos_count -pos_default_count\n 
posonly_left=posonly_count\n \n \n for name in positional[:non_default_count]:\n kind=_POSITIONAL_ONLY if posonly_left else _POSITIONAL_OR_KEYWORD\n annotation=annotations.get(name,_empty)\n parameters.append(Parameter(name,annotation=annotation,\n kind=kind))\n if posonly_left:\n posonly_left -=1\n \n \n for offset,name in enumerate(positional[non_default_count:]):\n kind=_POSITIONAL_ONLY if posonly_left else _POSITIONAL_OR_KEYWORD\n annotation=annotations.get(name,_empty)\n parameters.append(Parameter(name,annotation=annotation,\n kind=kind,\n default=defaults[offset]))\n if posonly_left:\n posonly_left -=1\n \n \n if func_code.co_flags&CO_VARARGS:\n name=arg_names[pos_count+keyword_only_count]\n annotation=annotations.get(name,_empty)\n parameters.append(Parameter(name,annotation=annotation,\n kind=_VAR_POSITIONAL))\n \n \n for name in keyword_only:\n default=_empty\n if kwdefaults is not None :\n default=kwdefaults.get(name,_empty)\n \n annotation=annotations.get(name,_empty)\n parameters.append(Parameter(name,annotation=annotation,\n kind=_KEYWORD_ONLY,\n default=default))\n \n if func_code.co_flags&CO_VARKEYWORDS:\n index=pos_count+keyword_only_count\n if func_code.co_flags&CO_VARARGS:\n index +=1\n \n name=arg_names[index]\n annotation=annotations.get(name,_empty)\n parameters.append(Parameter(name,annotation=annotation,\n kind=_VAR_KEYWORD))\n \n \n \n return cls(parameters,\n return_annotation=annotations.get('return',_empty),\n __validate_parameters__=is_duck_function)\n \n \ndef _signature_from_callable(obj,*,\nfollow_wrapper_chains=True ,\nskip_bound_arg=True ,\nsigcls):\n\n ''\n\n \n \n if not callable(obj):\n raise TypeError('{!r} is not a callable object'.format(obj))\n \n if isinstance(obj,types.MethodType):\n \n \n sig=_signature_from_callable(\n obj.__func__,\n follow_wrapper_chains=follow_wrapper_chains,\n skip_bound_arg=skip_bound_arg,\n sigcls=sigcls)\n \n if skip_bound_arg:\n return _signature_bound_method(sig)\n else :\n return sig\n \n \n if follow_wrapper_chains:\n obj=unwrap(obj,stop=(lambda f:hasattr(f,\"__signature__\")))\n if isinstance(obj,types.MethodType):\n \n \n \n return _signature_from_callable(\n obj,\n follow_wrapper_chains=follow_wrapper_chains,\n skip_bound_arg=skip_bound_arg,\n sigcls=sigcls)\n \n try :\n sig=obj.__signature__\n except AttributeError:\n pass\n else :\n if sig is not None :\n if not isinstance(sig,Signature):\n raise TypeError(\n 'unexpected object {!r} in __signature__ '\n 'attribute'.format(sig))\n return sig\n \n try :\n partialmethod=obj._partialmethod\n except AttributeError:\n pass\n else :\n if isinstance(partialmethod,functools.partialmethod):\n \n \n \n \n \n \n \n wrapped_sig=_signature_from_callable(\n partialmethod.func,\n follow_wrapper_chains=follow_wrapper_chains,\n skip_bound_arg=skip_bound_arg,\n sigcls=sigcls)\n \n sig=_signature_get_partial(wrapped_sig,partialmethod,(None ,))\n first_wrapped_param=tuple(wrapped_sig.parameters.values())[0]\n if first_wrapped_param.kind is Parameter.VAR_POSITIONAL:\n \n \n return sig\n else :\n sig_params=tuple(sig.parameters.values())\n assert (not sig_params or\n first_wrapped_param is not sig_params[0])\n new_params=(first_wrapped_param,)+sig_params\n return sig.replace(parameters=new_params)\n \n if isfunction(obj)or _signature_is_functionlike(obj):\n \n \n return _signature_from_function(sigcls,obj,\n skip_bound_arg=skip_bound_arg)\n \n if _signature_is_builtin(obj):\n return _signature_from_builtin(sigcls,obj,\n skip_bound_arg=skip_bound_arg)\n \n if 
isinstance(obj,functools.partial):\n wrapped_sig=_signature_from_callable(\n obj.func,\n follow_wrapper_chains=follow_wrapper_chains,\n skip_bound_arg=skip_bound_arg,\n sigcls=sigcls)\n return _signature_get_partial(wrapped_sig,obj)\n \n sig=None\n if isinstance(obj,type):\n \n \n \n \n call=_signature_get_user_defined_method(type(obj),'__call__')\n if call is not None :\n sig=_signature_from_callable(\n call,\n follow_wrapper_chains=follow_wrapper_chains,\n skip_bound_arg=skip_bound_arg,\n sigcls=sigcls)\n else :\n \n new=_signature_get_user_defined_method(obj,'__new__')\n if new is not None :\n sig=_signature_from_callable(\n new,\n follow_wrapper_chains=follow_wrapper_chains,\n skip_bound_arg=skip_bound_arg,\n sigcls=sigcls)\n else :\n \n init=_signature_get_user_defined_method(obj,'__init__')\n if init is not None :\n sig=_signature_from_callable(\n init,\n follow_wrapper_chains=follow_wrapper_chains,\n skip_bound_arg=skip_bound_arg,\n sigcls=sigcls)\n \n if sig is None :\n \n \n \n for base in obj.__mro__[:-1]:\n \n \n \n \n \n \n \n try :\n text_sig=base.__text_signature__\n except AttributeError:\n pass\n else :\n if text_sig:\n \n \n return _signature_fromstr(sigcls,obj,text_sig)\n \n \n \n \n if type not in obj.__mro__:\n \n \n if (obj.__init__ is object.__init__ and\n obj.__new__ is object.__new__):\n \n return sigcls.from_callable(object)\n else :\n raise ValueError(\n 'no signature found for builtin type {!r}'.format(obj))\n \n elif not isinstance(obj,_NonUserDefinedCallables):\n \n \n \n \n call=_signature_get_user_defined_method(type(obj),'__call__')\n if call is not None :\n try :\n sig=_signature_from_callable(\n call,\n follow_wrapper_chains=follow_wrapper_chains,\n skip_bound_arg=skip_bound_arg,\n sigcls=sigcls)\n except ValueError as ex:\n msg='no signature found for {!r}'.format(obj)\n raise ValueError(msg)from ex\n \n if sig is not None :\n \n \n if skip_bound_arg:\n return _signature_bound_method(sig)\n else :\n return sig\n \n if isinstance(obj,types.BuiltinFunctionType):\n \n msg='no signature found for builtin function {!r}'.format(obj)\n raise ValueError(msg)\n \n raise ValueError('callable {!r} is not supported by signature'.format(obj))\n \n \nclass _void:\n ''\n \n \nclass _empty:\n ''\n \n \nclass _ParameterKind(enum.IntEnum):\n POSITIONAL_ONLY=0\n POSITIONAL_OR_KEYWORD=1\n VAR_POSITIONAL=2\n KEYWORD_ONLY=3\n VAR_KEYWORD=4\n \n def __str__(self):\n return self._name_\n \n @property\n def description(self):\n return _PARAM_NAME_MAPPING[self]\n \n_POSITIONAL_ONLY=_ParameterKind.POSITIONAL_ONLY\n_POSITIONAL_OR_KEYWORD=_ParameterKind.POSITIONAL_OR_KEYWORD\n_VAR_POSITIONAL=_ParameterKind.VAR_POSITIONAL\n_KEYWORD_ONLY=_ParameterKind.KEYWORD_ONLY\n_VAR_KEYWORD=_ParameterKind.VAR_KEYWORD\n\n_PARAM_NAME_MAPPING={\n_POSITIONAL_ONLY:'positional-only',\n_POSITIONAL_OR_KEYWORD:'positional or keyword',\n_VAR_POSITIONAL:'variadic positional',\n_KEYWORD_ONLY:'keyword-only',\n_VAR_KEYWORD:'variadic keyword'\n}\n\n\nclass Parameter:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n __slots__=('_name','_kind','_default','_annotation')\n \n POSITIONAL_ONLY=_POSITIONAL_ONLY\n POSITIONAL_OR_KEYWORD=_POSITIONAL_OR_KEYWORD\n VAR_POSITIONAL=_VAR_POSITIONAL\n KEYWORD_ONLY=_KEYWORD_ONLY\n VAR_KEYWORD=_VAR_KEYWORD\n \n empty=_empty\n \n def __init__(self,name,kind,*,default=_empty,annotation=_empty):\n try :\n self._kind=_ParameterKind(kind)\n except ValueError:\n raise ValueError(f'value {kind!r} is not a valid Parameter.kind')\n if default is not _empty:\n if self._kind in 
(_VAR_POSITIONAL,_VAR_KEYWORD):\n msg='{} parameters cannot have default values'\n msg=msg.format(self._kind.description)\n raise ValueError(msg)\n self._default=default\n self._annotation=annotation\n \n if name is _empty:\n raise ValueError('name is a required attribute for Parameter')\n \n if not isinstance(name,str):\n msg='name must be a str, not a {}'.format(type(name).__name__)\n raise TypeError(msg)\n \n if name[0]=='.'and name[1:].isdigit():\n \n \n \n \n if self._kind !=_POSITIONAL_OR_KEYWORD:\n msg=(\n 'implicit arguments must be passed as '\n 'positional or keyword arguments, not {}'\n )\n msg=msg.format(self._kind.description)\n raise ValueError(msg)\n self._kind=_POSITIONAL_ONLY\n name='implicit{}'.format(name[1:])\n \n if not name.isidentifier():\n raise ValueError('{!r} is not a valid parameter name'.format(name))\n \n self._name=name\n \n def __reduce__(self):\n return (type(self),\n (self._name,self._kind),\n {'_default':self._default,\n '_annotation':self._annotation})\n \n def __setstate__(self,state):\n self._default=state['_default']\n self._annotation=state['_annotation']\n \n @property\n def name(self):\n return self._name\n \n @property\n def default(self):\n return self._default\n \n @property\n def annotation(self):\n return self._annotation\n \n @property\n def kind(self):\n return self._kind\n \n def replace(self,*,name=_void,kind=_void,\n annotation=_void,default=_void):\n ''\n \n if name is _void:\n name=self._name\n \n if kind is _void:\n kind=self._kind\n \n if annotation is _void:\n annotation=self._annotation\n \n if default is _void:\n default=self._default\n \n return type(self)(name,kind,default=default,annotation=annotation)\n \n def __str__(self):\n kind=self.kind\n formatted=self._name\n \n \n if self._annotation is not _empty:\n formatted='{}: {}'.format(formatted,\n formatannotation(self._annotation))\n \n if self._default is not _empty:\n if self._annotation is not _empty:\n formatted='{} = {}'.format(formatted,repr(self._default))\n else :\n formatted='{}={}'.format(formatted,repr(self._default))\n \n if kind ==_VAR_POSITIONAL:\n formatted='*'+formatted\n elif kind ==_VAR_KEYWORD:\n formatted='**'+formatted\n \n return formatted\n \n def __repr__(self):\n return '<{} \"{}\">'.format(self.__class__.__name__,self)\n \n def __hash__(self):\n return hash((self.name,self.kind,self.annotation,self.default))\n \n def __eq__(self,other):\n if self is other:\n return True\n if not isinstance(other,Parameter):\n return NotImplemented\n return (self._name ==other._name and\n self._kind ==other._kind and\n self._default ==other._default and\n self._annotation ==other._annotation)\n \n \nclass BoundArguments:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n __slots__=('arguments','_signature','__weakref__')\n \n def __init__(self,signature,arguments):\n self.arguments=arguments\n self._signature=signature\n \n @property\n def signature(self):\n return self._signature\n \n @property\n def args(self):\n args=[]\n for param_name,param in self._signature.parameters.items():\n if param.kind in (_VAR_KEYWORD,_KEYWORD_ONLY):\n break\n \n try :\n arg=self.arguments[param_name]\n except KeyError:\n \n \n break\n else :\n if param.kind ==_VAR_POSITIONAL:\n \n args.extend(arg)\n else :\n \n args.append(arg)\n \n return tuple(args)\n \n @property\n def kwargs(self):\n kwargs={}\n kwargs_started=False\n for param_name,param in self._signature.parameters.items():\n if not kwargs_started:\n if param.kind in (_VAR_KEYWORD,_KEYWORD_ONLY):\n kwargs_started=True\n else :\n if param_name 
not in self.arguments:\n kwargs_started=True\n continue\n \n if not kwargs_started:\n continue\n \n try :\n arg=self.arguments[param_name]\n except KeyError:\n pass\n else :\n if param.kind ==_VAR_KEYWORD:\n \n kwargs.update(arg)\n else :\n \n kwargs[param_name]=arg\n \n return kwargs\n \n def apply_defaults(self):\n ''\n\n\n\n\n\n\n \n arguments=self.arguments\n new_arguments=[]\n for name,param in self._signature.parameters.items():\n try :\n new_arguments.append((name,arguments[name]))\n except KeyError:\n if param.default is not _empty:\n val=param.default\n elif param.kind is _VAR_POSITIONAL:\n val=()\n elif param.kind is _VAR_KEYWORD:\n val={}\n else :\n \n \n continue\n new_arguments.append((name,val))\n self.arguments=OrderedDict(new_arguments)\n \n def __eq__(self,other):\n if self is other:\n return True\n if not isinstance(other,BoundArguments):\n return NotImplemented\n return (self.signature ==other.signature and\n self.arguments ==other.arguments)\n \n def __setstate__(self,state):\n self._signature=state['_signature']\n self.arguments=state['arguments']\n \n def __getstate__(self):\n return {'_signature':self._signature,'arguments':self.arguments}\n \n def __repr__(self):\n args=[]\n for arg,value in self.arguments.items():\n args.append('{}={!r}'.format(arg,value))\n return '<{} ({})>'.format(self.__class__.__name__,', '.join(args))\n \n \nclass Signature:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n __slots__=('_return_annotation','_parameters')\n \n _parameter_cls=Parameter\n _bound_arguments_cls=BoundArguments\n \n empty=_empty\n \n def __init__(self,parameters=None ,*,return_annotation=_empty,\n __validate_parameters__=True ):\n ''\n\n \n \n if parameters is None :\n params=OrderedDict()\n else :\n if __validate_parameters__:\n params=OrderedDict()\n top_kind=_POSITIONAL_ONLY\n kind_defaults=False\n \n for idx,param in enumerate(parameters):\n kind=param.kind\n name=param.name\n \n if kind top_kind:\n kind_defaults=False\n top_kind=kind\n \n if kind in (_POSITIONAL_ONLY,_POSITIONAL_OR_KEYWORD):\n if param.default is _empty:\n if kind_defaults:\n \n \n \n msg='non-default argument follows default '\\\n 'argument'\n raise ValueError(msg)\n else :\n \n kind_defaults=True\n \n if name in params:\n msg='duplicate parameter name: {!r}'.format(name)\n raise ValueError(msg)\n \n params[name]=param\n else :\n params=OrderedDict(((param.name,param)\n for param in parameters))\n \n self._parameters=types.MappingProxyType(params)\n self._return_annotation=return_annotation\n \n @classmethod\n def from_function(cls,func):\n ''\n\n\n \n \n warnings.warn(\"inspect.Signature.from_function() is deprecated since \"\n \"Python 3.5, use Signature.from_callable()\",\n DeprecationWarning,stacklevel=2)\n return _signature_from_function(cls,func)\n \n @classmethod\n def from_builtin(cls,func):\n ''\n\n\n \n \n warnings.warn(\"inspect.Signature.from_builtin() is deprecated since \"\n \"Python 3.5, use Signature.from_callable()\",\n DeprecationWarning,stacklevel=2)\n return _signature_from_builtin(cls,func)\n \n @classmethod\n def from_callable(cls,obj,*,follow_wrapped=True ):\n ''\n return _signature_from_callable(obj,sigcls=cls,\n follow_wrapper_chains=follow_wrapped)\n \n @property\n def parameters(self):\n return self._parameters\n \n @property\n def return_annotation(self):\n return self._return_annotation\n \n def replace(self,*,parameters=_void,return_annotation=_void):\n ''\n\n\n \n \n if parameters is _void:\n parameters=self.parameters.values()\n \n if return_annotation is 
_void:\n return_annotation=self._return_annotation\n \n return type(self)(parameters,\n return_annotation=return_annotation)\n \n def _hash_basis(self):\n params=tuple(param for param in self.parameters.values()\n if param.kind !=_KEYWORD_ONLY)\n \n kwo_params={param.name:param for param in self.parameters.values()\n if param.kind ==_KEYWORD_ONLY}\n \n return params,kwo_params,self.return_annotation\n \n def __hash__(self):\n params,kwo_params,return_annotation=self._hash_basis()\n kwo_params=frozenset(kwo_params.values())\n return hash((params,kwo_params,return_annotation))\n \n def __eq__(self,other):\n if self is other:\n return True\n if not isinstance(other,Signature):\n return NotImplemented\n return self._hash_basis()==other._hash_basis()\n \n def _bind(self,args,kwargs,*,partial=False ):\n ''\n \n arguments=OrderedDict()\n \n parameters=iter(self.parameters.values())\n parameters_ex=()\n arg_vals=iter(args)\n \n while True :\n \n \n try :\n arg_val=next(arg_vals)\n except StopIteration:\n \n try :\n param=next(parameters)\n except StopIteration:\n \n \n break\n else :\n if param.kind ==_VAR_POSITIONAL:\n \n \n break\n elif param.name in kwargs:\n if param.kind ==_POSITIONAL_ONLY:\n msg='{arg!r} parameter is positional only, '\\\n 'but was passed as a keyword'\n msg=msg.format(arg=param.name)\n raise TypeError(msg)from None\n parameters_ex=(param,)\n break\n elif (param.kind ==_VAR_KEYWORD or\n param.default is not _empty):\n \n \n \n parameters_ex=(param,)\n break\n else :\n \n \n if partial:\n parameters_ex=(param,)\n break\n else :\n msg='missing a required argument: {arg!r}'\n msg=msg.format(arg=param.name)\n raise TypeError(msg)from None\n else :\n \n try :\n param=next(parameters)\n except StopIteration:\n raise TypeError('too many positional arguments')from None\n else :\n if param.kind in (_VAR_KEYWORD,_KEYWORD_ONLY):\n \n \n raise TypeError(\n 'too many positional arguments')from None\n \n if param.kind ==_VAR_POSITIONAL:\n \n \n \n values=[arg_val]\n values.extend(arg_vals)\n arguments[param.name]=tuple(values)\n break\n \n if param.name in kwargs:\n raise TypeError(\n 'multiple values for argument {arg!r}'.format(\n arg=param.name))from None\n \n arguments[param.name]=arg_val\n \n \n \n kwargs_param=None\n for param in itertools.chain(parameters_ex,parameters):\n if param.kind ==_VAR_KEYWORD:\n \n kwargs_param=param\n continue\n \n if param.kind ==_VAR_POSITIONAL:\n \n \n \n continue\n \n param_name=param.name\n try :\n arg_val=kwargs.pop(param_name)\n except KeyError:\n \n \n \n \n if (not partial and param.kind !=_VAR_POSITIONAL and\n param.default is _empty):\n raise TypeError('missing a required argument: {arg!r}'.\\\n format(arg=param_name))from None\n \n else :\n if param.kind ==_POSITIONAL_ONLY:\n \n \n \n raise TypeError('{arg!r} parameter is positional only, '\n 'but was passed as a keyword'.\\\n format(arg=param.name))\n \n arguments[param_name]=arg_val\n \n if kwargs:\n if kwargs_param is not None :\n \n arguments[kwargs_param.name]=kwargs\n else :\n raise TypeError(\n 'got an unexpected keyword argument {arg!r}'.format(\n arg=next(iter(kwargs))))\n \n return self._bound_arguments_cls(self,arguments)\n \n def bind(self,/,*args,**kwargs):\n ''\n\n\n \n return self._bind(args,kwargs)\n \n def bind_partial(self,/,*args,**kwargs):\n ''\n\n\n \n return self._bind(args,kwargs,partial=True )\n \n def __reduce__(self):\n return (type(self),\n (tuple(self._parameters.values()),),\n {'_return_annotation':self._return_annotation})\n \n def __setstate__(self,state):\n 
self._return_annotation=state['_return_annotation']\n \n def __repr__(self):\n return '<{} {}>'.format(self.__class__.__name__,self)\n \n def __str__(self):\n result=[]\n render_pos_only_separator=False\n render_kw_only_separator=True\n for param in self.parameters.values():\n formatted=str(param)\n \n kind=param.kind\n \n if kind ==_POSITIONAL_ONLY:\n render_pos_only_separator=True\n elif render_pos_only_separator:\n \n \n result.append('/')\n render_pos_only_separator=False\n \n if kind ==_VAR_POSITIONAL:\n \n \n render_kw_only_separator=False\n elif kind ==_KEYWORD_ONLY and render_kw_only_separator:\n \n \n \n result.append('*')\n \n \n render_kw_only_separator=False\n \n result.append(formatted)\n \n if render_pos_only_separator:\n \n \n result.append('/')\n \n rendered='({})'.format(', '.join(result))\n \n if self.return_annotation is not _empty:\n anno=formatannotation(self.return_annotation)\n rendered +=' -> {}'.format(anno)\n \n return rendered\n \n \ndef signature(obj,*,follow_wrapped=True ):\n ''\n return Signature.from_callable(obj,follow_wrapped=follow_wrapped)\n \n \ndef _main():\n ''\n import argparse\n import importlib\n \n parser=argparse.ArgumentParser()\n parser.add_argument(\n 'object',\n help=\"The object to be analysed. \"\n \"It supports the 'module:qualname' syntax\")\n parser.add_argument(\n '-d','--details',action='store_true',\n help='Display info about the module rather than its source code')\n \n args=parser.parse_args()\n \n target=args.object\n mod_name,has_attrs,attrs=target.partition(\":\")\n try :\n obj=module=importlib.import_module(mod_name)\n except Exception as exc:\n msg=\"Failed to import {} ({}: {})\".format(mod_name,\n type(exc).__name__,\n exc)\n print(msg,file=sys.stderr)\n sys.exit(2)\n \n if has_attrs:\n parts=attrs.split(\".\")\n obj=module\n for part in parts:\n obj=getattr(obj,part)\n \n if module.__name__ in sys.builtin_module_names:\n print(\"Can't get info for builtin modules.\",file=sys.stderr)\n sys.exit(1)\n \n if args.details:\n print('Target: {}'.format(target))\n print('Origin: {}'.format(getsourcefile(module)))\n print('Cached: {}'.format(module.__cached__))\n if obj is module:\n print('Loader: {}'.format(repr(module.__loader__)))\n if hasattr(module,'__path__'):\n print('Submodule search path: {}'.format(module.__path__))\n else :\n try :\n __,lineno=findsource(obj)\n except Exception:\n pass\n else :\n print('Line: {}'.format(lineno))\n \n print('\\n')\n else :\n print(getsource(obj))\n \n \nif __name__ ==\"__main__\":\n _main()\n", ["abc", "argparse", "ast", "builtins", "collections", "collections.abc", "dis", "enum", "functools", "importlib", "importlib.machinery", "itertools", "linecache", "operator", "os", "re", "sys", "token", "tokenize", "types", "warnings"]], "interpreter": [".py", "import sys\nimport tb as traceback\n\nfrom browser import console,document,window,html,DOMNode\nfrom browser.widgets.dialog import Dialog\n\n_credits=\"\"\" Thanks to CWI, CNRI, BeOpen.com, Zope Corporation and a cast of thousands\n for supporting Python development. 
See www.python.org for more information.\"\"\"\n\n_copyright=\"\"\"Copyright (c) 2012, Pierre Quentel pierre.quentel@gmail.com\nAll Rights Reserved.\n\nCopyright (c) 2001-2013 Python Software Foundation.\nAll Rights Reserved.\n\nCopyright (c) 2000 BeOpen.com.\nAll Rights Reserved.\n\nCopyright (c) 1995-2001 Corporation for National Research Initiatives.\nAll Rights Reserved.\n\nCopyright (c) 1991-1995 Stichting Mathematisch Centrum, Amsterdam.\nAll Rights Reserved.\"\"\"\n\n_license=\"\"\"Copyright (c) 2012, Pierre Quentel pierre.quentel@gmail.com\nAll rights reserved.\n\nRedistribution and use in source and binary forms, with or without\nmodification, are permitted provided that the following conditions are met:\n\nRedistributions of source code must retain the above copyright notice, this\nlist of conditions and the following disclaimer. Redistributions in binary\nform must reproduce the above copyright notice, this list of conditions and\nthe following disclaimer in the documentation and/or other materials provided\nwith the distribution.\nNeither the name of the nor the names of its contributors may\nbe used to endorse or promote products derived from this software without\nspecific prior written permission.\n\nTHIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\"\nAND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\nIMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE\nARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE\nLIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR\nCONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF\nSUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS\nINTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN\nCONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)\nARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE\nPOSSIBILITY OF SUCH DAMAGE.\n\"\"\"\n\nclass Info:\n\n def __init__(self,msg):\n self.msg=msg\n \n def __repr__(self):\n return self.msg\n \n \n \neditor_ns={\n'credits':Info(_credits),\n'copyright':Info(_copyright),\n'license':Info(_license)\n}\n\n\nstyle_sheet=\"\"\"\n.brython-interpreter {\n background-color: #000;\n color: #fff;\n font-family: consolas, courier;\n}\n\"\"\"\n\n\nclass Trace:\n\n def __init__(self):\n self.buf=\"\"\n \n def write(self,data):\n self.buf +=str(data)\n \n def format(self):\n ''\n lines=self.buf.split(\"\\n\")\n stripped=[lines[0]]\n for i in range(1,len(lines),2):\n if __file__ in lines[i]:\n continue\n stripped +=lines[i:i+2]\n return \"\\n\".join(stripped)\n \n \nclass Interpreter:\n ''\n \n def __init__(self,elt_id=None ,title=\"Interactive Interpreter\",\n globals=None ,locals=None ,\n rows=30,cols=84,default_css=True ):\n ''\n\n\n\n\n \n if default_css:\n \n for stylesheet in document.styleSheets:\n if stylesheet.ownerNode.id ==\"brython-interpreter\":\n break\n else :\n document <=html.STYLE(style_sheet,id=\"brython-interpreter\")\n \n if elt_id is None :\n self.dialog=Dialog(title=title,top=10,left=10,\n default_css=default_css)\n self.zone=html.TEXTAREA(rows=rows,cols=cols,\n Class=\"brython-interpreter\")\n self.dialog.panel <=self.zone\n else :\n if isinstance(elt_id,str):\n try :\n elt=document[elt_id]\n if elt.tagName !=\"TEXTAREA\":\n raise ValueError(\n f\"element {elt_id} is a {elt.tagName}, \"+\n \"not a TEXTAREA\")\n self.zone=elt\n except KeyError:\n raise KeyError(f\"no element with id '{elt_id}'\")\n 
elif isinstance(elt_id,DOMNode):\n if elt_id.tagName ==\"TEXTAREA\":\n self.zone=elt_id\n else :\n raise ValueError(\"element is not a TEXTAREA\")\n else :\n raise ValueError(\"element should be a string or \"+\n f\"a TEXTAREA, got '{elt_id.__class__.__name__}'\")\n v=sys.implementation.version\n self.zone.value=(f\"Brython {v[0]}.{v[1]}.{v[2]} on \"+\n f\"{window.navigator.appName} {window.navigator.appVersion}\\n>>> \")\n self.cursor_to_end()\n self._status=\"main\"\n self.current=0\n self.history=[]\n \n self.globals={}if globals is None else globals\n self.globals.update(editor_ns)\n self.locals=self.globals if locals is None else locals\n \n self.buffer=''\n sys.stdout.write=sys.stderr.write=self.write\n sys.stdout.__len__=sys.stderr.__len__=lambda :len(self.buffer)\n \n self.zone.bind('keypress',self.keypress)\n self.zone.bind('keydown',self.keydown)\n self.zone.bind('mouseup',self.mouseup)\n \n self.zone.focus()\n \n def cursor_to_end(self,*args):\n pos=len(self.zone.value)\n self.zone.setSelectionRange(pos,pos)\n self.zone.scrollTop=self.zone.scrollHeight\n \n def get_col(self):\n \n sel=self.zone.selectionStart\n lines=self.zone.value.split('\\n')\n for line in lines[:-1]:\n sel -=len(line)+1\n return sel\n \n def keypress(self,event):\n if event.keyCode ==9:\n event.preventDefault()\n self.zone.value +=\" \"\n elif event.keyCode ==13:\n sel_start=self.zone.selectionStart\n sel_end=self.zone.selectionEnd\n if sel_end >sel_start:\n \n document.execCommand(\"copy\")\n self.cursor_to_end()\n event.preventDefault()\n return\n src=self.zone.value\n if self._status ==\"main\":\n currentLine=src[src.rfind('>>>')+4:]\n elif self._status ==\"3string\":\n currentLine=src[src.rfind('>>>')+4:]\n currentLine=currentLine.replace('\\n... ','\\n')\n else :\n currentLine=src[src.rfind('...')+4:]\n if self._status =='main'and not currentLine.strip():\n self.zone.value +='\\n>>> '\n event.preventDefault()\n return\n self.zone.value +='\\n'\n self.history.append(currentLine)\n self.current=len(self.history)\n if self._status in [\"main\",\"3string\"]:\n try :\n _=editor_ns['_']=eval(currentLine,\n self.globals,\n self.locals)\n self.flush()\n if _ is not None :\n self.write(repr(_)+'\\n')\n self.flush()\n self.zone.value +='>>> '\n self._status=\"main\"\n except IndentationError:\n self.zone.value +='... '\n self._status=\"block\"\n except SyntaxError as msg:\n if str(msg)=='invalid syntax : triple string end not found'or\\\n str(msg).startswith('Unbalanced bracket'):\n self.zone.value +='... '\n self._status=\"3string\"\n elif str(msg)=='eval() argument must be an expression':\n try :\n exec(currentLine,\n self.globals,\n self.locals)\n except :\n self.print_tb()\n self.flush()\n self.zone.value +='>>> '\n self._status=\"main\"\n elif str(msg)=='decorator expects function':\n self.zone.value +='... '\n self._status=\"block\"\n else :\n self.syntax_error(msg.args)\n self.zone.value +='>>> '\n self._status=\"main\"\n except :\n \n \n \n self.print_tb()\n self.zone.value +='>>> '\n self._status=\"main\"\n elif currentLine ==\"\":\n block=src[src.rfind('\\n>>>')+5:].splitlines()\n block=[block[0]]+[b[4:]for b in block[1:]]\n block_src='\\n'.join(block)\n \n self._status=\"main\"\n try :\n _=exec(block_src,\n self.globals,\n self.locals)\n if _ is not None :\n print(repr(_))\n except :\n self.print_tb()\n self.flush()\n self.zone.value +='>>> '\n else :\n self.zone.value +='... 
'\n \n self.cursor_to_end()\n event.preventDefault()\n \n def keydown(self,event):\n if event.keyCode ==37:\n sel=self.get_col()\n if sel <5:\n event.preventDefault()\n event.stopPropagation()\n elif event.keyCode ==36:\n pos=self.zone.selectionStart\n col=self.get_col()\n self.zone.setSelectionRange(pos -col+4,pos -col+4)\n event.preventDefault()\n elif event.keyCode ==38:\n if self.current >0:\n pos=self.zone.selectionStart\n col=self.get_col()\n \n self.zone.value=self.zone.value[:pos -col+4]\n self.current -=1\n self.zone.value +=self.history[self.current]\n event.preventDefault()\n elif event.keyCode ==40:\n if self.current \",\">\")\n frames_sel <=html.OPTION(name)\n frame=frame.f_back\n frames_sel.bind(\"change\",self.change_frame)\n frame_div=html.DIV(\"Frame \"+frames_sel)\n panel_style=window.getComputedStyle(self.dialog.panel)\n frame_div.style.paddingLeft=panel_style.paddingLeft\n frame_div.style.paddingTop=panel_style.paddingTop\n self.dialog.insertBefore(frame_div,self.dialog.panel)\n \n def change_frame(self,ev):\n self.globals,self.locals=self.frames[ev.target.selectedIndex]\n \n", ["browser", "browser.html", "browser.widgets.dialog", "sys", "tb"]], "io": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n__author__=(\"Guido van Rossum , \"\n\"Mike Verdone , \"\n\"Mark Russell , \"\n\"Antoine Pitrou , \"\n\"Amaury Forgeot d'Arc , \"\n\"Benjamin Peterson \")\n\n__all__=[\"BlockingIOError\",\"open\",\"open_code\",\"IOBase\",\"RawIOBase\",\n\"FileIO\",\"BytesIO\",\"StringIO\",\"BufferedIOBase\",\n\"BufferedReader\",\"BufferedWriter\",\"BufferedRWPair\",\n\"BufferedRandom\",\"TextIOBase\",\"TextIOWrapper\",\n\"UnsupportedOperation\",\"SEEK_SET\",\"SEEK_CUR\",\"SEEK_END\"]\n\n\nimport _io\nimport abc\n\nfrom _io import (DEFAULT_BUFFER_SIZE,BlockingIOError,UnsupportedOperation,\nopen,open_code,FileIO,BytesIO,StringIO,BufferedReader,\nBufferedWriter,BufferedRWPair,BufferedRandom,\nIncrementalNewlineDecoder,TextIOWrapper)\n\nOpenWrapper=_io.open\n\n\nUnsupportedOperation.__module__=\"io\"\n\n\nSEEK_SET=0\nSEEK_CUR=1\nSEEK_END=2\n\n\n\n\nclass IOBase(_io._IOBase,metaclass=abc.ABCMeta):\n __doc__=_io._IOBase.__doc__\n \nclass RawIOBase(_io._RawIOBase,IOBase):\n __doc__=_io._RawIOBase.__doc__\n \nclass BufferedIOBase(_io._BufferedIOBase,IOBase):\n __doc__=_io._BufferedIOBase.__doc__\n \nclass TextIOBase(_io._TextIOBase,IOBase):\n __doc__=_io._TextIOBase.__doc__\n \nRawIOBase.register(FileIO)\n\nfor klass in (BytesIO,BufferedReader,BufferedWriter,BufferedRandom,\nBufferedRWPair):\n BufferedIOBase.register(klass)\n \nfor klass in (StringIO,TextIOWrapper):\n TextIOBase.register(klass)\ndel klass\n\ntry :\n from _io import _WindowsConsoleIO\nexcept ImportError:\n pass\nelse :\n RawIOBase.register(_WindowsConsoleIO)\n", ["_io", "abc"]], "ipaddress": [".py", "\n\n\n\"\"\"A fast, lightweight IPv4/IPv6 manipulation library in Python.\n\nThis library is used to create/poke/manipulate IPv4 and IPv6 addresses\nand networks.\n\n\"\"\"\n\n__version__='1.0'\n\n\nimport functools\n\nIPV4LENGTH=32\nIPV6LENGTH=128\n\nclass AddressValueError(ValueError):\n ''\n \n \nclass NetmaskValueError(ValueError):\n ''\n \n \ndef ip_address(address):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n try :\n return IPv4Address(address)\n except (AddressValueError,NetmaskValueError):\n pass\n \n try :\n return IPv6Address(address)\n except (AddressValueError,NetmaskValueError):\n pass\n \n raise ValueError('%r does not appear to be an IPv4 or IPv6 address'%\n address)\n \n \ndef 
ip_network(address,strict=True ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n try :\n return IPv4Network(address,strict)\n except (AddressValueError,NetmaskValueError):\n pass\n \n try :\n return IPv6Network(address,strict)\n except (AddressValueError,NetmaskValueError):\n pass\n \n raise ValueError('%r does not appear to be an IPv4 or IPv6 network'%\n address)\n \n \ndef ip_interface(address):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n try :\n return IPv4Interface(address)\n except (AddressValueError,NetmaskValueError):\n pass\n \n try :\n return IPv6Interface(address)\n except (AddressValueError,NetmaskValueError):\n pass\n \n raise ValueError('%r does not appear to be an IPv4 or IPv6 interface'%\n address)\n \n \ndef v4_int_to_packed(address):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n try :\n return address.to_bytes(4,'big')\n except OverflowError:\n raise ValueError(\"Address negative or too large for IPv4\")\n \n \ndef v6_int_to_packed(address):\n ''\n\n\n\n\n\n\n\n \n try :\n return address.to_bytes(16,'big')\n except OverflowError:\n raise ValueError(\"Address negative or too large for IPv6\")\n \n \ndef _split_optional_netmask(address):\n ''\n addr=str(address).split('/')\n if len(addr)>2:\n raise AddressValueError(\"Only one '/' permitted in %r\"%address)\n return addr\n \n \ndef _find_address_range(addresses):\n ''\n\n\n\n\n\n\n\n \n it=iter(addresses)\n first=last=next(it)\n for ip in it:\n if ip._ip !=last._ip+1:\n yield first,last\n first=ip\n last=ip\n yield first,last\n \n \ndef _count_righthand_zero_bits(number,bits):\n ''\n\n\n\n\n\n\n\n\n \n if number ==0:\n return bits\n return min(bits,(~number&(number -1)).bit_length())\n \n \ndef summarize_address_range(first,last):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if (not (isinstance(first,_BaseAddress)and\n isinstance(last,_BaseAddress))):\n raise TypeError('first and last must be IP addresses, not networks')\n if first.version !=last.version:\n raise TypeError(\"%s and %s are not of the same version\"%(\n first,last))\n if first >last:\n raise ValueError('last IP address must be greater than first')\n \n if first.version ==4:\n ip=IPv4Network\n elif first.version ==6:\n ip=IPv6Network\n else :\n raise ValueError('unknown IP version')\n \n ip_bits=first._max_prefixlen\n first_int=first._ip\n last_int=last._ip\n while first_int <=last_int:\n nbits=min(_count_righthand_zero_bits(first_int,ip_bits),\n (last_int -first_int+1).bit_length()-1)\n net=ip((first_int,ip_bits -nbits))\n yield net\n first_int +=1 <=net.broadcast_address:\n continue\n yield net\n last=net\n \n \ndef collapse_addresses(addresses):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n addrs=[]\n ips=[]\n nets=[]\n \n \n for ip in addresses:\n if isinstance(ip,_BaseAddress):\n if ips and ips[-1]._version !=ip._version:\n raise TypeError(\"%s and %s are not of the same version\"%(\n ip,ips[-1]))\n ips.append(ip)\n elif ip._prefixlen ==ip._max_prefixlen:\n if ips and ips[-1]._version !=ip._version:\n raise TypeError(\"%s and %s are not of the same version\"%(\n ip,ips[-1]))\n try :\n ips.append(ip.ip)\n except AttributeError:\n ips.append(ip.network_address)\n else :\n if nets and nets[-1]._version !=ip._version:\n raise TypeError(\"%s and %s are not of the same version\"%(\n ip,nets[-1]))\n nets.append(ip)\n \n \n ips=sorted(set(ips))\n \n \n if ips:\n for first,last in _find_address_range(ips):\n addrs.extend(summarize_address_range(first,last))\n \n return _collapse_addresses_internal(addrs+nets)\n \n \ndef get_mixed_type_key(obj):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n 
\n if isinstance(obj,_BaseNetwork):\n return obj._get_networks_key()\n elif isinstance(obj,_BaseAddress):\n return obj._get_address_key()\n return NotImplemented\n \n \nclass _IPAddressBase:\n\n ''\n \n __slots__=()\n \n @property\n def exploded(self):\n ''\n return self._explode_shorthand_ip_string()\n \n @property\n def compressed(self):\n ''\n return str(self)\n \n @property\n def reverse_pointer(self):\n ''\n\n\n\n\n\n \n return self._reverse_pointer()\n \n @property\n def version(self):\n msg='%200s has no version specified'%(type(self),)\n raise NotImplementedError(msg)\n \n def _check_int_address(self,address):\n if address <0:\n msg=\"%d (< 0) is not permitted as an IPv%d address\"\n raise AddressValueError(msg %(address,self._version))\n if address >self._ALL_ONES:\n msg=\"%d (>= 2**%d) is not permitted as an IPv%d address\"\n raise AddressValueError(msg %(address,self._max_prefixlen,\n self._version))\n \n def _check_packed_address(self,address,expected_len):\n address_len=len(address)\n if address_len !=expected_len:\n msg=\"%r (len %d != %d) is not permitted as an IPv%d address\"\n raise AddressValueError(msg %(address,address_len,\n expected_len,self._version))\n \n @classmethod\n def _ip_int_from_prefix(cls,prefixlen):\n ''\n\n\n\n\n\n\n\n \n return cls._ALL_ONES ^(cls._ALL_ONES >>prefixlen)\n \n @classmethod\n def _prefix_from_ip_int(cls,ip_int):\n ''\n\n\n\n\n\n\n\n\n\n \n trailing_zeroes=_count_righthand_zero_bits(ip_int,\n cls._max_prefixlen)\n prefixlen=cls._max_prefixlen -trailing_zeroes\n leading_ones=ip_int >>trailing_zeroes\n all_ones=(1 <1:\n return address\n return address[0],cls._max_prefixlen\n \n def __reduce__(self):\n return self.__class__,(str(self),)\n \n \n_address_fmt_re=None\n\n@functools.total_ordering\nclass _BaseAddress(_IPAddressBase):\n\n ''\n\n\n\n \n \n __slots__=()\n \n def __int__(self):\n return self._ip\n \n def __eq__(self,other):\n try :\n return (self._ip ==other._ip\n and self._version ==other._version)\n except AttributeError:\n return NotImplemented\n \n def __lt__(self,other):\n if not isinstance(other,_BaseAddress):\n return NotImplemented\n if self._version !=other._version:\n raise TypeError('%s and %s are not of the same version'%(\n self,other))\n if self._ip !=other._ip:\n return self._ip =0:\n if network+n >broadcast:\n raise IndexError('address out of range')\n return self._address_class(network+n)\n else :\n n +=1\n if broadcast+n other.network_address:\n return 1\n \n if self.netmask other.netmask:\n return 1\n return 0\n \n def _get_networks_key(self):\n ''\n\n\n\n\n\n \n return (self._version,self.network_address,self.netmask)\n \n def subnets(self,prefixlen_diff=1,new_prefix=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if self._prefixlen ==self._max_prefixlen:\n yield self\n return\n \n if new_prefix is not None :\n if new_prefix 0')\n new_prefixlen=self._prefixlen+prefixlen_diff\n \n if new_prefixlen >self._max_prefixlen:\n raise ValueError(\n 'prefix length diff %d is invalid for netblock %s'%(\n new_prefixlen,self))\n \n start=int(self.network_address)\n end=int(self.broadcast_address)+1\n step=(int(self.hostmask)+1)>>prefixlen_diff\n for new_addr in range(start,end,step):\n current=self.__class__((new_addr,new_prefixlen))\n yield current\n \n def supernet(self,prefixlen_diff=1,new_prefix=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if self._prefixlen ==0:\n return self\n \n if new_prefix is not None :\n if new_prefix >self._prefixlen:\n raise ValueError('new prefix must be shorter')\n if 
prefixlen_diff !=1:\n raise ValueError('cannot set prefixlen_diff and new_prefix')\n prefixlen_diff=self._prefixlen -new_prefix\n \n new_prefixlen=self.prefixlen -prefixlen_diff\n if new_prefixlen <0:\n raise ValueError(\n 'current prefixlen is %d, cannot have a prefixlen_diff of %d'%\n (self.prefixlen,prefixlen_diff))\n return self.__class__((\n int(self.network_address)&(int(self.netmask)<=a.broadcast_address)\n except AttributeError:\n raise TypeError(f\"Unable to test subnet containment \"\n f\"between {a} and {b}\")\n \n def subnet_of(self,other):\n ''\n return self._is_subnet_of(self,other)\n \n def supernet_of(self,other):\n ''\n return self._is_subnet_of(other,self)\n \n @property\n def is_reserved(self):\n ''\n\n\n\n\n\n \n return (self.network_address.is_reserved and\n self.broadcast_address.is_reserved)\n \n @property\n def is_link_local(self):\n ''\n\n\n\n\n \n return (self.network_address.is_link_local and\n self.broadcast_address.is_link_local)\n \n @property\n def is_private(self):\n ''\n\n\n\n\n\n \n return (self.network_address.is_private and\n self.broadcast_address.is_private)\n \n @property\n def is_global(self):\n ''\n\n\n\n\n\n \n return not self.is_private\n \n @property\n def is_unspecified(self):\n ''\n\n\n\n\n\n \n return (self.network_address.is_unspecified and\n self.broadcast_address.is_unspecified)\n \n @property\n def is_loopback(self):\n ''\n\n\n\n\n\n \n return (self.network_address.is_loopback and\n self.broadcast_address.is_loopback)\n \nclass _BaseV4:\n\n ''\n\n\n\n\n \n \n __slots__=()\n _version=4\n \n _ALL_ONES=(2 **IPV4LENGTH)-1\n \n _max_prefixlen=IPV4LENGTH\n \n \n _netmask_cache={}\n \n def _explode_shorthand_ip_string(self):\n return str(self)\n \n @classmethod\n def _make_netmask(cls,arg):\n ''\n\n\n\n\n\n \n if arg not in cls._netmask_cache:\n if isinstance(arg,int):\n prefixlen=arg\n if not (0 <=prefixlen <=cls._max_prefixlen):\n cls._report_invalid_netmask(prefixlen)\n else :\n try :\n \n prefixlen=cls._prefix_from_prefix_string(arg)\n except NetmaskValueError:\n \n \n prefixlen=cls._prefix_from_ip_string(arg)\n netmask=IPv4Address(cls._ip_int_from_prefix(prefixlen))\n cls._netmask_cache[arg]=netmask,prefixlen\n return cls._netmask_cache[arg]\n \n @classmethod\n def _ip_int_from_string(cls,ip_str):\n ''\n\n\n\n\n\n\n\n\n\n\n \n if not ip_str:\n raise AddressValueError('Address cannot be empty')\n \n octets=ip_str.split('.')\n if len(octets)!=4:\n raise AddressValueError(\"Expected 4 octets in %r\"%ip_str)\n \n try :\n return int.from_bytes(map(cls._parse_octet,octets),'big')\n except ValueError as exc:\n raise AddressValueError(\"%s in %r\"%(exc,ip_str))from None\n \n @classmethod\n def _parse_octet(cls,octet_str):\n ''\n\n\n\n\n\n\n\n\n\n\n \n if not octet_str:\n raise ValueError(\"Empty octet not permitted\")\n \n if not (octet_str.isascii()and octet_str.isdigit()):\n msg=\"Only decimal digits permitted in %r\"\n raise ValueError(msg %octet_str)\n \n \n if len(octet_str)>3:\n msg=\"At most 3 characters permitted in %r\"\n raise ValueError(msg %octet_str)\n \n octet_int=int(octet_str,10)\n if octet_int >255:\n raise ValueError(\"Octet %d (> 255) not permitted\"%octet_int)\n return octet_int\n \n @classmethod\n def _string_from_ip_int(cls,ip_int):\n ''\n\n\n\n\n\n\n\n \n return '.'.join(map(str,ip_int.to_bytes(4,'big')))\n \n def _reverse_pointer(self):\n ''\n\n\n\n \n reverse_octets=str(self).split('.')[::-1]\n return '.'.join(reverse_octets)+'.in-addr.arpa'\n \n @property\n def max_prefixlen(self):\n return self._max_prefixlen\n \n 
@property\n def version(self):\n return self._version\n \n \nclass IPv4Address(_BaseV4,_BaseAddress):\n\n ''\n \n __slots__=('_ip','__weakref__')\n \n def __init__(self,address):\n \n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n if isinstance(address,int):\n self._check_int_address(address)\n self._ip=address\n return\n \n \n if isinstance(address,bytes):\n self._check_packed_address(address,4)\n self._ip=int.from_bytes(address,'big')\n return\n \n \n \n addr_str=str(address)\n if '/'in addr_str:\n raise AddressValueError(\"Unexpected '/' in %r\"%address)\n self._ip=self._ip_int_from_string(addr_str)\n \n @property\n def packed(self):\n ''\n return v4_int_to_packed(self._ip)\n \n @property\n def is_reserved(self):\n ''\n\n\n\n\n\n \n return self in self._constants._reserved_network\n \n @property\n @functools.lru_cache()\n def is_private(self):\n ''\n\n\n\n\n\n \n return any(self in net for net in self._constants._private_networks)\n \n @property\n @functools.lru_cache()\n def is_global(self):\n return self not in self._constants._public_network and not self.is_private\n \n @property\n def is_multicast(self):\n ''\n\n\n\n\n\n \n return self in self._constants._multicast_network\n \n @property\n def is_unspecified(self):\n ''\n\n\n\n\n\n \n return self ==self._constants._unspecified_address\n \n @property\n def is_loopback(self):\n ''\n\n\n\n\n \n return self in self._constants._loopback_network\n \n @property\n def is_link_local(self):\n ''\n\n\n\n\n \n return self in self._constants._linklocal_network\n \n \nclass IPv4Interface(IPv4Address):\n\n def __init__(self,address):\n addr,mask=self._split_addr_prefix(address)\n \n IPv4Address.__init__(self,addr)\n self.network=IPv4Network((addr,mask),strict=False )\n self.netmask=self.network.netmask\n self._prefixlen=self.network._prefixlen\n \n @functools.cached_property\n def hostmask(self):\n return self.network.hostmask\n \n def __str__(self):\n return '%s/%d'%(self._string_from_ip_int(self._ip),\n self._prefixlen)\n \n def __eq__(self,other):\n address_equal=IPv4Address.__eq__(self,other)\n if address_equal is NotImplemented or not address_equal:\n return address_equal\n try :\n return self.network ==other.network\n except AttributeError:\n \n \n \n return False\n \n def __lt__(self,other):\n address_less=IPv4Address.__lt__(self,other)\n if address_less is NotImplemented:\n return NotImplemented\n try :\n return (self.network >16)&0xFFFF))\n parts.append('%x'%(ipv4_int&0xFFFF))\n \n \n \n \n _max_parts=cls._HEXTET_COUNT+1\n if len(parts)>_max_parts:\n msg=\"At most %d colons permitted in %r\"%(_max_parts -1,ip_str)\n raise AddressValueError(msg)\n \n \n \n skip_index=None\n for i in range(1,len(parts)-1):\n if not parts[i]:\n if skip_index is not None :\n \n msg=\"At most one '::' permitted in %r\"%ip_str\n raise AddressValueError(msg)\n skip_index=i\n \n \n \n if skip_index is not None :\n \n parts_hi=skip_index\n parts_lo=len(parts)-skip_index -1\n if not parts[0]:\n parts_hi -=1\n if parts_hi:\n msg=\"Leading ':' only permitted as part of '::' in %r\"\n raise AddressValueError(msg %ip_str)\n if not parts[-1]:\n parts_lo -=1\n if parts_lo:\n msg=\"Trailing ':' only permitted as part of '::' in %r\"\n raise AddressValueError(msg %ip_str)\n parts_skipped=cls._HEXTET_COUNT -(parts_hi+parts_lo)\n if parts_skipped <1:\n msg=\"Expected at most %d other parts with '::' in %r\"\n raise AddressValueError(msg %(cls._HEXTET_COUNT -1,ip_str))\n else :\n \n \n \n if len(parts)!=cls._HEXTET_COUNT:\n msg=\"Exactly %d parts expected without '::' in %r\"\n raise 
AddressValueError(msg %(cls._HEXTET_COUNT,ip_str))\n if not parts[0]:\n msg=\"Leading ':' only permitted as part of '::' in %r\"\n raise AddressValueError(msg %ip_str)\n if not parts[-1]:\n msg=\"Trailing ':' only permitted as part of '::' in %r\"\n raise AddressValueError(msg %ip_str)\n parts_hi=len(parts)\n parts_lo=0\n parts_skipped=0\n \n try :\n \n ip_int=0\n for i in range(parts_hi):\n ip_int <<=16\n ip_int |=cls._parse_hextet(parts[i])\n ip_int <<=16 *parts_skipped\n for i in range(-parts_lo,0):\n ip_int <<=16\n ip_int |=cls._parse_hextet(parts[i])\n return ip_int\n except ValueError as exc:\n raise AddressValueError(\"%s in %r\"%(exc,ip_str))from None\n \n @classmethod\n def _parse_hextet(cls,hextet_str):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n \n if not cls._HEX_DIGITS.issuperset(hextet_str):\n raise ValueError(\"Only hex digits permitted in %r\"%hextet_str)\n \n \n if len(hextet_str)>4:\n msg=\"At most 4 characters permitted in %r\"\n raise ValueError(msg %hextet_str)\n \n return int(hextet_str,16)\n \n @classmethod\n def _compress_hextets(cls,hextets):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n best_doublecolon_start=-1\n best_doublecolon_len=0\n doublecolon_start=-1\n doublecolon_len=0\n for index,hextet in enumerate(hextets):\n if hextet =='0':\n doublecolon_len +=1\n if doublecolon_start ==-1:\n \n doublecolon_start=index\n if doublecolon_len >best_doublecolon_len:\n \n best_doublecolon_len=doublecolon_len\n best_doublecolon_start=doublecolon_start\n else :\n doublecolon_len=0\n doublecolon_start=-1\n \n if best_doublecolon_len >1:\n best_doublecolon_end=(best_doublecolon_start+\n best_doublecolon_len)\n \n if best_doublecolon_end ==len(hextets):\n hextets +=['']\n hextets[best_doublecolon_start:best_doublecolon_end]=['']\n \n if best_doublecolon_start ==0:\n hextets=['']+hextets\n \n return hextets\n \n @classmethod\n def _string_from_ip_int(cls,ip_int=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n \n if ip_int is None :\n ip_int=int(cls._ip)\n \n if ip_int >cls._ALL_ONES:\n raise ValueError('IPv6 address is too large')\n \n hex_str='%032x'%ip_int\n hextets=['%x'%int(hex_str[x:x+4],16)for x in range(0,32,4)]\n \n hextets=cls._compress_hextets(hextets)\n return ':'.join(hextets)\n \n def _explode_shorthand_ip_string(self):\n ''\n\n\n\n\n\n\n\n \n if isinstance(self,IPv6Network):\n ip_str=str(self.network_address)\n elif isinstance(self,IPv6Interface):\n ip_str=str(self.ip)\n else :\n ip_str=str(self)\n \n ip_int=self._ip_int_from_string(ip_str)\n hex_str='%032x'%ip_int\n parts=[hex_str[x:x+4]for x in range(0,32,4)]\n if isinstance(self,(_BaseNetwork,IPv6Interface)):\n return '%s/%d'%(':'.join(parts),self._prefixlen)\n return ':'.join(parts)\n \n def _reverse_pointer(self):\n ''\n\n\n\n \n reverse_chars=self.exploded[::-1].replace(':','')\n return '.'.join(reverse_chars)+'.ip6.arpa'\n \n @staticmethod\n def _split_scope_id(ip_str):\n ''\n\n\n\n\n\n\n\n\n\n \n addr,sep,scope_id=ip_str.partition('%')\n if not sep:\n scope_id=None\n elif not scope_id or '%'in scope_id:\n raise AddressValueError('Invalid IPv6 address: \"%r\"'%ip_str)\n return addr,scope_id\n \n @property\n def max_prefixlen(self):\n return self._max_prefixlen\n \n @property\n def version(self):\n return self._version\n \n \nclass IPv6Address(_BaseV6,_BaseAddress):\n\n ''\n \n __slots__=('_ip','_scope_id','__weakref__')\n \n def __init__(self,address):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n if isinstance(address,int):\n self._check_int_address(address)\n self._ip=address\n self._scope_id=None\n return\n \n \n if 
isinstance(address,bytes):\n self._check_packed_address(address,16)\n self._ip=int.from_bytes(address,'big')\n self._scope_id=None\n return\n \n \n \n addr_str=str(address)\n if '/'in addr_str:\n raise AddressValueError(\"Unexpected '/' in %r\"%address)\n addr_str,self._scope_id=self._split_scope_id(addr_str)\n \n self._ip=self._ip_int_from_string(addr_str)\n \n def __str__(self):\n ip_str=super().__str__()\n return ip_str+'%'+self._scope_id if self._scope_id else ip_str\n \n def __hash__(self):\n return hash((self._ip,self._scope_id))\n \n def __eq__(self,other):\n address_equal=super().__eq__(other)\n if address_equal is NotImplemented:\n return NotImplemented\n if not address_equal:\n return False\n return self._scope_id ==getattr(other,'_scope_id',None )\n \n @property\n def scope_id(self):\n ''\n\n\n\n\n\n\n \n return self._scope_id\n \n @property\n def packed(self):\n ''\n return v6_int_to_packed(self._ip)\n \n @property\n def is_multicast(self):\n ''\n\n\n\n\n\n \n return self in self._constants._multicast_network\n \n @property\n def is_reserved(self):\n ''\n\n\n\n\n\n \n return any(self in x for x in self._constants._reserved_networks)\n \n @property\n def is_link_local(self):\n ''\n\n\n\n\n \n return self in self._constants._linklocal_network\n \n @property\n def is_site_local(self):\n ''\n\n\n\n\n\n\n\n\n \n return self in self._constants._sitelocal_network\n \n @property\n @functools.lru_cache()\n def is_private(self):\n ''\n\n\n\n\n\n \n return any(self in net for net in self._constants._private_networks)\n \n @property\n def is_global(self):\n ''\n\n\n\n\n\n \n return not self.is_private\n \n @property\n def is_unspecified(self):\n ''\n\n\n\n\n\n \n return self._ip ==0\n \n @property\n def is_loopback(self):\n ''\n\n\n\n\n\n \n return self._ip ==1\n \n @property\n def ipv4_mapped(self):\n ''\n\n\n\n\n\n \n if (self._ip >>32)!=0xFFFF:\n return None\n return IPv4Address(self._ip&0xFFFFFFFF)\n \n @property\n def teredo(self):\n ''\n\n\n\n\n\n\n \n if (self._ip >>96)!=0x20010000:\n return None\n return (IPv4Address((self._ip >>64)&0xFFFFFFFF),\n IPv4Address(~self._ip&0xFFFFFFFF))\n \n @property\n def sixtofour(self):\n ''\n\n\n\n\n\n \n if (self._ip >>112)!=0x2002:\n return None\n return IPv4Address((self._ip >>80)&0xFFFFFFFF)\n \n \nclass IPv6Interface(IPv6Address):\n\n def __init__(self,address):\n addr,mask=self._split_addr_prefix(address)\n \n IPv6Address.__init__(self,addr)\n self.network=IPv6Network((addr,mask),strict=False )\n self.netmask=self.network.netmask\n self._prefixlen=self.network._prefixlen\n \n @functools.cached_property\n def hostmask(self):\n return self.network.hostmask\n \n def __str__(self):\n return '%s/%d'%(super().__str__(),\n self._prefixlen)\n \n def __eq__(self,other):\n address_equal=IPv6Address.__eq__(self,other)\n if address_equal is NotImplemented or not address_equal:\n return address_equal\n try :\n return self.network ==other.network\n except AttributeError:\n \n \n \n return False\n \n def __lt__(self,other):\n address_less=IPv6Address.__lt__(self,other)\n if address_less is NotImplemented:\n return address_less\n try :\n return (self.network self.n:\n raise StopIteration\n if not self.zero:\n self.zero=True\n return tuple(self.pool[i]for i in self.indices)\n else :\n try :\n for i in reversed(range(self.r)):\n if self.indices[i]!=i+self.n -self.r:\n break\n self.indices[i]+=1\n for j in range(i+1,self.r):\n self.indices[j]=self.indices[j -1]+1\n return tuple(self.pool[i]for i in self.indices)\n except :\n raise StopIteration\n \nclass 
combinations_with_replacement:\n def __init__(self,iterable,r):\n self.pool=tuple(iterable)\n self.n=len(self.pool)\n self.r=r\n self.indices=[0]*self.r\n self.zero=False\n \n def __iter__(self):\n return self\n \n def __next__(self):\n if not self.n and self.r:\n raise StopIteration\n if not self.zero:\n self.zero=True\n return tuple(self.pool[i]for i in self.indices)\n else :\n try :\n for i in reversed(range(self.r)):\n if self.indices[i]!=self.n -1:\n break\n self.indices[i:]=[self.indices[i]+1]*(self.r -i)\n return tuple(self.pool[i]for i in self.indices)\n except :\n raise StopIteration\n \n \n \nclass compress:\n def __init__(self,data,selectors):\n self.data=iter(data)\n self.selectors=iter(selectors)\n \n def __iter__(self):\n return self\n \n def __next__(self):\n while True :\n next_item=next(self.data)\n next_selector=next(self.selectors)\n if bool(next_selector):\n return next_item\n \n \n \n \nclass count:\n ''\n\n\n\n \n def __init__(self,start=0,step=1):\n if not isinstance(start,(int,float)):\n raise TypeError('a number is required')\n self.times=start -step\n self.step=step\n \n def __iter__(self):\n return self\n \n def __next__(self):\n self.times +=self.step\n return self.times\n \n def __repr__(self):\n return 'count(%d)'%(self.times+self.step)\n \n \n \nclass cycle:\n def __init__(self,iterable):\n self._cur_iter=iter(iterable)\n self._saved=[]\n self._must_save=True\n \n def __iter__(self):\n return self\n \n def __next__(self):\n try :\n next_elt=next(self._cur_iter)\n if self._must_save:\n self._saved.append(next_elt)\n except StopIteration:\n self._cur_iter=iter(self._saved)\n next_elt=next(self._cur_iter)\n self._must_save=False\n return next_elt\n \n \n \nclass dropwhile:\n def __init__(self,predicate,iterable):\n self._predicate=predicate\n self._iter=iter(iterable)\n self._dropped=False\n \n def __iter__(self):\n return self\n \n def __next__(self):\n value=next(self._iter)\n if self._dropped:\n return value\n while self._predicate(value):\n value=next(self._iter)\n self._dropped=True\n return value\n \n \n \nclass filterfalse:\n def __init__(self,predicate,iterable):\n \n self._iter=iter(iterable)\n if predicate is None :\n self._predicate=bool\n else :\n self._predicate=predicate\n \n def __iter__(self):\n return self\n def __next__(self):\n next_elt=next(self._iter)\n while True :\n if not self._predicate(next_elt):\n return next_elt\n next_elt=next(self._iter)\n \nclass groupby:\n\n\n def __init__(self,iterable,key=None ):\n if key is None :\n key=lambda x:x\n self.keyfunc=key\n self.it=iter(iterable)\n self.tgtkey=self.currkey=self.currvalue=object()\n def __iter__(self):\n return self\n def __next__(self):\n while self.currkey ==self.tgtkey:\n self.currvalue=next(self.it)\n self.currkey=self.keyfunc(self.currvalue)\n self.tgtkey=self.currkey\n return (self.currkey,self._grouper(self.tgtkey))\n def _grouper(self,tgtkey):\n while self.currkey ==tgtkey:\n yield self.currvalue\n self.currvalue=next(self.it)\n self.currkey=self.keyfunc(self.currvalue)\n \n \n \nclass islice:\n def __init__(self,iterable,*args):\n s=slice(*args)\n self.start,self.stop,self.step=s.start or 0,s.stop,s.step\n if not isinstance(self.start,int):\n raise ValueError(\"Start argument must be an integer\")\n if self.stop !=None and not isinstance(self.stop,int):\n raise ValueError(\"Stop argument must be an integer or None\")\n if self.step is None :\n self.step=1\n if self.start <0 or (self.stop !=None and self.stop <0\n )or self.step <=0:\n raise ValueError(\"indices for islice() 
must be positive\")\n self.it=iter(iterable)\n self.donext=None\n self.cnt=0\n \n def __iter__(self):\n return self\n \n def __next__(self):\n nextindex=self.start\n if self.stop !=None and nextindex >=self.stop:\n raise StopIteration\n while self.cnt <=nextindex:\n nextitem=next(self.it)\n self.cnt +=1\n self.start +=self.step\n return nextitem\n \nclass permutations:\n def __init__(self,iterable,r=None ):\n self.pool=tuple(iterable)\n self.n=len(self.pool)\n self.r=self.n if r is None else r\n self.indices=list(range(self.n))\n self.cycles=list(range(self.n,self.n -self.r,-1))\n self.zero=False\n self.stop=False\n \n def __iter__(self):\n return self\n \n def __next__(self):\n indices=self.indices\n if self.r >self.n:\n raise StopIteration\n if not self.zero:\n self.zero=True\n return tuple(self.pool[i]for i in indices[:self.r])\n \n i=self.r -1\n while i >=0:\n j=self.cycles[i]-1\n if j >0:\n self.cycles[i]=j\n indices[i],indices[-j]=indices[-j],indices[i]\n return tuple(self.pool[i]for i in indices[:self.r])\n self.cycles[i]=len(indices)-i\n n1=len(indices)-1\n assert n1 >=0\n num=indices[i]\n for k in range(i,n1):\n indices[k]=indices[k+1]\n indices[n1]=num\n i -=1\n raise StopIteration\n \n \ndef product(*args,repeat=1):\n\n\n pools=[tuple(pool)for pool in args]*repeat\n result=[[]]\n for pool in pools:\n result=[x+[y]for x in result for y in pool]\n for prod in result:\n yield tuple(prod)\n \n \n \n \n \n \n \n \nclass _product:\n def __init__(self,*args,**kw):\n if len(kw)>1:\n raise TypeError(\"product() takes at most 1 argument (%d given)\"%\n len(kw))\n self.repeat=kw.get('repeat',1)\n if not isinstance(self.repeat,int):\n raise TypeError(\"integer argument expected, got %s\"%\n type(self.repeat))\n self.gears=[x for x in args]*self.repeat\n self.num_gears=len(self.gears)\n \n self.indicies=[(0,len(self.gears[x]))\n for x in range(0,self.num_gears)]\n self.cont=True\n self.zero=False\n \n def roll_gears(self):\n \n \n \n should_carry=True\n for n in range(0,self.num_gears):\n nth_gear=self.num_gears -n -1\n if should_carry:\n count,lim=self.indicies[nth_gear]\n count +=1\n if count ==lim and nth_gear ==0:\n self.cont=False\n if count ==lim:\n should_carry=True\n count=0\n else :\n should_carry=False\n self.indicies[nth_gear]=(count,lim)\n else :\n break\n \n def __iter__(self):\n return self\n \n def __next__(self):\n if self.zero:\n raise StopIteration\n if self.repeat >0:\n if not self.cont:\n raise StopIteration\n l=[]\n for x in range(0,self.num_gears):\n index,limit=self.indicies[x]\n print('itertools 353',self.gears,x,index)\n l.append(self.gears[x][index])\n self.roll_gears()\n return tuple(l)\n elif self.repeat ==0:\n self.zero=True\n return ()\n else :\n raise ValueError(\"repeat argument cannot be negative\")\n \n \n \nclass repeat:\n def __init__(self,obj,times=None ):\n self._obj=obj\n if times is not None :\n range(times)\n if times <0:\n times=0\n self._times=times\n \n def __iter__(self):\n return self\n \n def __next__(self):\n \n if self._times is not None :\n if self._times <=0:\n raise StopIteration()\n self._times -=1\n return self._obj\n \n def __repr__(self):\n if self._times is not None :\n return 'repeat(%r, %r)'%(self._obj,self._times)\n else :\n return 'repeat(%r)'%(self._obj,)\n \n def __len__(self):\n if self._times ==-1 or self._times is None :\n raise TypeError(\"len() of uniszed object\")\n return self._times\n \n \n \nclass starmap(object):\n def __init__(self,function,iterable):\n self._func=function\n self._iter=iter(iterable)\n \n def 
__iter__(self):\n return self\n \n def __next__(self):\n t=next(self._iter)\n return self._func(*t)\n \n \n \nclass takewhile(object):\n def __init__(self,predicate,iterable):\n self._predicate=predicate\n self._iter=iter(iterable)\n \n def __iter__(self):\n return self\n \n def __next__(self):\n value=next(self._iter)\n if not self._predicate(value):\n raise StopIteration()\n return value\n \n \n \nclass TeeData(object):\n def __init__(self,iterator):\n self.data=[]\n self._iter=iterator\n \n def __getitem__(self,i):\n \n while i >=len(self.data):\n self.data.append(next(self._iter))\n return self.data[i]\n \n \nclass TeeObject(object):\n def __init__(self,iterable=None ,tee_data=None ):\n if tee_data:\n self.tee_data=tee_data\n self.pos=0\n \n elif isinstance(iterable,TeeObject):\n self.tee_data=iterable.tee_data\n self.pos=iterable.pos\n else :\n self.tee_data=TeeData(iter(iterable))\n self.pos=0\n \n def __next__(self):\n data=self.tee_data[self.pos]\n self.pos +=1\n return data\n \n def __iter__(self):\n return self\n \n \ndef tee(iterable,n=2):\n if isinstance(iterable,TeeObject):\n return tuple([iterable]+\n [TeeObject(tee_data=iterable.tee_data)for i in range(n -1)])\n tee_data=TeeData(iter(iterable))\n return tuple([TeeObject(tee_data=tee_data)for i in range(n)])\n \nclass zip_longest:\n def __init__(self,*args,fillvalue=None ):\n self.args=[iter(arg)for arg in args]\n self.fillvalue=fillvalue\n self.units=len(args)\n \n def __iter__(self):\n return self\n \n def __next__(self):\n temp=[]\n nb=0\n for i in range(self.units):\n try :\n temp.append(next(self.args[i]))\n nb +=1\n except StopIteration:\n temp.append(self.fillvalue)\n if nb ==0:\n raise StopIteration\n return tuple(temp)\n", ["operator"]], "json": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n__version__='2.0.9'\n__all__=[\n'dump','dumps','load','loads',\n'JSONDecoder','JSONDecodeError','JSONEncoder',\n]\n\n__author__='Bob Ippolito '\n\n\n\n\n\nclass codecs:\n\n BOM_UTF8=b'\\xef\\xbb\\xbf'\n BOM_LE=BOM_UTF16_LE=b'\\xff\\xfe'\n BOM_BE=BOM_UTF16_BE=b'\\xfe\\xff'\n BOM_UTF32_LE=b'\\xff\\xfe\\x00\\x00'\n BOM_UTF32_BE=b'\\x00\\x00\\xfe\\xff'\n \nimport _json\n\nclass JSONDecodeError(ValueError):\n ''\n\n\n\n\n\n\n\n \n \n def __init__(self,msg,doc,pos):\n lineno=doc.count('\\n',0,pos)+1\n colno=pos -doc.rfind('\\n',0,pos)\n errmsg='%s: line %d column %d (char %d)'%(msg,lineno,colno,pos)\n ValueError.__init__(self,errmsg)\n self.msg=msg\n self.doc=doc\n self.pos=pos\n self.lineno=lineno\n self.colno=colno\n \n def __reduce__(self):\n return self.__class__,(self.msg,self.doc,self.pos)\n \ndef dump(obj,fp,**kw):\n fp.write(dumps(obj,**kw))\n \ndef dumps(obj,*,cls=None ,**kw):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if cls is None :\n return _json.dumps(obj,1,**kw)\n return cls(\n skipkeys=skipkeys,ensure_ascii=ensure_ascii,\n check_circular=check_circular,allow_nan=allow_nan,indent=indent,\n separators=separators,default=default,sort_keys=sort_keys,\n **kw).encode(obj)\n \ndef detect_encoding(b):\n bstartswith=b.startswith\n if bstartswith((codecs.BOM_UTF32_BE,codecs.BOM_UTF32_LE)):\n return 'utf-32'\n if bstartswith((codecs.BOM_UTF16_BE,codecs.BOM_UTF16_LE)):\n return 'utf-16'\n if bstartswith(codecs.BOM_UTF8):\n return 'utf-8-sig'\n \n if len(b)>=4:\n if not b[0]:\n \n \n return 'utf-16-be'if b[1]else 'utf-32-be'\n if not 
b[1]:\n \n \n \n return 'utf-16-le'if b[2]or b[3]else 'utf-32-le'\n elif len(b)==2:\n if not b[0]:\n \n return 'utf-16-be'\n if not b[1]:\n \n return 'utf-16-le'\n \n return 'utf-8'\n \n \ndef load(fp,*,cls=None ,object_hook=None ,parse_float=None ,\nparse_int=None ,parse_constant=None ,object_pairs_hook=None ,**kw):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n return loads(fp.read(),\n cls=cls,object_hook=object_hook,\n parse_float=parse_float,parse_int=parse_int,\n parse_constant=parse_constant,object_pairs_hook=object_pairs_hook,**kw)\n \n \ndef loads(s,*,cls=None ,**kw):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if isinstance(s,str):\n if s.startswith('\\ufeff'):\n raise JSONDecodeError(\"Unexpected UTF-8 BOM (decode using utf-8-sig)\",\n s,0)\n else :\n if not isinstance(s,(bytes,bytearray)):\n raise TypeError(f'the JSON object must be str, bytes or bytearray, '\n f'not {s.__class__.__name__}')\n s=s.decode(detect_encoding(s),'surrogatepass')\n \n \n if \"encoding\"in kw:\n import warnings\n warnings.warn(\n \"'encoding' is ignored and deprecated. It will be removed in Python 3.9\",\n DeprecationWarning,\n stacklevel=2\n )\n del kw['encoding']\n \n if cls is None :\n \n \n return _json.loads(s,**kw)\n if object_hook is not None :\n kw['object_hook']=object_hook\n if object_pairs_hook is not None :\n kw['object_pairs_hook']=object_pairs_hook\n if parse_float is not None :\n kw['parse_float']=parse_float\n if parse_int is not None :\n kw['parse_int']=parse_int\n if parse_constant is not None :\n kw['parse_constant']=parse_constant\n return cls(**kw).decode(s)\n", ["_json", "warnings"]], "keyword": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n__all__=[\"iskeyword\",\"issoftkeyword\",\"kwlist\",\"softkwlist\"]\n\nkwlist=[\n'False',\n'None',\n'True',\n'__peg_parser__',\n'and',\n'as',\n'assert',\n'async',\n'await',\n'break',\n'class',\n'continue',\n'def',\n'del',\n'elif',\n'else',\n'except',\n'finally',\n'for',\n'from',\n'global',\n'if',\n'import',\n'in',\n'is',\n'lambda',\n'nonlocal',\n'not',\n'or',\n'pass',\n'raise',\n'return',\n'try',\n'while',\n'with',\n'yield'\n]\n\nsoftkwlist=[\n\n]\n\niskeyword=frozenset(kwlist).__contains__\nissoftkeyword=frozenset(softkwlist).__contains__\n", []], "linecache": [".py", "''\n\n\n\n\n\n\nimport functools\nimport sys\nimport os\nimport tokenize\n\n__all__=[\"getline\",\"clearcache\",\"checkcache\",\"lazycache\"]\n\n\n\n\ncache={}\n\n\ndef clearcache():\n ''\n cache.clear()\n \n \ndef getline(filename,lineno,module_globals=None ):\n ''\n \n \n lines=getlines(filename,module_globals)\n if 1 <=lineno <=len(lines):\n return lines[lineno -1]\n return ''\n \n \ndef getlines(filename,module_globals=None ):\n ''\n \n \n if filename in cache:\n entry=cache[filename]\n if len(entry)!=1:\n return cache[filename][2]\n \n try :\n return updatecache(filename,module_globals)\n except MemoryError:\n clearcache()\n return []\n \n \ndef checkcache(filename=None ):\n ''\n \n \n if filename is None :\n filenames=list(cache.keys())\n elif filename in cache:\n filenames=[filename]\n else :\n return\n \n for filename in filenames:\n entry=cache[filename]\n if len(entry)==1:\n \n continue\n size,mtime,lines,fullname=entry\n if mtime is None :\n continue\n try :\n stat=os.stat(fullname)\n except OSError:\n cache.pop(filename,None )\n continue\n if size !=stat.st_size or mtime !=stat.st_mtime:\n cache.pop(filename,None )\n \n \ndef updatecache(filename,module_globals=None ):\n ''\n\n \n \n if filename in cache:\n if len(cache[filename])!=1:\n 
cache.pop(filename,None )\n if not filename or (filename.startswith('<')and filename.endswith('>')):\n return []\n \n fullname=filename\n try :\n stat=os.stat(fullname)\n except OSError:\n basename=filename\n \n \n \n if lazycache(filename,module_globals):\n try :\n data=cache[filename][0]()\n except (ImportError,OSError):\n pass\n else :\n if data is None :\n \n \n return []\n cache[filename]=(\n len(data),\n None ,\n [line+'\\n'for line in data.splitlines()],\n fullname\n )\n return cache[filename][2]\n \n \n \n if os.path.isabs(filename):\n return []\n \n for dirname in sys.path:\n try :\n fullname=os.path.join(dirname,basename)\n except (TypeError,AttributeError):\n \n continue\n try :\n stat=os.stat(fullname)\n break\n except OSError:\n pass\n else :\n return []\n try :\n with tokenize.open(fullname)as fp:\n lines=fp.readlines()\n except OSError:\n return []\n if lines and not lines[-1].endswith('\\n'):\n lines[-1]+='\\n'\n size,mtime=stat.st_size,stat.st_mtime\n cache[filename]=size,mtime,lines,fullname\n return lines\n \n \ndef lazycache(filename,module_globals):\n ''\n\n\n\n\n\n\n\n\n\n\n \n if filename in cache:\n if len(cache[filename])==1:\n return True\n else :\n return False\n if not filename or (filename.startswith('<')and filename.endswith('>')):\n return False\n \n if module_globals and '__loader__'in module_globals:\n name=module_globals.get('__name__')\n loader=module_globals['__loader__']\n get_source=getattr(loader,'get_source',None )\n \n if name and get_source:\n get_lines=functools.partial(get_source,name)\n cache[filename]=(get_lines,)\n return True\n return False\n", ["functools", "os", "sys", "tokenize"]], "locale": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\nimport sys\nimport encodings\nimport encodings.aliases\nimport re\nimport _collections_abc\nfrom builtins import str as _builtin_str\nimport functools\n\n\n\n\n\n\n\n__all__=[\"getlocale\",\"getdefaultlocale\",\"getpreferredencoding\",\"Error\",\n\"setlocale\",\"resetlocale\",\"localeconv\",\"strcoll\",\"strxfrm\",\n\"str\",\"atof\",\"atoi\",\"format\",\"format_string\",\"currency\",\n\"normalize\",\"LC_CTYPE\",\"LC_COLLATE\",\"LC_TIME\",\"LC_MONETARY\",\n\"LC_NUMERIC\",\"LC_ALL\",\"CHAR_MAX\"]\n\ndef _strcoll(a,b):\n ''\n\n \n return (a >b)-(a .*?)\\))?'\nr'(?P[-#0-9 +*.hlL]*?)[eEfFgGdiouxXcrs%]')\n\ndef _format(percent,value,grouping=False ,monetary=False ,*additional):\n if additional:\n formatted=percent %((value,)+additional)\n else :\n formatted=percent %value\n \n if percent[-1]in 'eEfFgG':\n seps=0\n parts=formatted.split('.')\n if grouping:\n parts[0],seps=_group(parts[0],monetary=monetary)\n decimal_point=localeconv()[monetary and 'mon_decimal_point'\n or 'decimal_point']\n formatted=decimal_point.join(parts)\n if seps:\n formatted=_strip_padding(formatted,seps)\n elif percent[-1]in 'diu':\n seps=0\n if grouping:\n formatted,seps=_group(formatted,monetary=monetary)\n if seps:\n formatted=_strip_padding(formatted,seps)\n return formatted\n \ndef format_string(f,val,grouping=False ,monetary=False ):\n ''\n\n\n\n\n \n percents=list(_percent_re.finditer(f))\n new_f=_percent_re.sub('%s',f)\n \n if isinstance(val,_collections_abc.Mapping):\n new_val=[]\n for perc in percents:\n if perc.group()[-1]=='%':\n new_val.append('%')\n else :\n new_val.append(_format(perc.group(),val,grouping,monetary))\n else :\n if not isinstance(val,tuple):\n val=(val,)\n new_val=[]\n i=0\n for perc in percents:\n if perc.group()[-1]=='%':\n new_val.append('%')\n else :\n starcount=perc.group('modifiers').count('*')\n 
new_val.append(_format(perc.group(),\n val[i],\n grouping,\n monetary,\n *val[i+1:i+1+starcount]))\n i +=(1+starcount)\n val=tuple(new_val)\n \n return new_f %val\n \ndef format(percent,value,grouping=False ,monetary=False ,*additional):\n ''\n import warnings\n warnings.warn(\n \"This method will be removed in a future version of Python. \"\n \"Use 'locale.format_string()' instead.\",\n DeprecationWarning,stacklevel=2\n )\n \n match=_percent_re.match(percent)\n if not match or len(match.group())!=len(percent):\n raise ValueError((\"format() must be given exactly one %%char \"\n \"format specifier, %s not valid\")%repr(percent))\n return _format(percent,value,grouping,monetary,*additional)\n \ndef currency(val,symbol=True ,grouping=False ,international=False ):\n ''\n \n conv=localeconv()\n \n \n digits=conv[international and 'int_frac_digits'or 'frac_digits']\n if digits ==127:\n raise ValueError(\"Currency formatting is not possible using \"\n \"the 'C' locale.\")\n \n s=_format('%%.%if'%digits,abs(val),grouping,monetary=True )\n \n s='<'+s+'>'\n \n if symbol:\n smb=conv[international and 'int_curr_symbol'or 'currency_symbol']\n precedes=conv[val <0 and 'n_cs_precedes'or 'p_cs_precedes']\n separated=conv[val <0 and 'n_sep_by_space'or 'p_sep_by_space']\n \n if precedes:\n s=smb+(separated and ' 'or '')+s\n else :\n if international and smb[-1]==' ':\n smb=smb[:-1]\n s=s+(separated and ' 'or '')+smb\n \n sign_pos=conv[val <0 and 'n_sign_posn'or 'p_sign_posn']\n sign=conv[val <0 and 'negative_sign'or 'positive_sign']\n \n if sign_pos ==0:\n s='('+s+')'\n elif sign_pos ==1:\n s=sign+s\n elif sign_pos ==2:\n s=s+sign\n elif sign_pos ==3:\n s=s.replace('<',sign)\n elif sign_pos ==4:\n s=s.replace('>',sign)\n else :\n \n \n s=sign+s\n \n return s.replace('<','').replace('>','')\n \ndef str(val):\n ''\n return _format(\"%.12g\",val)\n \ndef delocalize(string):\n ''\n \n conv=localeconv()\n \n \n ts=conv['thousands_sep']\n if ts:\n string=string.replace(ts,'')\n \n \n dd=conv['decimal_point']\n if dd:\n string=string.replace(dd,'.')\n return string\n \ndef atof(string,func=float):\n ''\n return func(delocalize(string))\n \ndef atoi(string):\n ''\n return int(delocalize(string))\n \ndef _test():\n setlocale(LC_ALL,\"\")\n \n s1=format_string(\"%d\",123456789,1)\n print(s1,\"is\",atoi(s1))\n \n s1=str(3.14)\n print(s1,\"is\",atof(s1))\n \n \n \n \n \n \n \n \n_setlocale=setlocale\n\ndef _replace_encoding(code,encoding):\n if '.'in code:\n langname=code[:code.index('.')]\n else :\n langname=code\n \n norm_encoding=encodings.normalize_encoding(encoding)\n \n norm_encoding=encodings.aliases.aliases.get(norm_encoding.lower(),\n norm_encoding)\n \n encoding=norm_encoding\n norm_encoding=norm_encoding.lower()\n if norm_encoding in locale_encoding_alias:\n encoding=locale_encoding_alias[norm_encoding]\n else :\n norm_encoding=norm_encoding.replace('_','')\n norm_encoding=norm_encoding.replace('-','')\n if norm_encoding in locale_encoding_alias:\n encoding=locale_encoding_alias[norm_encoding]\n \n return langname+'.'+encoding\n \ndef _append_modifier(code,modifier):\n if modifier =='euro':\n if '.'not in code:\n return code+'.ISO8859-15'\n _,_,encoding=code.partition('.')\n if encoding in ('ISO8859-15','UTF-8'):\n return code\n if encoding =='ISO8859-1':\n return _replace_encoding(code,'ISO8859-15')\n return code+'@'+modifier\n \ndef normalize(localename):\n\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n code=localename.lower()\n if ':'in code:\n \n code=code.replace(':','.')\n if '@'in code:\n 
code,modifier=code.split('@',1)\n else :\n modifier=''\n if '.'in code:\n langname,encoding=code.split('.')[:2]\n else :\n langname=code\n encoding=''\n \n \n lang_enc=langname\n if encoding:\n norm_encoding=encoding.replace('-','')\n norm_encoding=norm_encoding.replace('_','')\n lang_enc +='.'+norm_encoding\n lookup_name=lang_enc\n if modifier:\n lookup_name +='@'+modifier\n code=locale_alias.get(lookup_name,None )\n if code is not None :\n return code\n \n \n if modifier:\n \n code=locale_alias.get(lang_enc,None )\n if code is not None :\n \n if '@'not in code:\n return _append_modifier(code,modifier)\n if code.split('@',1)[1].lower()==modifier:\n return code\n \n \n if encoding:\n \n lookup_name=langname\n if modifier:\n lookup_name +='@'+modifier\n code=locale_alias.get(lookup_name,None )\n if code is not None :\n \n if '@'not in code:\n return _replace_encoding(code,encoding)\n code,modifier=code.split('@',1)\n return _replace_encoding(code,encoding)+'@'+modifier\n \n if modifier:\n \n code=locale_alias.get(langname,None )\n if code is not None :\n \n if '@'not in code:\n code=_replace_encoding(code,encoding)\n return _append_modifier(code,modifier)\n code,defmod=code.split('@',1)\n if defmod.lower()==modifier:\n return _replace_encoding(code,encoding)+'@'+defmod\n \n return localename\n \ndef _parse_localename(localename):\n\n ''\n\n\n\n\n\n\n\n\n\n\n \n code=normalize(localename)\n if '@'in code:\n \n code,modifier=code.split('@',1)\n if modifier =='euro'and '.'not in code:\n \n \n \n return code,'iso-8859-15'\n \n if '.'in code:\n return tuple(code.split('.')[:2])\n elif code =='C':\n return None ,None\n elif code =='UTF-8':\n \n \n return None ,'UTF-8'\n raise ValueError('unknown locale: %s'%localename)\n \ndef _build_localename(localetuple):\n\n ''\n\n\n\n\n \n try :\n language,encoding=localetuple\n \n if language is None :\n language='C'\n if encoding is None :\n return language\n else :\n return language+'.'+encoding\n except (TypeError,ValueError):\n raise TypeError('Locale must be None, a string, or an iterable of '\n 'two strings -- language code, encoding.')from None\n \ndef getdefaultlocale(envvars=('LC_ALL','LC_CTYPE','LANG','LANGUAGE')):\n\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n try :\n \n import _locale\n code,encoding=_locale._getdefaultlocale()\n except (ImportError,AttributeError):\n pass\n else :\n \n if sys.platform ==\"win32\"and code and code[:2]==\"0x\":\n \n code=windows_locale.get(int(code,0))\n \n \n return code,encoding\n \n \n import os\n lookup=os.environ.get\n for variable in envvars:\n localename=lookup(variable,None )\n if localename:\n if variable =='LANGUAGE':\n localename=localename.split(':')[0]\n break\n else :\n localename='C'\n return _parse_localename(localename)\n \n \ndef getlocale(category=LC_CTYPE):\n\n ''\n\n\n\n\n\n\n\n\n\n \n localename=_setlocale(category)\n if category ==LC_ALL and ';'in localename:\n raise TypeError('category LC_ALL is not supported')\n return _parse_localename(localename)\n \ndef setlocale(category,locale=None ):\n\n ''\n\n\n\n\n\n\n\n\n \n if locale and not isinstance(locale,_builtin_str):\n \n locale=normalize(_build_localename(locale))\n return _setlocale(category,locale)\n \ndef resetlocale(category=LC_ALL):\n\n ''\n\n\n\n\n \n _setlocale(category,_build_localename(getdefaultlocale()))\n \nif sys.platform.startswith(\"win\"):\n\n def getpreferredencoding(do_setlocale=True ):\n ''\n if sys.flags.utf8_mode:\n return 'UTF-8'\n import _bootlocale\n return _bootlocale.getpreferredencoding(False )\nelse 
:\n\n try :\n CODESET\n except NameError:\n if hasattr(sys,'getandroidapilevel'):\n \n \n def getpreferredencoding(do_setlocale=True ):\n return 'UTF-8'\n else :\n \n def getpreferredencoding(do_setlocale=True ):\n ''\n \n if sys.flags.utf8_mode:\n return 'UTF-8'\n res=getdefaultlocale()[1]\n if res is None :\n \n res='ascii'\n return res\n else :\n def getpreferredencoding(do_setlocale=True ):\n ''\n \n if sys.flags.utf8_mode:\n return 'UTF-8'\n import _bootlocale\n if do_setlocale:\n oldloc=setlocale(LC_CTYPE)\n try :\n setlocale(LC_CTYPE,\"\")\n except Error:\n pass\n result=_bootlocale.getpreferredencoding(False )\n if do_setlocale:\n setlocale(LC_CTYPE,oldloc)\n return result\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \nlocale_encoding_alias={\n\n\n'437':'C',\n'c':'C',\n'en':'ISO8859-1',\n'jis':'JIS7',\n'jis7':'JIS7',\n'ajec':'eucJP',\n'koi8c':'KOI8-C',\n'microsoftcp1251':'CP1251',\n'microsoftcp1255':'CP1255',\n'microsoftcp1256':'CP1256',\n'88591':'ISO8859-1',\n'88592':'ISO8859-2',\n'88595':'ISO8859-5',\n'885915':'ISO8859-15',\n\n\n'ascii':'ISO8859-1',\n'latin_1':'ISO8859-1',\n'iso8859_1':'ISO8859-1',\n'iso8859_10':'ISO8859-10',\n'iso8859_11':'ISO8859-11',\n'iso8859_13':'ISO8859-13',\n'iso8859_14':'ISO8859-14',\n'iso8859_15':'ISO8859-15',\n'iso8859_16':'ISO8859-16',\n'iso8859_2':'ISO8859-2',\n'iso8859_3':'ISO8859-3',\n'iso8859_4':'ISO8859-4',\n'iso8859_5':'ISO8859-5',\n'iso8859_6':'ISO8859-6',\n'iso8859_7':'ISO8859-7',\n'iso8859_8':'ISO8859-8',\n'iso8859_9':'ISO8859-9',\n'iso2022_jp':'JIS7',\n'shift_jis':'SJIS',\n'tactis':'TACTIS',\n'euc_jp':'eucJP',\n'euc_kr':'eucKR',\n'utf_8':'UTF-8',\n'koi8_r':'KOI8-R',\n'koi8_t':'KOI8-T',\n'koi8_u':'KOI8-U',\n'kz1048':'RK1048',\n'cp1251':'CP1251',\n'cp1255':'CP1255',\n'cp1256':'CP1256',\n\n\n\n}\n\nfor k,v in sorted(locale_encoding_alias.items()):\n k=k.replace('_','')\n locale_encoding_alias.setdefault(k,v)\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n 
\nlocale_alias={\n'a3':'az_AZ.KOI8-C',\n'a3_az':'az_AZ.KOI8-C',\n'a3_az.koic':'az_AZ.KOI8-C',\n'aa_dj':'aa_DJ.ISO8859-1',\n'aa_er':'aa_ER.UTF-8',\n'aa_et':'aa_ET.UTF-8',\n'af':'af_ZA.ISO8859-1',\n'af_za':'af_ZA.ISO8859-1',\n'agr_pe':'agr_PE.UTF-8',\n'ak_gh':'ak_GH.UTF-8',\n'am':'am_ET.UTF-8',\n'am_et':'am_ET.UTF-8',\n'american':'en_US.ISO8859-1',\n'an_es':'an_ES.ISO8859-15',\n'anp_in':'anp_IN.UTF-8',\n'ar':'ar_AA.ISO8859-6',\n'ar_aa':'ar_AA.ISO8859-6',\n'ar_ae':'ar_AE.ISO8859-6',\n'ar_bh':'ar_BH.ISO8859-6',\n'ar_dz':'ar_DZ.ISO8859-6',\n'ar_eg':'ar_EG.ISO8859-6',\n'ar_in':'ar_IN.UTF-8',\n'ar_iq':'ar_IQ.ISO8859-6',\n'ar_jo':'ar_JO.ISO8859-6',\n'ar_kw':'ar_KW.ISO8859-6',\n'ar_lb':'ar_LB.ISO8859-6',\n'ar_ly':'ar_LY.ISO8859-6',\n'ar_ma':'ar_MA.ISO8859-6',\n'ar_om':'ar_OM.ISO8859-6',\n'ar_qa':'ar_QA.ISO8859-6',\n'ar_sa':'ar_SA.ISO8859-6',\n'ar_sd':'ar_SD.ISO8859-6',\n'ar_ss':'ar_SS.UTF-8',\n'ar_sy':'ar_SY.ISO8859-6',\n'ar_tn':'ar_TN.ISO8859-6',\n'ar_ye':'ar_YE.ISO8859-6',\n'arabic':'ar_AA.ISO8859-6',\n'as':'as_IN.UTF-8',\n'as_in':'as_IN.UTF-8',\n'ast_es':'ast_ES.ISO8859-15',\n'ayc_pe':'ayc_PE.UTF-8',\n'az':'az_AZ.ISO8859-9E',\n'az_az':'az_AZ.ISO8859-9E',\n'az_az.iso88599e':'az_AZ.ISO8859-9E',\n'az_ir':'az_IR.UTF-8',\n'be':'be_BY.CP1251',\n'be@latin':'be_BY.UTF-8@latin',\n'be_bg.utf8':'bg_BG.UTF-8',\n'be_by':'be_BY.CP1251',\n'be_by@latin':'be_BY.UTF-8@latin',\n'bem_zm':'bem_ZM.UTF-8',\n'ber_dz':'ber_DZ.UTF-8',\n'ber_ma':'ber_MA.UTF-8',\n'bg':'bg_BG.CP1251',\n'bg_bg':'bg_BG.CP1251',\n'bhb_in.utf8':'bhb_IN.UTF-8',\n'bho_in':'bho_IN.UTF-8',\n'bho_np':'bho_NP.UTF-8',\n'bi_vu':'bi_VU.UTF-8',\n'bn_bd':'bn_BD.UTF-8',\n'bn_in':'bn_IN.UTF-8',\n'bo_cn':'bo_CN.UTF-8',\n'bo_in':'bo_IN.UTF-8',\n'bokmal':'nb_NO.ISO8859-1',\n'bokm\\xe5l':'nb_NO.ISO8859-1',\n'br':'br_FR.ISO8859-1',\n'br_fr':'br_FR.ISO8859-1',\n'brx_in':'brx_IN.UTF-8',\n'bs':'bs_BA.ISO8859-2',\n'bs_ba':'bs_BA.ISO8859-2',\n'bulgarian':'bg_BG.CP1251',\n'byn_er':'byn_ER.UTF-8',\n'c':'C',\n'c-french':'fr_CA.ISO8859-1',\n'c.ascii':'C',\n'c.en':'C',\n'c.iso88591':'en_US.ISO8859-1',\n'c.utf8':'en_US.UTF-8',\n'c_c':'C',\n'c_c.c':'C',\n'ca':'ca_ES.ISO8859-1',\n'ca_ad':'ca_AD.ISO8859-1',\n'ca_es':'ca_ES.ISO8859-1',\n'ca_es@valencia':'ca_ES.UTF-8@valencia',\n'ca_fr':'ca_FR.ISO8859-1',\n'ca_it':'ca_IT.ISO8859-1',\n'catalan':'ca_ES.ISO8859-1',\n'ce_ru':'ce_RU.UTF-8',\n'cextend':'en_US.ISO8859-1',\n'chinese-s':'zh_CN.eucCN',\n'chinese-t':'zh_TW.eucTW',\n'chr_us':'chr_US.UTF-8',\n'ckb_iq':'ckb_IQ.UTF-8',\n'cmn_tw':'cmn_TW.UTF-8',\n'crh_ua':'crh_UA.UTF-8',\n'croatian':'hr_HR.ISO8859-2',\n'cs':'cs_CZ.ISO8859-2',\n'cs_cs':'cs_CZ.ISO8859-2',\n'cs_cz':'cs_CZ.ISO8859-2',\n'csb_pl':'csb_PL.UTF-8',\n'cv_ru':'cv_RU.UTF-8',\n'cy':'cy_GB.ISO8859-1',\n'cy_gb':'cy_GB.ISO8859-1',\n'cz':'cs_CZ.ISO8859-2',\n'cz_cz':'cs_CZ.ISO8859-2',\n'czech':'cs_CZ.ISO8859-2',\n'da':'da_DK.ISO8859-1',\n'da_dk':'da_DK.ISO8859-1',\n'danish':'da_DK.ISO8859-1',\n'dansk':'da_DK.ISO8859-1',\n'de':'de_DE.ISO8859-1',\n'de_at':'de_AT.ISO8859-1',\n'de_be':'de_BE.ISO8859-1',\n'de_ch':'de_CH.ISO8859-1',\n'de_de':'de_DE.ISO8859-1',\n'de_it':'de_IT.ISO8859-1',\n'de_li.utf8':'de_LI.UTF-8',\n'de_lu':'de_LU.ISO8859-1',\n'deutsch':'de_DE.ISO8859-1',\n'doi_in':'doi_IN.UTF-8',\n'dutch':'nl_NL.ISO8859-1',\n'dutch.iso88591':'nl_BE.ISO8859-1',\n'dv_mv':'dv_MV.UTF-8',\n'dz_bt':'dz_BT.UTF-8',\n'ee':'ee_EE.ISO8859-4',\n'ee_ee':'ee_EE.ISO8859-4',\n'eesti':'et_EE.ISO8859-1',\n'el':'el_GR.ISO8859-7',\n'el_cy':'el_CY.ISO8859-7',\n'el_gr':'el_GR.ISO8859-7',\n'el_gr@euro':'el_GR.ISO8859-15',\n'en':'en_US.ISO8859-1',\n'en_ag'
:'en_AG.UTF-8',\n'en_au':'en_AU.ISO8859-1',\n'en_be':'en_BE.ISO8859-1',\n'en_bw':'en_BW.ISO8859-1',\n'en_ca':'en_CA.ISO8859-1',\n'en_dk':'en_DK.ISO8859-1',\n'en_dl.utf8':'en_DL.UTF-8',\n'en_gb':'en_GB.ISO8859-1',\n'en_hk':'en_HK.ISO8859-1',\n'en_ie':'en_IE.ISO8859-1',\n'en_il':'en_IL.UTF-8',\n'en_in':'en_IN.ISO8859-1',\n'en_ng':'en_NG.UTF-8',\n'en_nz':'en_NZ.ISO8859-1',\n'en_ph':'en_PH.ISO8859-1',\n'en_sc.utf8':'en_SC.UTF-8',\n'en_sg':'en_SG.ISO8859-1',\n'en_uk':'en_GB.ISO8859-1',\n'en_us':'en_US.ISO8859-1',\n'en_us@euro@euro':'en_US.ISO8859-15',\n'en_za':'en_ZA.ISO8859-1',\n'en_zm':'en_ZM.UTF-8',\n'en_zw':'en_ZW.ISO8859-1',\n'en_zw.utf8':'en_ZS.UTF-8',\n'eng_gb':'en_GB.ISO8859-1',\n'english':'en_EN.ISO8859-1',\n'english.iso88591':'en_US.ISO8859-1',\n'english_uk':'en_GB.ISO8859-1',\n'english_united-states':'en_US.ISO8859-1',\n'english_united-states.437':'C',\n'english_us':'en_US.ISO8859-1',\n'eo':'eo_XX.ISO8859-3',\n'eo.utf8':'eo.UTF-8',\n'eo_eo':'eo_EO.ISO8859-3',\n'eo_us.utf8':'eo_US.UTF-8',\n'eo_xx':'eo_XX.ISO8859-3',\n'es':'es_ES.ISO8859-1',\n'es_ar':'es_AR.ISO8859-1',\n'es_bo':'es_BO.ISO8859-1',\n'es_cl':'es_CL.ISO8859-1',\n'es_co':'es_CO.ISO8859-1',\n'es_cr':'es_CR.ISO8859-1',\n'es_cu':'es_CU.UTF-8',\n'es_do':'es_DO.ISO8859-1',\n'es_ec':'es_EC.ISO8859-1',\n'es_es':'es_ES.ISO8859-1',\n'es_gt':'es_GT.ISO8859-1',\n'es_hn':'es_HN.ISO8859-1',\n'es_mx':'es_MX.ISO8859-1',\n'es_ni':'es_NI.ISO8859-1',\n'es_pa':'es_PA.ISO8859-1',\n'es_pe':'es_PE.ISO8859-1',\n'es_pr':'es_PR.ISO8859-1',\n'es_py':'es_PY.ISO8859-1',\n'es_sv':'es_SV.ISO8859-1',\n'es_us':'es_US.ISO8859-1',\n'es_uy':'es_UY.ISO8859-1',\n'es_ve':'es_VE.ISO8859-1',\n'estonian':'et_EE.ISO8859-1',\n'et':'et_EE.ISO8859-15',\n'et_ee':'et_EE.ISO8859-15',\n'eu':'eu_ES.ISO8859-1',\n'eu_es':'eu_ES.ISO8859-1',\n'eu_fr':'eu_FR.ISO8859-1',\n'fa':'fa_IR.UTF-8',\n'fa_ir':'fa_IR.UTF-8',\n'fa_ir.isiri3342':'fa_IR.ISIRI-3342',\n'ff_sn':'ff_SN.UTF-8',\n'fi':'fi_FI.ISO8859-15',\n'fi_fi':'fi_FI.ISO8859-15',\n'fil_ph':'fil_PH.UTF-8',\n'finnish':'fi_FI.ISO8859-1',\n'fo':'fo_FO.ISO8859-1',\n'fo_fo':'fo_FO.ISO8859-1',\n'fr':'fr_FR.ISO8859-1',\n'fr_be':'fr_BE.ISO8859-1',\n'fr_ca':'fr_CA.ISO8859-1',\n'fr_ch':'fr_CH.ISO8859-1',\n'fr_fr':'fr_FR.ISO8859-1',\n'fr_lu':'fr_LU.ISO8859-1',\n'fran\\xe7ais':'fr_FR.ISO8859-1',\n'fre_fr':'fr_FR.ISO8859-1',\n'french':'fr_FR.ISO8859-1',\n'french.iso88591':'fr_CH.ISO8859-1',\n'french_france':'fr_FR.ISO8859-1',\n'fur_it':'fur_IT.UTF-8',\n'fy_de':'fy_DE.UTF-8',\n'fy_nl':'fy_NL.UTF-8',\n'ga':'ga_IE.ISO8859-1',\n'ga_ie':'ga_IE.ISO8859-1',\n'galego':'gl_ES.ISO8859-1',\n'galician':'gl_ES.ISO8859-1',\n'gd':'gd_GB.ISO8859-1',\n'gd_gb':'gd_GB.ISO8859-1',\n'ger_de':'de_DE.ISO8859-1',\n'german':'de_DE.ISO8859-1',\n'german.iso88591':'de_CH.ISO8859-1',\n'german_germany':'de_DE.ISO8859-1',\n'gez_er':'gez_ER.UTF-8',\n'gez_et':'gez_ET.UTF-8',\n'gl':'gl_ES.ISO8859-1',\n'gl_es':'gl_ES.ISO8859-1',\n'greek':'el_GR.ISO8859-7',\n'gu_in':'gu_IN.UTF-8',\n'gv':'gv_GB.ISO8859-1',\n'gv_gb':'gv_GB.ISO8859-1',\n'ha_ng':'ha_NG.UTF-8',\n'hak_tw':'hak_TW.UTF-8',\n'he':'he_IL.ISO8859-8',\n'he_il':'he_IL.ISO8859-8',\n'hebrew':'he_IL.ISO8859-8',\n'hi':'hi_IN.ISCII-DEV',\n'hi_in':'hi_IN.ISCII-DEV',\n'hi_in.isciidev':'hi_IN.ISCII-DEV',\n'hif_fj':'hif_FJ.UTF-8',\n'hne':'hne_IN.UTF-8',\n'hne_in':'hne_IN.UTF-8',\n'hr':'hr_HR.ISO8859-2',\n'hr_hr':'hr_HR.ISO8859-2',\n'hrvatski':'hr_HR.ISO8859-2',\n'hsb_de':'hsb_DE.ISO8859-2',\n'ht_ht':'ht_HT.UTF-8',\n'hu':'hu_HU.ISO8859-2',\n'hu_hu':'hu_HU.ISO8859-2',\n'hungarian':'hu_HU.ISO8859-2',\n'hy_am':'hy_AM.UTF-8',\n'hy_am.arm
scii8':'hy_AM.ARMSCII_8',\n'ia':'ia.UTF-8',\n'ia_fr':'ia_FR.UTF-8',\n'icelandic':'is_IS.ISO8859-1',\n'id':'id_ID.ISO8859-1',\n'id_id':'id_ID.ISO8859-1',\n'ig_ng':'ig_NG.UTF-8',\n'ik_ca':'ik_CA.UTF-8',\n'in':'id_ID.ISO8859-1',\n'in_id':'id_ID.ISO8859-1',\n'is':'is_IS.ISO8859-1',\n'is_is':'is_IS.ISO8859-1',\n'iso-8859-1':'en_US.ISO8859-1',\n'iso-8859-15':'en_US.ISO8859-15',\n'iso8859-1':'en_US.ISO8859-1',\n'iso8859-15':'en_US.ISO8859-15',\n'iso_8859_1':'en_US.ISO8859-1',\n'iso_8859_15':'en_US.ISO8859-15',\n'it':'it_IT.ISO8859-1',\n'it_ch':'it_CH.ISO8859-1',\n'it_it':'it_IT.ISO8859-1',\n'italian':'it_IT.ISO8859-1',\n'iu':'iu_CA.NUNACOM-8',\n'iu_ca':'iu_CA.NUNACOM-8',\n'iu_ca.nunacom8':'iu_CA.NUNACOM-8',\n'iw':'he_IL.ISO8859-8',\n'iw_il':'he_IL.ISO8859-8',\n'iw_il.utf8':'iw_IL.UTF-8',\n'ja':'ja_JP.eucJP',\n'ja_jp':'ja_JP.eucJP',\n'ja_jp.euc':'ja_JP.eucJP',\n'ja_jp.mscode':'ja_JP.SJIS',\n'ja_jp.pck':'ja_JP.SJIS',\n'japan':'ja_JP.eucJP',\n'japanese':'ja_JP.eucJP',\n'japanese-euc':'ja_JP.eucJP',\n'japanese.euc':'ja_JP.eucJP',\n'jp_jp':'ja_JP.eucJP',\n'ka':'ka_GE.GEORGIAN-ACADEMY',\n'ka_ge':'ka_GE.GEORGIAN-ACADEMY',\n'ka_ge.georgianacademy':'ka_GE.GEORGIAN-ACADEMY',\n'ka_ge.georgianps':'ka_GE.GEORGIAN-PS',\n'ka_ge.georgianrs':'ka_GE.GEORGIAN-ACADEMY',\n'kab_dz':'kab_DZ.UTF-8',\n'kk_kz':'kk_KZ.ptcp154',\n'kl':'kl_GL.ISO8859-1',\n'kl_gl':'kl_GL.ISO8859-1',\n'km_kh':'km_KH.UTF-8',\n'kn':'kn_IN.UTF-8',\n'kn_in':'kn_IN.UTF-8',\n'ko':'ko_KR.eucKR',\n'ko_kr':'ko_KR.eucKR',\n'ko_kr.euc':'ko_KR.eucKR',\n'kok_in':'kok_IN.UTF-8',\n'korean':'ko_KR.eucKR',\n'korean.euc':'ko_KR.eucKR',\n'ks':'ks_IN.UTF-8',\n'ks_in':'ks_IN.UTF-8',\n'ks_in@devanagari.utf8':'ks_IN.UTF-8@devanagari',\n'ku_tr':'ku_TR.ISO8859-9',\n'kw':'kw_GB.ISO8859-1',\n'kw_gb':'kw_GB.ISO8859-1',\n'ky':'ky_KG.UTF-8',\n'ky_kg':'ky_KG.UTF-8',\n'lb_lu':'lb_LU.UTF-8',\n'lg_ug':'lg_UG.ISO8859-10',\n'li_be':'li_BE.UTF-8',\n'li_nl':'li_NL.UTF-8',\n'lij_it':'lij_IT.UTF-8',\n'lithuanian':'lt_LT.ISO8859-13',\n'ln_cd':'ln_CD.UTF-8',\n'lo':'lo_LA.MULELAO-1',\n'lo_la':'lo_LA.MULELAO-1',\n'lo_la.cp1133':'lo_LA.IBM-CP1133',\n'lo_la.ibmcp1133':'lo_LA.IBM-CP1133',\n'lo_la.mulelao1':'lo_LA.MULELAO-1',\n'lt':'lt_LT.ISO8859-13',\n'lt_lt':'lt_LT.ISO8859-13',\n'lv':'lv_LV.ISO8859-13',\n'lv_lv':'lv_LV.ISO8859-13',\n'lzh_tw':'lzh_TW.UTF-8',\n'mag_in':'mag_IN.UTF-8',\n'mai':'mai_IN.UTF-8',\n'mai_in':'mai_IN.UTF-8',\n'mai_np':'mai_NP.UTF-8',\n'mfe_mu':'mfe_MU.UTF-8',\n'mg_mg':'mg_MG.ISO8859-15',\n'mhr_ru':'mhr_RU.UTF-8',\n'mi':'mi_NZ.ISO8859-1',\n'mi_nz':'mi_NZ.ISO8859-1',\n'miq_ni':'miq_NI.UTF-8',\n'mjw_in':'mjw_IN.UTF-8',\n'mk':'mk_MK.ISO8859-5',\n'mk_mk':'mk_MK.ISO8859-5',\n'ml':'ml_IN.UTF-8',\n'ml_in':'ml_IN.UTF-8',\n'mn_mn':'mn_MN.UTF-8',\n'mni_in':'mni_IN.UTF-8',\n'mr':'mr_IN.UTF-8',\n'mr_in':'mr_IN.UTF-8',\n'ms':'ms_MY.ISO8859-1',\n'ms_my':'ms_MY.ISO8859-1',\n'mt':'mt_MT.ISO8859-3',\n'mt_mt':'mt_MT.ISO8859-3',\n'my_mm':'my_MM.UTF-8',\n'nan_tw':'nan_TW.UTF-8',\n'nb':'nb_NO.ISO8859-1',\n'nb_no':'nb_NO.ISO8859-1',\n'nds_de':'nds_DE.UTF-8',\n'nds_nl':'nds_NL.UTF-8',\n'ne_np':'ne_NP.UTF-8',\n'nhn_mx':'nhn_MX.UTF-8',\n'niu_nu':'niu_NU.UTF-8',\n'niu_nz':'niu_NZ.UTF-8',\n'nl':'nl_NL.ISO8859-1',\n'nl_aw':'nl_AW.UTF-8',\n'nl_be':'nl_BE.ISO8859-1',\n'nl_nl':'nl_NL.ISO8859-1',\n'nn':'nn_NO.ISO8859-1',\n'nn_no':'nn_NO.ISO8859-1',\n'no':'no_NO.ISO8859-1',\n'no@nynorsk':'ny_NO.ISO8859-1',\n'no_no':'no_NO.ISO8859-1',\n'no_no.iso88591@bokmal':'no_NO.ISO8859-1',\n'no_no.iso88591@nynorsk':'no_NO.ISO8859-1',\n'norwegian':'no_NO.ISO8859-1',\n'nr':'nr_ZA.ISO8859-1',\n'nr_za':'nr_ZA.ISO8859
-1',\n'nso':'nso_ZA.ISO8859-15',\n'nso_za':'nso_ZA.ISO8859-15',\n'ny':'ny_NO.ISO8859-1',\n'ny_no':'ny_NO.ISO8859-1',\n'nynorsk':'nn_NO.ISO8859-1',\n'oc':'oc_FR.ISO8859-1',\n'oc_fr':'oc_FR.ISO8859-1',\n'om_et':'om_ET.UTF-8',\n'om_ke':'om_KE.ISO8859-1',\n'or':'or_IN.UTF-8',\n'or_in':'or_IN.UTF-8',\n'os_ru':'os_RU.UTF-8',\n'pa':'pa_IN.UTF-8',\n'pa_in':'pa_IN.UTF-8',\n'pa_pk':'pa_PK.UTF-8',\n'pap_an':'pap_AN.UTF-8',\n'pap_aw':'pap_AW.UTF-8',\n'pap_cw':'pap_CW.UTF-8',\n'pd':'pd_US.ISO8859-1',\n'pd_de':'pd_DE.ISO8859-1',\n'pd_us':'pd_US.ISO8859-1',\n'ph':'ph_PH.ISO8859-1',\n'ph_ph':'ph_PH.ISO8859-1',\n'pl':'pl_PL.ISO8859-2',\n'pl_pl':'pl_PL.ISO8859-2',\n'polish':'pl_PL.ISO8859-2',\n'portuguese':'pt_PT.ISO8859-1',\n'portuguese_brazil':'pt_BR.ISO8859-1',\n'posix':'C',\n'posix-utf2':'C',\n'pp':'pp_AN.ISO8859-1',\n'pp_an':'pp_AN.ISO8859-1',\n'ps_af':'ps_AF.UTF-8',\n'pt':'pt_PT.ISO8859-1',\n'pt_br':'pt_BR.ISO8859-1',\n'pt_pt':'pt_PT.ISO8859-1',\n'quz_pe':'quz_PE.UTF-8',\n'raj_in':'raj_IN.UTF-8',\n'ro':'ro_RO.ISO8859-2',\n'ro_ro':'ro_RO.ISO8859-2',\n'romanian':'ro_RO.ISO8859-2',\n'ru':'ru_RU.UTF-8',\n'ru_ru':'ru_RU.UTF-8',\n'ru_ua':'ru_UA.KOI8-U',\n'rumanian':'ro_RO.ISO8859-2',\n'russian':'ru_RU.KOI8-R',\n'rw':'rw_RW.ISO8859-1',\n'rw_rw':'rw_RW.ISO8859-1',\n'sa_in':'sa_IN.UTF-8',\n'sat_in':'sat_IN.UTF-8',\n'sc_it':'sc_IT.UTF-8',\n'sd':'sd_IN.UTF-8',\n'sd_in':'sd_IN.UTF-8',\n'sd_in@devanagari.utf8':'sd_IN.UTF-8@devanagari',\n'sd_pk':'sd_PK.UTF-8',\n'se_no':'se_NO.UTF-8',\n'serbocroatian':'sr_RS.UTF-8@latin',\n'sgs_lt':'sgs_LT.UTF-8',\n'sh':'sr_RS.UTF-8@latin',\n'sh_ba.iso88592@bosnia':'sr_CS.ISO8859-2',\n'sh_hr':'sh_HR.ISO8859-2',\n'sh_hr.iso88592':'hr_HR.ISO8859-2',\n'sh_sp':'sr_CS.ISO8859-2',\n'sh_yu':'sr_RS.UTF-8@latin',\n'shn_mm':'shn_MM.UTF-8',\n'shs_ca':'shs_CA.UTF-8',\n'si':'si_LK.UTF-8',\n'si_lk':'si_LK.UTF-8',\n'sid_et':'sid_ET.UTF-8',\n'sinhala':'si_LK.UTF-8',\n'sk':'sk_SK.ISO8859-2',\n'sk_sk':'sk_SK.ISO8859-2',\n'sl':'sl_SI.ISO8859-2',\n'sl_cs':'sl_CS.ISO8859-2',\n'sl_si':'sl_SI.ISO8859-2',\n'slovak':'sk_SK.ISO8859-2',\n'slovene':'sl_SI.ISO8859-2',\n'slovenian':'sl_SI.ISO8859-2',\n'sm_ws':'sm_WS.UTF-8',\n'so_dj':'so_DJ.ISO8859-1',\n'so_et':'so_ET.UTF-8',\n'so_ke':'so_KE.ISO8859-1',\n'so_so':'so_SO.ISO8859-1',\n'sp':'sr_CS.ISO8859-5',\n'sp_yu':'sr_CS.ISO8859-5',\n'spanish':'es_ES.ISO8859-1',\n'spanish_spain':'es_ES.ISO8859-1',\n'sq':'sq_AL.ISO8859-2',\n'sq_al':'sq_AL.ISO8859-2',\n'sq_mk':'sq_MK.UTF-8',\n'sr':'sr_RS.UTF-8',\n'sr@cyrillic':'sr_RS.UTF-8',\n'sr@latn':'sr_CS.UTF-8@latin',\n'sr_cs':'sr_CS.UTF-8',\n'sr_cs.iso88592@latn':'sr_CS.ISO8859-2',\n'sr_cs@latn':'sr_CS.UTF-8@latin',\n'sr_me':'sr_ME.UTF-8',\n'sr_rs':'sr_RS.UTF-8',\n'sr_rs@latn':'sr_RS.UTF-8@latin',\n'sr_sp':'sr_CS.ISO8859-2',\n'sr_yu':'sr_RS.UTF-8@latin',\n'sr_yu.cp1251@cyrillic':'sr_CS.CP1251',\n'sr_yu.iso88592':'sr_CS.ISO8859-2',\n'sr_yu.iso88595':'sr_CS.ISO8859-5',\n'sr_yu.iso88595@cyrillic':'sr_CS.ISO8859-5',\n'sr_yu.microsoftcp1251@cyrillic':'sr_CS.CP1251',\n'sr_yu.utf8':'sr_RS.UTF-8',\n'sr_yu.utf8@cyrillic':'sr_RS.UTF-8',\n'sr_yu@cyrillic':'sr_RS.UTF-8',\n'ss':'ss_ZA.ISO8859-1',\n'ss_za':'ss_ZA.ISO8859-1',\n'st':'st_ZA.ISO8859-1',\n'st_za':'st_ZA.ISO8859-1',\n'sv':'sv_SE.ISO8859-1',\n'sv_fi':'sv_FI.ISO8859-1',\n'sv_se':'sv_SE.ISO8859-1',\n'sw_ke':'sw_KE.UTF-8',\n'sw_tz':'sw_TZ.UTF-8',\n'swedish':'sv_SE.ISO8859-1',\n'szl_pl':'szl_PL.UTF-8',\n'ta':'ta_IN.TSCII-0',\n'ta_in':'ta_IN.TSCII-0',\n'ta_in.tscii':'ta_IN.TSCII-0',\n'ta_in.tscii0':'ta_IN.TSCII-0',\n'ta_lk':'ta_LK.UTF-8',\n'tcy_in.utf8':'tcy_IN.UTF-8',\n'te':'te_IN.UTF
-8',\n'te_in':'te_IN.UTF-8',\n'tg':'tg_TJ.KOI8-C',\n'tg_tj':'tg_TJ.KOI8-C',\n'th':'th_TH.ISO8859-11',\n'th_th':'th_TH.ISO8859-11',\n'th_th.tactis':'th_TH.TIS620',\n'th_th.tis620':'th_TH.TIS620',\n'thai':'th_TH.ISO8859-11',\n'the_np':'the_NP.UTF-8',\n'ti_er':'ti_ER.UTF-8',\n'ti_et':'ti_ET.UTF-8',\n'tig_er':'tig_ER.UTF-8',\n'tk_tm':'tk_TM.UTF-8',\n'tl':'tl_PH.ISO8859-1',\n'tl_ph':'tl_PH.ISO8859-1',\n'tn':'tn_ZA.ISO8859-15',\n'tn_za':'tn_ZA.ISO8859-15',\n'to_to':'to_TO.UTF-8',\n'tpi_pg':'tpi_PG.UTF-8',\n'tr':'tr_TR.ISO8859-9',\n'tr_cy':'tr_CY.ISO8859-9',\n'tr_tr':'tr_TR.ISO8859-9',\n'ts':'ts_ZA.ISO8859-1',\n'ts_za':'ts_ZA.ISO8859-1',\n'tt':'tt_RU.TATAR-CYR',\n'tt_ru':'tt_RU.TATAR-CYR',\n'tt_ru.tatarcyr':'tt_RU.TATAR-CYR',\n'tt_ru@iqtelif':'tt_RU.UTF-8@iqtelif',\n'turkish':'tr_TR.ISO8859-9',\n'ug_cn':'ug_CN.UTF-8',\n'uk':'uk_UA.KOI8-U',\n'uk_ua':'uk_UA.KOI8-U',\n'univ':'en_US.utf',\n'universal':'en_US.utf',\n'universal.utf8@ucs4':'en_US.UTF-8',\n'unm_us':'unm_US.UTF-8',\n'ur':'ur_PK.CP1256',\n'ur_in':'ur_IN.UTF-8',\n'ur_pk':'ur_PK.CP1256',\n'uz':'uz_UZ.UTF-8',\n'uz_uz':'uz_UZ.UTF-8',\n'uz_uz@cyrillic':'uz_UZ.UTF-8',\n've':'ve_ZA.UTF-8',\n've_za':'ve_ZA.UTF-8',\n'vi':'vi_VN.TCVN',\n'vi_vn':'vi_VN.TCVN',\n'vi_vn.tcvn':'vi_VN.TCVN',\n'vi_vn.tcvn5712':'vi_VN.TCVN',\n'vi_vn.viscii':'vi_VN.VISCII',\n'vi_vn.viscii111':'vi_VN.VISCII',\n'wa':'wa_BE.ISO8859-1',\n'wa_be':'wa_BE.ISO8859-1',\n'wae_ch':'wae_CH.UTF-8',\n'wal_et':'wal_ET.UTF-8',\n'wo_sn':'wo_SN.UTF-8',\n'xh':'xh_ZA.ISO8859-1',\n'xh_za':'xh_ZA.ISO8859-1',\n'yi':'yi_US.CP1255',\n'yi_us':'yi_US.CP1255',\n'yo_ng':'yo_NG.UTF-8',\n'yue_hk':'yue_HK.UTF-8',\n'yuw_pg':'yuw_PG.UTF-8',\n'zh':'zh_CN.eucCN',\n'zh_cn':'zh_CN.gb2312',\n'zh_cn.big5':'zh_TW.big5',\n'zh_cn.euc':'zh_CN.eucCN',\n'zh_hk':'zh_HK.big5hkscs',\n'zh_hk.big5hk':'zh_HK.big5hkscs',\n'zh_sg':'zh_SG.GB2312',\n'zh_sg.gbk':'zh_SG.GBK',\n'zh_tw':'zh_TW.big5',\n'zh_tw.euc':'zh_TW.eucTW',\n'zh_tw.euctw':'zh_TW.eucTW',\n'zu':'zu_ZA.ISO8859-1',\n'zu_za':'zu_ZA.ISO8859-1',\n}\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nwindows_locale={\n0x0436:\"af_ZA\",\n0x041c:\"sq_AL\",\n0x0484:\"gsw_FR\",\n0x045e:\"am_ET\",\n0x0401:\"ar_SA\",\n0x0801:\"ar_IQ\",\n0x0c01:\"ar_EG\",\n0x1001:\"ar_LY\",\n0x1401:\"ar_DZ\",\n0x1801:\"ar_MA\",\n0x1c01:\"ar_TN\",\n0x2001:\"ar_OM\",\n0x2401:\"ar_YE\",\n0x2801:\"ar_SY\",\n0x2c01:\"ar_JO\",\n0x3001:\"ar_LB\",\n0x3401:\"ar_KW\",\n0x3801:\"ar_AE\",\n0x3c01:\"ar_BH\",\n0x4001:\"ar_QA\",\n0x042b:\"hy_AM\",\n0x044d:\"as_IN\",\n0x042c:\"az_AZ\",\n0x082c:\"az_AZ\",\n0x046d:\"ba_RU\",\n0x042d:\"eu_ES\",\n0x0423:\"be_BY\",\n0x0445:\"bn_IN\",\n0x201a:\"bs_BA\",\n0x141a:\"bs_BA\",\n0x047e:\"br_FR\",\n0x0402:\"bg_BG\",\n\n0x0403:\"ca_ES\",\n0x0004:\"zh_CHS\",\n0x0404:\"zh_TW\",\n0x0804:\"zh_CN\",\n0x0c04:\"zh_HK\",\n0x1004:\"zh_SG\",\n0x1404:\"zh_MO\",\n0x7c04:\"zh_CHT\",\n0x0483:\"co_FR\",\n0x041a:\"hr_HR\",\n0x101a:\"hr_BA\",\n0x0405:\"cs_CZ\",\n0x0406:\"da_DK\",\n0x048c:\"gbz_AF\",\n0x0465:\"div_MV\",\n0x0413:\"nl_NL\",\n0x0813:\"nl_BE\",\n0x0409:\"en_US\",\n0x0809:\"en_GB\",\n0x0c09:\"en_AU\",\n0x1009:\"en_CA\",\n0x1409:\"en_NZ\",\n0x1809:\"en_IE\",\n0x1c09:\"en_ZA\",\n0x2009:\"en_JA\",\n0x2409:\"en_CB\",\n0x2809:\"en_BZ\",\n0x2c09:\"en_TT\",\n0x3009:\"en_ZW\",\n0x3409:\"en_PH\",\n0x4009:\"en_IN\",\n0x4409:\"en_MY\",\n0x4809:\"en_IN\",\n0x0425:\"et_EE\",\n0x0438:\"fo_FO\",\n0x0464:\"fil_PH\",\n0x040b:\"fi_FI\",\n0x040c:\"fr_FR\",\n0x080c:\"fr_BE\",\n0x0c0c:\"fr_CA\",\n0x100c:\"fr_CH\",\n0x140c:\"fr_LU\",\n0x180c:\"fr_MC\",\n0x0462:\"fy_NL\",\n0x0456:\"gl_ES\",\n0x0437:\"ka_GE\",\n0x0407:\"de_DE\
",\n0x0807:\"de_CH\",\n0x0c07:\"de_AT\",\n0x1007:\"de_LU\",\n0x1407:\"de_LI\",\n0x0408:\"el_GR\",\n0x046f:\"kl_GL\",\n0x0447:\"gu_IN\",\n0x0468:\"ha_NG\",\n0x040d:\"he_IL\",\n0x0439:\"hi_IN\",\n0x040e:\"hu_HU\",\n0x040f:\"is_IS\",\n0x0421:\"id_ID\",\n0x045d:\"iu_CA\",\n0x085d:\"iu_CA\",\n0x083c:\"ga_IE\",\n0x0410:\"it_IT\",\n0x0810:\"it_CH\",\n0x0411:\"ja_JP\",\n0x044b:\"kn_IN\",\n0x043f:\"kk_KZ\",\n0x0453:\"kh_KH\",\n0x0486:\"qut_GT\",\n0x0487:\"rw_RW\",\n0x0457:\"kok_IN\",\n0x0412:\"ko_KR\",\n0x0440:\"ky_KG\",\n0x0454:\"lo_LA\",\n0x0426:\"lv_LV\",\n0x0427:\"lt_LT\",\n0x082e:\"dsb_DE\",\n0x046e:\"lb_LU\",\n0x042f:\"mk_MK\",\n0x043e:\"ms_MY\",\n0x083e:\"ms_BN\",\n0x044c:\"ml_IN\",\n0x043a:\"mt_MT\",\n0x0481:\"mi_NZ\",\n0x047a:\"arn_CL\",\n0x044e:\"mr_IN\",\n0x047c:\"moh_CA\",\n0x0450:\"mn_MN\",\n0x0850:\"mn_CN\",\n0x0461:\"ne_NP\",\n0x0414:\"nb_NO\",\n0x0814:\"nn_NO\",\n0x0482:\"oc_FR\",\n0x0448:\"or_IN\",\n0x0463:\"ps_AF\",\n0x0429:\"fa_IR\",\n0x0415:\"pl_PL\",\n0x0416:\"pt_BR\",\n0x0816:\"pt_PT\",\n0x0446:\"pa_IN\",\n0x046b:\"quz_BO\",\n0x086b:\"quz_EC\",\n0x0c6b:\"quz_PE\",\n0x0418:\"ro_RO\",\n0x0417:\"rm_CH\",\n0x0419:\"ru_RU\",\n0x243b:\"smn_FI\",\n0x103b:\"smj_NO\",\n0x143b:\"smj_SE\",\n0x043b:\"se_NO\",\n0x083b:\"se_SE\",\n0x0c3b:\"se_FI\",\n0x203b:\"sms_FI\",\n0x183b:\"sma_NO\",\n0x1c3b:\"sma_SE\",\n0x044f:\"sa_IN\",\n0x0c1a:\"sr_SP\",\n0x1c1a:\"sr_BA\",\n0x081a:\"sr_SP\",\n0x181a:\"sr_BA\",\n0x045b:\"si_LK\",\n0x046c:\"ns_ZA\",\n0x0432:\"tn_ZA\",\n0x041b:\"sk_SK\",\n0x0424:\"sl_SI\",\n0x040a:\"es_ES\",\n0x080a:\"es_MX\",\n0x0c0a:\"es_ES\",\n0x100a:\"es_GT\",\n0x140a:\"es_CR\",\n0x180a:\"es_PA\",\n0x1c0a:\"es_DO\",\n0x200a:\"es_VE\",\n0x240a:\"es_CO\",\n0x280a:\"es_PE\",\n0x2c0a:\"es_AR\",\n0x300a:\"es_EC\",\n0x340a:\"es_CL\",\n0x380a:\"es_UR\",\n0x3c0a:\"es_PY\",\n0x400a:\"es_BO\",\n0x440a:\"es_SV\",\n0x480a:\"es_HN\",\n0x4c0a:\"es_NI\",\n0x500a:\"es_PR\",\n0x540a:\"es_US\",\n\n0x0441:\"sw_KE\",\n0x041d:\"sv_SE\",\n0x081d:\"sv_FI\",\n0x045a:\"syr_SY\",\n0x0428:\"tg_TJ\",\n0x085f:\"tmz_DZ\",\n0x0449:\"ta_IN\",\n0x0444:\"tt_RU\",\n0x044a:\"te_IN\",\n0x041e:\"th_TH\",\n0x0851:\"bo_BT\",\n0x0451:\"bo_CN\",\n0x041f:\"tr_TR\",\n0x0442:\"tk_TM\",\n0x0480:\"ug_CN\",\n0x0422:\"uk_UA\",\n0x042e:\"wen_DE\",\n0x0420:\"ur_PK\",\n0x0820:\"ur_IN\",\n0x0443:\"uz_UZ\",\n0x0843:\"uz_UZ\",\n0x042a:\"vi_VN\",\n0x0452:\"cy_GB\",\n0x0488:\"wo_SN\",\n0x0434:\"xh_ZA\",\n0x0485:\"sah_RU\",\n0x0478:\"ii_CN\",\n0x046a:\"yo_NG\",\n0x0435:\"zu_ZA\",\n}\n\ndef _print_locale():\n\n ''\n \n categories={}\n def _init_categories(categories=categories):\n for k,v in globals().items():\n if k[:3]=='LC_':\n categories[k]=v\n _init_categories()\n del categories['LC_ALL']\n \n print('Locale defaults as determined by getdefaultlocale():')\n print('-'*72)\n lang,enc=getdefaultlocale()\n print('Language: ',lang or '(undefined)')\n print('Encoding: ',enc or '(undefined)')\n print()\n \n print('Locale settings on startup:')\n print('-'*72)\n for name,category in categories.items():\n print(name,'...')\n lang,enc=getlocale(category)\n print(' Language: ',lang or '(undefined)')\n print(' Encoding: ',enc or '(undefined)')\n print()\n \n print()\n print('Locale settings after calling resetlocale():')\n print('-'*72)\n resetlocale()\n for name,category in categories.items():\n print(name,'...')\n lang,enc=getlocale(category)\n print(' Language: ',lang or '(undefined)')\n print(' Encoding: ',enc or '(undefined)')\n print()\n \n try :\n setlocale(LC_ALL,\"\")\n except :\n print('NOTE:')\n print('setlocale(LC_ALL, \"\") does not 
support the default locale')\n print('given in the OS environment variables.')\n else :\n print()\n print('Locale settings after calling setlocale(LC_ALL, \"\"):')\n print('-'*72)\n for name,category in categories.items():\n print(name,'...')\n lang,enc=getlocale(category)\n print(' Language: ',lang or '(undefined)')\n print(' Encoding: ',enc or '(undefined)')\n print()\n \n \n \ntry :\n LC_MESSAGES\nexcept NameError:\n pass\nelse :\n __all__.append(\"LC_MESSAGES\")\n \nif __name__ =='__main__':\n print('Locale aliasing:')\n print()\n _print_locale()\n print()\n print('Number formatting:')\n print()\n _test()\n", ["_bootlocale", "_collections_abc", "_locale", "builtins", "encodings", "encodings.aliases", "functools", "os", "re", "sys", "warnings"]], "mimetypes": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nimport os\nimport sys\nimport posixpath\nimport urllib.parse\ntry :\n import winreg as _winreg\nexcept ImportError:\n _winreg=None\n \n__all__=[\n\"knownfiles\",\"inited\",\"MimeTypes\",\n\"guess_type\",\"guess_all_extensions\",\"guess_extension\",\n\"add_type\",\"init\",\"read_mime_types\",\n\"suffix_map\",\"encodings_map\",\"types_map\",\"common_types\"\n]\n\nknownfiles=[\n\"/etc/mime.types\",\n\"/etc/httpd/mime.types\",\n\"/etc/httpd/conf/mime.types\",\n\"/etc/apache/mime.types\",\n\"/etc/apache2/mime.types\",\n\"/usr/local/etc/httpd/conf/mime.types\",\n\"/usr/local/lib/netscape/mime.types\",\n\"/usr/local/etc/httpd/conf/mime.types\",\n\"/usr/local/etc/mime.types\",\n]\n\ninited=False\n_db=None\n\n\nclass MimeTypes:\n ''\n\n\n\n\n \n \n def __init__(self,filenames=(),strict=True ):\n if not inited:\n init()\n self.encodings_map=_encodings_map_default.copy()\n self.suffix_map=_suffix_map_default.copy()\n self.types_map=({},{})\n self.types_map_inv=({},{})\n for (ext,type)in _types_map_default.items():\n self.add_type(type,ext,True )\n for (ext,type)in _common_types_default.items():\n self.add_type(type,ext,False )\n for name in filenames:\n self.read(name,strict)\n \n def add_type(self,type,ext,strict=True ):\n ''\n\n\n\n\n\n\n\n\n\n \n self.types_map[strict][ext]=type\n exts=self.types_map_inv[strict].setdefault(type,[])\n if ext not in exts:\n exts.append(ext)\n \n def guess_type(self,url,strict=True ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n url=os.fspath(url)\n scheme,url=urllib.parse._splittype(url)\n if scheme =='data':\n \n \n \n \n \n \n comma=url.find(',')\n if comma <0:\n \n return None ,None\n semi=url.find(';',0,comma)\n if semi >=0:\n type=url[:semi]\n else :\n type=url[:comma]\n if '='in type or '/'not in type:\n type='text/plain'\n return type,None\n base,ext=posixpath.splitext(url)\n while ext in self.suffix_map:\n base,ext=posixpath.splitext(base+self.suffix_map[ext])\n if ext in self.encodings_map:\n encoding=self.encodings_map[ext]\n base,ext=posixpath.splitext(base)\n else :\n encoding=None\n types_map=self.types_map[True ]\n if ext in types_map:\n return types_map[ext],encoding\n elif ext.lower()in types_map:\n return types_map[ext.lower()],encoding\n elif strict:\n return None ,encoding\n types_map=self.types_map[False ]\n if ext in types_map:\n return types_map[ext],encoding\n elif ext.lower()in types_map:\n return types_map[ext.lower()],encoding\n else :\n return None ,encoding\n \n def guess_all_extensions(self,type,strict=True ):\n ''\n\n\n\n\n\n\n\n\n \n type=type.lower()\n extensions=self.types_map_inv[True ].get(type,[])\n if not strict:\n for ext in self.types_map_inv[False ].get(type,[]):\n if ext not in extensions:\n 
extensions.append(ext)\n return extensions\n \n def guess_extension(self,type,strict=True ):\n ''\n\n\n\n\n\n\n\n\n\n\n \n extensions=self.guess_all_extensions(type,strict)\n if not extensions:\n return None\n return extensions[0]\n \n def read(self,filename,strict=True ):\n ''\n\n\n\n\n\n \n with open(filename,encoding='utf-8')as fp:\n self.readfp(fp,strict)\n \n def readfp(self,fp,strict=True ):\n ''\n\n\n\n\n\n \n while 1:\n line=fp.readline()\n if not line:\n break\n words=line.split()\n for i in range(len(words)):\n if words[i][0]=='#':\n del words[i:]\n break\n if not words:\n continue\n type,suffixes=words[0],words[1:]\n for suff in suffixes:\n self.add_type(type,'.'+suff,strict)\n \n def read_windows_registry(self,strict=True ):\n ''\n\n\n\n\n\n \n \n \n if not _winreg:\n return\n \n def enum_types(mimedb):\n i=0\n while True :\n try :\n ctype=_winreg.EnumKey(mimedb,i)\n except OSError:\n break\n else :\n if '\\0'not in ctype:\n yield ctype\n i +=1\n \n with _winreg.OpenKey(_winreg.HKEY_CLASSES_ROOT,'')as hkcr:\n for subkeyname in enum_types(hkcr):\n try :\n with _winreg.OpenKey(hkcr,subkeyname)as subkey:\n \n if not subkeyname.startswith(\".\"):\n continue\n \n mimetype,datatype=_winreg.QueryValueEx(\n subkey,'Content Type')\n if datatype !=_winreg.REG_SZ:\n continue\n self.add_type(mimetype,subkeyname,strict)\n except OSError:\n continue\n \ndef guess_type(url,strict=True ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if _db is None :\n init()\n return _db.guess_type(url,strict)\n \n \ndef guess_all_extensions(type,strict=True ):\n ''\n\n\n\n\n\n\n\n\n\n\n \n if _db is None :\n init()\n return _db.guess_all_extensions(type,strict)\n \ndef guess_extension(type,strict=True ):\n ''\n\n\n\n\n\n\n\n\n\n \n if _db is None :\n init()\n return _db.guess_extension(type,strict)\n \ndef add_type(type,ext,strict=True ):\n ''\n\n\n\n\n\n\n\n\n\n \n if _db is None :\n init()\n return _db.add_type(type,ext,strict)\n \n \ndef init(files=None ):\n global suffix_map,types_map,encodings_map,common_types\n global inited,_db\n inited=True\n \n if files is None or _db is None :\n db=MimeTypes()\n if _winreg:\n db.read_windows_registry()\n \n if files is None :\n files=knownfiles\n else :\n files=knownfiles+list(files)\n else :\n db=_db\n \n for file in files:\n if os.path.isfile(file):\n db.read(file)\n encodings_map=db.encodings_map\n suffix_map=db.suffix_map\n types_map=db.types_map[True ]\n common_types=db.types_map[False ]\n \n _db=db\n \n \ndef read_mime_types(file):\n try :\n f=open(file,encoding='utf-8')\n except OSError:\n return None\n with f:\n db=MimeTypes()\n db.readfp(f,True )\n return db.types_map[True ]\n \n \ndef _default_mime_types():\n global suffix_map,_suffix_map_default\n global encodings_map,_encodings_map_default\n global types_map,_types_map_default\n global common_types,_common_types_default\n \n suffix_map=_suffix_map_default={\n '.svgz':'.svg.gz',\n '.tgz':'.tar.gz',\n '.taz':'.tar.gz',\n '.tz':'.tar.gz',\n '.tbz2':'.tar.bz2',\n '.txz':'.tar.xz',\n }\n \n encodings_map=_encodings_map_default={\n '.gz':'gzip',\n '.Z':'compress',\n '.bz2':'bzip2',\n '.xz':'xz',\n '.br':'br',\n }\n \n \n \n \n \n \n \n \n types_map=_types_map_default={\n '.js':'application/javascript',\n '.mjs':'application/javascript',\n '.json':'application/json',\n '.webmanifest':'application/manifest+json',\n '.doc':'application/msword',\n '.dot':'application/msword',\n '.wiz':'application/msword',\n '.bin':'application/octet-stream',\n '.a':'application/octet-stream',\n '.dll':'application/octet-stream',\n 
'.exe':'application/octet-stream',\n '.o':'application/octet-stream',\n '.obj':'application/octet-stream',\n '.so':'application/octet-stream',\n '.oda':'application/oda',\n '.pdf':'application/pdf',\n '.p7c':'application/pkcs7-mime',\n '.ps':'application/postscript',\n '.ai':'application/postscript',\n '.eps':'application/postscript',\n '.m3u':'application/vnd.apple.mpegurl',\n '.m3u8':'application/vnd.apple.mpegurl',\n '.xls':'application/vnd.ms-excel',\n '.xlb':'application/vnd.ms-excel',\n '.ppt':'application/vnd.ms-powerpoint',\n '.pot':'application/vnd.ms-powerpoint',\n '.ppa':'application/vnd.ms-powerpoint',\n '.pps':'application/vnd.ms-powerpoint',\n '.pwz':'application/vnd.ms-powerpoint',\n '.wasm':'application/wasm',\n '.bcpio':'application/x-bcpio',\n '.cpio':'application/x-cpio',\n '.csh':'application/x-csh',\n '.dvi':'application/x-dvi',\n '.gtar':'application/x-gtar',\n '.hdf':'application/x-hdf',\n '.latex':'application/x-latex',\n '.mif':'application/x-mif',\n '.cdf':'application/x-netcdf',\n '.nc':'application/x-netcdf',\n '.p12':'application/x-pkcs12',\n '.pfx':'application/x-pkcs12',\n '.ram':'application/x-pn-realaudio',\n '.pyc':'application/x-python-code',\n '.pyo':'application/x-python-code',\n '.sh':'application/x-sh',\n '.shar':'application/x-shar',\n '.swf':'application/x-shockwave-flash',\n '.sv4cpio':'application/x-sv4cpio',\n '.sv4crc':'application/x-sv4crc',\n '.tar':'application/x-tar',\n '.tcl':'application/x-tcl',\n '.tex':'application/x-tex',\n '.texi':'application/x-texinfo',\n '.texinfo':'application/x-texinfo',\n '.roff':'application/x-troff',\n '.t':'application/x-troff',\n '.tr':'application/x-troff',\n '.man':'application/x-troff-man',\n '.me':'application/x-troff-me',\n '.ms':'application/x-troff-ms',\n '.ustar':'application/x-ustar',\n '.src':'application/x-wais-source',\n '.xsl':'application/xml',\n '.rdf':'application/xml',\n '.wsdl':'application/xml',\n '.xpdl':'application/xml',\n '.zip':'application/zip',\n '.au':'audio/basic',\n '.snd':'audio/basic',\n '.mp3':'audio/mpeg',\n '.mp2':'audio/mpeg',\n '.aif':'audio/x-aiff',\n '.aifc':'audio/x-aiff',\n '.aiff':'audio/x-aiff',\n '.ra':'audio/x-pn-realaudio',\n '.wav':'audio/x-wav',\n '.bmp':'image/bmp',\n '.gif':'image/gif',\n '.ief':'image/ief',\n '.jpg':'image/jpeg',\n '.jpe':'image/jpeg',\n '.jpeg':'image/jpeg',\n '.png':'image/png',\n '.svg':'image/svg+xml',\n '.tiff':'image/tiff',\n '.tif':'image/tiff',\n '.ico':'image/vnd.microsoft.icon',\n '.ras':'image/x-cmu-raster',\n '.bmp':'image/x-ms-bmp',\n '.pnm':'image/x-portable-anymap',\n '.pbm':'image/x-portable-bitmap',\n '.pgm':'image/x-portable-graymap',\n '.ppm':'image/x-portable-pixmap',\n '.rgb':'image/x-rgb',\n '.xbm':'image/x-xbitmap',\n '.xpm':'image/x-xpixmap',\n '.xwd':'image/x-xwindowdump',\n '.eml':'message/rfc822',\n '.mht':'message/rfc822',\n '.mhtml':'message/rfc822',\n '.nws':'message/rfc822',\n '.css':'text/css',\n '.csv':'text/csv',\n '.html':'text/html',\n '.htm':'text/html',\n '.txt':'text/plain',\n '.bat':'text/plain',\n '.c':'text/plain',\n '.h':'text/plain',\n '.ksh':'text/plain',\n '.pl':'text/plain',\n '.rtx':'text/richtext',\n '.tsv':'text/tab-separated-values',\n '.py':'text/x-python',\n '.etx':'text/x-setext',\n '.sgm':'text/x-sgml',\n '.sgml':'text/x-sgml',\n '.vcf':'text/x-vcard',\n '.xml':'text/xml',\n '.mp4':'video/mp4',\n '.mpeg':'video/mpeg',\n '.m1v':'video/mpeg',\n '.mpa':'video/mpeg',\n '.mpe':'video/mpeg',\n '.mpg':'video/mpeg',\n '.mov':'video/quicktime',\n '.qt':'video/quicktime',\n '.webm':'video/webm',\n 
'.avi':'video/x-msvideo',\n '.movie':'video/x-sgi-movie',\n }\n \n \n \n \n \n common_types=_common_types_default={\n '.rtf':'application/rtf',\n '.midi':'audio/midi',\n '.mid':'audio/midi',\n '.jpg':'image/jpg',\n '.pict':'image/pict',\n '.pct':'image/pict',\n '.pic':'image/pict',\n '.xul':'text/xul',\n }\n \n \n_default_mime_types()\n\n\ndef _main():\n import getopt\n \n USAGE=\"\"\"\\\nUsage: mimetypes.py [options] type\n\nOptions:\n --help / -h -- print this message and exit\n --lenient / -l -- additionally search of some common, but non-standard\n types.\n --extension / -e -- guess extension instead of type\n\nMore than one type argument may be given.\n\"\"\"\n \n def usage(code,msg=''):\n print(USAGE)\n if msg:print(msg)\n sys.exit(code)\n \n try :\n opts,args=getopt.getopt(sys.argv[1:],'hle',\n ['help','lenient','extension'])\n except getopt.error as msg:\n usage(1,msg)\n \n strict=1\n extension=0\n for opt,arg in opts:\n if opt in ('-h','--help'):\n usage(0)\n elif opt in ('-l','--lenient'):\n strict=0\n elif opt in ('-e','--extension'):\n extension=1\n for gtype in args:\n if extension:\n guess=guess_extension(gtype,strict)\n if not guess:print(\"I don't know anything about type\",gtype)\n else :print(guess)\n else :\n guess,encoding=guess_type(gtype,strict)\n if not guess:print(\"I don't know anything about type\",gtype)\n else :print('type:',guess,'encoding:',encoding)\n \n \nif __name__ =='__main__':\n _main()\n", ["getopt", "os", "posixpath", "sys", "urllib.parse", "winreg"]], "nntplib": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nimport re\nimport socket\nimport collections\nimport datetime\nimport sys\n\ntry :\n import ssl\nexcept ImportError:\n _have_ssl=False\nelse :\n _have_ssl=True\n \nfrom email.header import decode_header as _email_decode_header\nfrom socket import _GLOBAL_DEFAULT_TIMEOUT\n\n__all__=[\"NNTP\",\n\"NNTPError\",\"NNTPReplyError\",\"NNTPTemporaryError\",\n\"NNTPPermanentError\",\"NNTPProtocolError\",\"NNTPDataError\",\n\"decode_header\",\n]\n\n\n\n\n\n_MAXLINE=2048\n\n\n\nclass NNTPError(Exception):\n ''\n def __init__(self,*args):\n Exception.__init__(self,*args)\n try :\n self.response=args[0]\n except IndexError:\n self.response='No response given'\n \nclass NNTPReplyError(NNTPError):\n ''\n pass\n \nclass NNTPTemporaryError(NNTPError):\n ''\n pass\n \nclass NNTPPermanentError(NNTPError):\n ''\n pass\n \nclass NNTPProtocolError(NNTPError):\n ''\n pass\n \nclass NNTPDataError(NNTPError):\n ''\n pass\n \n \n \nNNTP_PORT=119\nNNTP_SSL_PORT=563\n\n\n_LONGRESP={\n'100',\n'101',\n'211',\n'215',\n'220',\n'221',\n'222',\n'224',\n'225',\n'230',\n'231',\n'282',\n}\n\n\n_DEFAULT_OVERVIEW_FMT=[\n\"subject\",\"from\",\"date\",\"message-id\",\"references\",\":bytes\",\":lines\"]\n\n\n_OVERVIEW_FMT_ALTERNATIVES={\n'bytes':':bytes',\n'lines':':lines',\n}\n\n\n_CRLF=b'\\r\\n'\n\nGroupInfo=collections.namedtuple('GroupInfo',\n['group','last','first','flag'])\n\nArticleInfo=collections.namedtuple('ArticleInfo',\n['number','message_id','lines'])\n\n\n\ndef decode_header(header_str):\n ''\n \n parts=[]\n for v,enc in _email_decode_header(header_str):\n if isinstance(v,bytes):\n parts.append(v.decode(enc or 'ascii'))\n else :\n parts.append(v)\n return ''.join(parts)\n \ndef _parse_overview_fmt(lines):\n ''\n\n\n \n fmt=[]\n for line in lines:\n if line[0]==':':\n \n name,_,suffix=line[1:].partition(':')\n name=':'+name\n else :\n \n name,_,suffix=line.partition(':')\n 
name=name.lower()\n name=_OVERVIEW_FMT_ALTERNATIVES.get(name,name)\n \n fmt.append(name)\n defaults=_DEFAULT_OVERVIEW_FMT\n if len(fmt)=len(fmt):\n \n \n \n continue\n field_name=fmt[i]\n is_metadata=field_name.startswith(':')\n if i >=n_defaults and not is_metadata:\n \n \n h=field_name+\": \"\n if token and token[:len(h)].lower()!=h:\n raise NNTPDataError(\"OVER/XOVER response doesn't include \"\n \"names of additional headers\")\n token=token[len(h):]if token else None\n fields[fmt[i]]=token\n overview.append((article_number,fields))\n return overview\n \ndef _parse_datetime(date_str,time_str=None ):\n ''\n\n\n \n if time_str is None :\n time_str=date_str[-6:]\n date_str=date_str[:-6]\n hours=int(time_str[:2])\n minutes=int(time_str[2:4])\n seconds=int(time_str[4:])\n year=int(date_str[:-4])\n month=int(date_str[-4:-2])\n day=int(date_str[-2:])\n \n \n if year <70:\n year +=2000\n elif year <100:\n year +=1900\n return datetime.datetime(year,month,day,hours,minutes,seconds)\n \ndef _unparse_datetime(dt,legacy=False ):\n ''\n\n\n\n\n\n\n\n\n\n\n \n if not isinstance(dt,datetime.datetime):\n time_str=\"000000\"\n else :\n time_str=\"{0.hour:02d}{0.minute:02d}{0.second:02d}\".format(dt)\n y=dt.year\n if legacy:\n y=y %100\n date_str=\"{0:02d}{1.month:02d}{1.day:02d}\".format(y,dt)\n else :\n date_str=\"{0:04d}{1.month:02d}{1.day:02d}\".format(y,dt)\n return date_str,time_str\n \n \nif _have_ssl:\n\n def _encrypt_on(sock,context,hostname):\n ''\n\n\n\n\n \n \n if context is None :\n context=ssl._create_stdlib_context()\n return context.wrap_socket(sock,server_hostname=hostname)\n \n \n \nclass NNTP:\n\n\n\n\n\n\n\n\n\n\n\n\n encoding='utf-8'\n errors='surrogateescape'\n \n def __init__(self,host,port=NNTP_PORT,user=None ,password=None ,\n readermode=None ,usenetrc=False ,\n timeout=_GLOBAL_DEFAULT_TIMEOUT):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n self.host=host\n self.port=port\n self.sock=self._create_socket(timeout)\n self.file=None\n try :\n self.file=self.sock.makefile(\"rwb\")\n self._base_init(readermode)\n if user or usenetrc:\n self.login(user,password,usenetrc)\n except :\n if self.file:\n self.file.close()\n self.sock.close()\n raise\n \n def _base_init(self,readermode):\n ''\n\n \n self.debugging=0\n self.welcome=self._getresp()\n \n \n self._caps=None\n self.getcapabilities()\n \n \n \n \n \n \n \n \n self.readermode_afterauth=False\n if readermode and 'READER'not in self._caps:\n self._setreadermode()\n if not self.readermode_afterauth:\n \n self._caps=None\n self.getcapabilities()\n \n \n \n \n self.tls_on=False\n \n \n self.authenticated=False\n \n def __enter__(self):\n return self\n \n def __exit__(self,*args):\n is_connected=lambda :hasattr(self,\"file\")\n if is_connected():\n try :\n self.quit()\n except (OSError,EOFError):\n pass\n finally :\n if is_connected():\n self._close()\n \n def _create_socket(self,timeout):\n if timeout is not None and not timeout:\n raise ValueError('Non-blocking socket (timeout=0) is not supported')\n sys.audit(\"nntplib.connect\",self,self.host,self.port)\n return socket.create_connection((self.host,self.port),timeout)\n \n def getwelcome(self):\n ''\n\n\n \n \n if self.debugging:print('*welcome*',repr(self.welcome))\n return self.welcome\n \n def getcapabilities(self):\n ''\n\n \n if self._caps is None :\n self.nntp_version=1\n self.nntp_implementation=None\n try :\n resp,caps=self.capabilities()\n except (NNTPPermanentError,NNTPTemporaryError):\n \n self._caps={}\n else :\n self._caps=caps\n if 'VERSION'in caps:\n \n \n 
self.nntp_version=max(map(int,caps['VERSION']))\n if 'IMPLEMENTATION'in caps:\n self.nntp_implementation=' '.join(caps['IMPLEMENTATION'])\n return self._caps\n \n def set_debuglevel(self,level):\n ''\n\n\n \n \n self.debugging=level\n debug=set_debuglevel\n \n def _putline(self,line):\n ''\n \n sys.audit(\"nntplib.putline\",self,line)\n line=line+_CRLF\n if self.debugging >1:print('*put*',repr(line))\n self.file.write(line)\n self.file.flush()\n \n def _putcmd(self,line):\n ''\n \n if self.debugging:print('*cmd*',repr(line))\n line=line.encode(self.encoding,self.errors)\n self._putline(line)\n \n def _getline(self,strip_crlf=True ):\n ''\n\n \n line=self.file.readline(_MAXLINE+1)\n if len(line)>_MAXLINE:\n raise NNTPDataError('line too long')\n if self.debugging >1:\n print('*get*',repr(line))\n if not line:raise EOFError\n if strip_crlf:\n if line[-2:]==_CRLF:\n line=line[:-2]\n elif line[-1:]in _CRLF:\n line=line[:-1]\n return line\n \n def _getresp(self):\n ''\n\n \n resp=self._getline()\n if self.debugging:print('*resp*',repr(resp))\n resp=resp.decode(self.encoding,self.errors)\n c=resp[:1]\n if c =='4':\n raise NNTPTemporaryError(resp)\n if c =='5':\n raise NNTPPermanentError(resp)\n if c not in '123':\n raise NNTPProtocolError(resp)\n return resp\n \n def _getlongresp(self,file=None ):\n ''\n\n\n\n\n\n \n \n openedFile=None\n try :\n \n if isinstance(file,(str,bytes)):\n openedFile=file=open(file,\"wb\")\n \n resp=self._getresp()\n if resp[:3]not in _LONGRESP:\n raise NNTPReplyError(resp)\n \n lines=[]\n if file is not None :\n \n terminators=(b'.'+_CRLF,b'.\\n')\n while 1:\n line=self._getline(False )\n if line in terminators:\n break\n if line.startswith(b'..'):\n line=line[1:]\n file.write(line)\n else :\n terminator=b'.'\n while 1:\n line=self._getline()\n if line ==terminator:\n break\n if line.startswith(b'..'):\n line=line[1:]\n lines.append(line)\n finally :\n \n if openedFile:\n openedFile.close()\n \n return resp,lines\n \n def _shortcmd(self,line):\n ''\n \n self._putcmd(line)\n return self._getresp()\n \n def _longcmd(self,line,file=None ):\n ''\n \n self._putcmd(line)\n return self._getlongresp(file)\n \n def _longcmdstring(self,line,file=None ):\n ''\n\n\n \n self._putcmd(line)\n resp,list=self._getlongresp(file)\n return resp,[line.decode(self.encoding,self.errors)\n for line in list]\n \n def _getoverviewfmt(self):\n ''\n \n try :\n return self._cachedoverviewfmt\n except AttributeError:\n pass\n try :\n resp,lines=self._longcmdstring(\"LIST OVERVIEW.FMT\")\n except NNTPPermanentError:\n \n fmt=_DEFAULT_OVERVIEW_FMT[:]\n else :\n fmt=_parse_overview_fmt(lines)\n self._cachedoverviewfmt=fmt\n return fmt\n \n def _grouplist(self,lines):\n \n return [GroupInfo(*line.split())for line in lines]\n \n def capabilities(self):\n ''\n\n\n\n\n \n caps={}\n resp,lines=self._longcmdstring(\"CAPABILITIES\")\n for line in lines:\n name,*tokens=line.split()\n caps[name]=tokens\n return resp,caps\n \n def newgroups(self,date,*,file=None ):\n ''\n\n\n\n\n \n if not isinstance(date,(datetime.date,datetime.date)):\n raise TypeError(\n \"the date parameter must be a date or datetime object, \"\n \"not '{:40}'\".format(date.__class__.__name__))\n date_str,time_str=_unparse_datetime(date,self.nntp_version <2)\n cmd='NEWGROUPS {0} {1}'.format(date_str,time_str)\n resp,lines=self._longcmdstring(cmd,file)\n return resp,self._grouplist(lines)\n \n def newnews(self,group,date,*,file=None ):\n ''\n\n\n\n\n\n \n if not isinstance(date,(datetime.date,datetime.date)):\n raise TypeError(\n \"the date 
parameter must be a date or datetime object, \"\n \"not '{:40}'\".format(date.__class__.__name__))\n date_str,time_str=_unparse_datetime(date,self.nntp_version <2)\n cmd='NEWNEWS {0} {1} {2}'.format(group,date_str,time_str)\n return self._longcmdstring(cmd,file)\n \n def list(self,group_pattern=None ,*,file=None ):\n ''\n\n\n\n\n\n \n if group_pattern is not None :\n command='LIST ACTIVE '+group_pattern\n else :\n command='LIST'\n resp,lines=self._longcmdstring(command,file)\n return resp,self._grouplist(lines)\n \n def _getdescriptions(self,group_pattern,return_all):\n line_pat=re.compile('^(?P[^ \\t]+)[ \\t]+(.*)$')\n \n resp,lines=self._longcmdstring('LIST NEWSGROUPS '+group_pattern)\n if not resp.startswith('215'):\n \n \n \n resp,lines=self._longcmdstring('XGTITLE '+group_pattern)\n groups={}\n for raw_line in lines:\n match=line_pat.search(raw_line.strip())\n if match:\n name,desc=match.group(1,2)\n if not return_all:\n return desc\n groups[name]=desc\n if return_all:\n return resp,groups\n else :\n \n return ''\n \n def description(self,group):\n ''\n\n\n\n\n\n\n\n\n \n return self._getdescriptions(group,False )\n \n def descriptions(self,group_pattern):\n ''\n return self._getdescriptions(group_pattern,True )\n \n def group(self,name):\n ''\n\n\n\n\n\n\n\n \n resp=self._shortcmd('GROUP '+name)\n if not resp.startswith('211'):\n raise NNTPReplyError(resp)\n words=resp.split()\n count=first=last=0\n n=len(words)\n if n >1:\n count=words[1]\n if n >2:\n first=words[2]\n if n >3:\n last=words[3]\n if n >4:\n name=words[4].lower()\n return resp,int(count),int(first),int(last),name\n \n def help(self,*,file=None ):\n ''\n\n\n\n\n\n \n return self._longcmdstring('HELP',file)\n \n def _statparse(self,resp):\n ''\n \n if not resp.startswith('22'):\n raise NNTPReplyError(resp)\n words=resp.split()\n art_num=int(words[1])\n message_id=words[2]\n return resp,art_num,message_id\n \n def _statcmd(self,line):\n ''\n resp=self._shortcmd(line)\n return self._statparse(resp)\n \n def stat(self,message_spec=None ):\n ''\n\n\n\n\n\n\n \n if message_spec:\n return self._statcmd('STAT {0}'.format(message_spec))\n else :\n return self._statcmd('STAT')\n \n def next(self):\n ''\n return self._statcmd('NEXT')\n \n def last(self):\n ''\n return self._statcmd('LAST')\n \n def _artcmd(self,line,file=None ):\n ''\n resp,lines=self._longcmd(line,file)\n resp,art_num,message_id=self._statparse(resp)\n return resp,ArticleInfo(art_num,message_id,lines)\n \n def head(self,message_spec=None ,*,file=None ):\n ''\n\n\n\n\n\n \n if message_spec is not None :\n cmd='HEAD {0}'.format(message_spec)\n else :\n cmd='HEAD'\n return self._artcmd(cmd,file)\n \n def body(self,message_spec=None ,*,file=None ):\n ''\n\n\n\n\n\n \n if message_spec is not None :\n cmd='BODY {0}'.format(message_spec)\n else :\n cmd='BODY'\n return self._artcmd(cmd,file)\n \n def article(self,message_spec=None ,*,file=None ):\n ''\n\n\n\n\n\n \n if message_spec is not None :\n cmd='ARTICLE {0}'.format(message_spec)\n else :\n cmd='ARTICLE'\n return self._artcmd(cmd,file)\n \n def slave(self):\n ''\n\n \n return self._shortcmd('SLAVE')\n \n def xhdr(self,hdr,str,*,file=None ):\n ''\n\n\n\n\n\n\n \n pat=re.compile('^([0-9]+) ?(.*)\\n?')\n resp,lines=self._longcmdstring('XHDR {0} {1}'.format(hdr,str),file)\n def remove_number(line):\n m=pat.match(line)\n return m.group(1,2)if m else line\n return resp,[remove_number(line)for line in lines]\n \n def xover(self,start,end,*,file=None ):\n ''\n\n\n\n\n\n\n \n resp,lines=self._longcmdstring('XOVER 
{0}-{1}'.format(start,end),\n file)\n fmt=self._getoverviewfmt()\n return resp,_parse_overview(lines,fmt)\n \n def over(self,message_spec,*,file=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n cmd='OVER'if 'OVER'in self._caps else 'XOVER'\n if isinstance(message_spec,(tuple,list)):\n start,end=message_spec\n cmd +=' {0}-{1}'.format(start,end or '')\n elif message_spec is not None :\n cmd=cmd+' '+message_spec\n resp,lines=self._longcmdstring(cmd,file)\n fmt=self._getoverviewfmt()\n return resp,_parse_overview(lines,fmt)\n \n def date(self):\n ''\n\n\n\n \n resp=self._shortcmd(\"DATE\")\n if not resp.startswith('111'):\n raise NNTPReplyError(resp)\n elem=resp.split()\n if len(elem)!=2:\n raise NNTPDataError(resp)\n date=elem[1]\n if len(date)!=14:\n raise NNTPDataError(resp)\n return resp,_parse_datetime(date,None )\n \n def _post(self,command,f):\n resp=self._shortcmd(command)\n \n if not resp.startswith('3'):\n raise NNTPReplyError(resp)\n if isinstance(f,(bytes,bytearray)):\n f=f.splitlines()\n \n \n \n \n for line in f:\n if not line.endswith(_CRLF):\n line=line.rstrip(b\"\\r\\n\")+_CRLF\n if line.startswith(b'.'):\n line=b'.'+line\n self.file.write(line)\n self.file.write(b\".\\r\\n\")\n self.file.flush()\n return self._getresp()\n \n def post(self,data):\n ''\n\n\n \n return self._post('POST',data)\n \n def ihave(self,message_id,data):\n ''\n\n\n\n\n \n return self._post('IHAVE {0}'.format(message_id),data)\n \n def _close(self):\n try :\n if self.file:\n self.file.close()\n del self.file\n finally :\n self.sock.close()\n \n def quit(self):\n ''\n \n try :\n resp=self._shortcmd('QUIT')\n finally :\n self._close()\n return resp\n \n def login(self,user=None ,password=None ,usenetrc=True ):\n if self.authenticated:\n raise ValueError(\"Already logged in.\")\n if not user and not usenetrc:\n raise ValueError(\n \"At least one of `user` and `usenetrc` must be specified\")\n \n \n \n try :\n if usenetrc and not user:\n import netrc\n credentials=netrc.netrc()\n auth=credentials.authenticators(self.host)\n if auth:\n user=auth[0]\n password=auth[2]\n except OSError:\n pass\n \n if not user:\n return\n resp=self._shortcmd('authinfo user '+user)\n if resp.startswith('381'):\n if not password:\n raise NNTPReplyError(resp)\n else :\n resp=self._shortcmd('authinfo pass '+password)\n if not resp.startswith('281'):\n raise NNTPPermanentError(resp)\n \n self._caps=None\n self.getcapabilities()\n \n \n if self.readermode_afterauth and 'READER'not in self._caps:\n self._setreadermode()\n \n self._caps=None\n self.getcapabilities()\n \n def _setreadermode(self):\n try :\n self.welcome=self._shortcmd('mode reader')\n except NNTPPermanentError:\n \n pass\n except NNTPTemporaryError as e:\n if e.response.startswith('480'):\n \n self.readermode_afterauth=True\n else :\n raise\n \n if _have_ssl:\n def starttls(self,context=None ):\n ''\n\n \n \n \n if self.tls_on:\n raise ValueError(\"TLS is already enabled.\")\n if self.authenticated:\n raise ValueError(\"TLS cannot be started after authentication.\")\n resp=self._shortcmd('STARTTLS')\n if resp.startswith('382'):\n self.file.close()\n self.sock=_encrypt_on(self.sock,context,self.host)\n self.file=self.sock.makefile(\"rwb\")\n self.tls_on=True\n \n \n self._caps=None\n self.getcapabilities()\n else :\n raise NNTPError(\"TLS failed to start.\")\n \n \nif _have_ssl:\n class NNTP_SSL(NNTP):\n \n def __init__(self,host,port=NNTP_SSL_PORT,\n user=None ,password=None ,ssl_context=None ,\n readermode=None ,usenetrc=False ,\n timeout=_GLOBAL_DEFAULT_TIMEOUT):\n 
''\n\n \n self.ssl_context=ssl_context\n super().__init__(host,port,user,password,readermode,\n usenetrc,timeout)\n \n def _create_socket(self,timeout):\n sock=super()._create_socket(timeout)\n try :\n sock=_encrypt_on(sock,self.ssl_context,self.host)\n except :\n sock.close()\n raise\n else :\n return sock\n \n __all__.append(\"NNTP_SSL\")\n \n \n \nif __name__ =='__main__':\n import argparse\n \n parser=argparse.ArgumentParser(description=\"\"\"\\\n nntplib built-in demo - display the latest articles in a newsgroup\"\"\")\n parser.add_argument('-g','--group',default='gmane.comp.python.general',\n help='group to fetch messages from (default: %(default)s)')\n parser.add_argument('-s','--server',default='news.gmane.io',\n help='NNTP server hostname (default: %(default)s)')\n parser.add_argument('-p','--port',default=-1,type=int,\n help='NNTP port number (default: %s / %s)'%(NNTP_PORT,NNTP_SSL_PORT))\n parser.add_argument('-n','--nb-articles',default=10,type=int,\n help='number of articles to fetch (default: %(default)s)')\n parser.add_argument('-S','--ssl',action='store_true',default=False ,\n help='use NNTP over SSL')\n args=parser.parse_args()\n \n port=args.port\n if not args.ssl:\n if port ==-1:\n port=NNTP_PORT\n s=NNTP(host=args.server,port=port)\n else :\n if port ==-1:\n port=NNTP_SSL_PORT\n s=NNTP_SSL(host=args.server,port=port)\n \n caps=s.getcapabilities()\n if 'STARTTLS'in caps:\n s.starttls()\n resp,count,first,last,name=s.group(args.group)\n print('Group',name,'has',count,'articles, range',first,'to',last)\n \n def cut(s,lim):\n if len(s)>lim:\n s=s[:lim -4]+\"...\"\n return s\n \n first=str(int(last)-args.nb_articles+1)\n resp,overviews=s.xover(first,last)\n for artnum,over in overviews:\n author=decode_header(over['from']).split('<',1)[0]\n subject=decode_header(over['subject'])\n lines=int(over[':lines'])\n print(\"{:7} {:20} {:42} ({})\".format(\n artnum,cut(author,20),cut(subject,42),lines)\n )\n \n s.quit()\n", ["argparse", "collections", "datetime", "email.header", "netrc", "re", "socket", "ssl", "sys"]], "ntpath": [".py", "\n''\n\n\n\n\n\n\n\n\ncurdir='.'\npardir='..'\nextsep='.'\nsep='\\\\'\npathsep=';'\naltsep='/'\ndefpath='.;C:\\\\bin'\ndevnull='nul'\n\nimport os\nimport sys\nimport stat\nimport genericpath\nfrom genericpath import *\n\n__all__=[\"normcase\",\"isabs\",\"join\",\"splitdrive\",\"split\",\"splitext\",\n\"basename\",\"dirname\",\"commonprefix\",\"getsize\",\"getmtime\",\n\"getatime\",\"getctime\",\"islink\",\"exists\",\"lexists\",\"isdir\",\"isfile\",\n\"ismount\",\"expanduser\",\"expandvars\",\"normpath\",\"abspath\",\n\"curdir\",\"pardir\",\"sep\",\"pathsep\",\"defpath\",\"altsep\",\n\"extsep\",\"devnull\",\"realpath\",\"supports_unicode_filenames\",\"relpath\",\n\"samefile\",\"sameopenfile\",\"samestat\",\"commonpath\"]\n\ndef _get_bothseps(path):\n if isinstance(path,bytes):\n return b'\\\\/'\n else :\n return '\\\\/'\n \n \n \n \n \ndef normcase(s):\n ''\n\n \n s=os.fspath(s)\n if isinstance(s,bytes):\n return s.replace(b'/',b'\\\\').lower()\n else :\n return s.replace('/','\\\\').lower()\n \n \n \n \n \n \n \n \ndef isabs(s):\n ''\n s=os.fspath(s)\n \n \n if isinstance(s,bytes):\n if s.replace(b'/',b'\\\\').startswith(b'\\\\\\\\?\\\\'):\n return True\n else :\n if s.replace('/','\\\\').startswith('\\\\\\\\?\\\\'):\n return True\n s=splitdrive(s)[1]\n return len(s)>0 and s[0]in _get_bothseps(s)\n \n \n \ndef join(path,*paths):\n path=os.fspath(path)\n if isinstance(path,bytes):\n sep=b'\\\\'\n seps=b'\\\\/'\n colon=b':'\n else :\n sep='\\\\'\n 
seps='\\\\/'\n colon=':'\n try :\n if not paths:\n path[:0]+sep\n result_drive,result_path=splitdrive(path)\n for p in map(os.fspath,paths):\n p_drive,p_path=splitdrive(p)\n if p_path and p_path[0]in seps:\n \n if p_drive or not result_drive:\n result_drive=p_drive\n result_path=p_path\n continue\n elif p_drive and p_drive !=result_drive:\n if p_drive.lower()!=result_drive.lower():\n \n result_drive=p_drive\n result_path=p_path\n continue\n \n result_drive=p_drive\n \n if result_path and result_path[-1]not in seps:\n result_path=result_path+sep\n result_path=result_path+p_path\n \n if (result_path and result_path[0]not in seps and\n result_drive and result_drive[-1:]!=colon):\n return result_drive+sep+result_path\n return result_drive+result_path\n except (TypeError,AttributeError,BytesWarning):\n genericpath._check_arg_types('join',path,*paths)\n raise\n \n \n \n \n \ndef splitdrive(p):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n p=os.fspath(p)\n if len(p)>=2:\n if isinstance(p,bytes):\n sep=b'\\\\'\n altsep=b'/'\n colon=b':'\n else :\n sep='\\\\'\n altsep='/'\n colon=':'\n normp=p.replace(altsep,sep)\n if (normp[0:2]==sep *2)and (normp[2:3]!=sep):\n \n \n \n \n index=normp.find(sep,2)\n if index ==-1:\n return p[:0],p\n index2=normp.find(sep,index+1)\n \n \n if index2 ==index+1:\n return p[:0],p\n if index2 ==-1:\n index2=len(p)\n return p[:index2],p[index2:]\n if normp[1:2]==colon:\n return p[:2],p[2:]\n return p[:0],p\n \n \n \n \n \n \n \ndef split(p):\n ''\n\n\n \n p=os.fspath(p)\n seps=_get_bothseps(p)\n d,p=splitdrive(p)\n \n i=len(p)\n while i and p[i -1]not in seps:\n i -=1\n head,tail=p[:i],p[i:]\n \n head=head.rstrip(seps)or head\n return d+head,tail\n \n \n \n \n \n \n \ndef splitext(p):\n p=os.fspath(p)\n if isinstance(p,bytes):\n return genericpath._splitext(p,b'\\\\',b'/',b'.')\n else :\n return genericpath._splitext(p,'\\\\','/','.')\nsplitext.__doc__=genericpath._splitext.__doc__\n\n\n\n\ndef basename(p):\n ''\n return split(p)[1]\n \n \n \n \ndef dirname(p):\n ''\n return split(p)[0]\n \n \n \n \ndef islink(path):\n ''\n\n \n try :\n st=os.lstat(path)\n except (OSError,ValueError,AttributeError):\n return False\n return stat.S_ISLNK(st.st_mode)\n \n \n \ndef lexists(path):\n ''\n try :\n st=os.lstat(path)\n except (OSError,ValueError):\n return False\n return True\n \n \n \n \n \n \n \n \n \n \n \ntry :\n from nt import _getvolumepathname\nexcept ImportError:\n _getvolumepathname=None\ndef ismount(path):\n ''\n \n path=os.fspath(path)\n seps=_get_bothseps(path)\n path=abspath(path)\n root,rest=splitdrive(path)\n if root and root[0]in seps:\n return (not rest)or (rest in seps)\n if rest in seps:\n return True\n \n if _getvolumepathname:\n return path.rstrip(seps)==_getvolumepathname(path).rstrip(seps)\n else :\n return False\n \n \n \n \n \n \n \n \n \n \n \ndef expanduser(path):\n ''\n\n \n path=os.fspath(path)\n if isinstance(path,bytes):\n tilde=b'~'\n else :\n tilde='~'\n if not path.startswith(tilde):\n return path\n i,n=1,len(path)\n while i 0 and comps[i -1]!=pardir:\n del comps[i -1:i+1]\n i -=1\n elif i ==0 and prefix.endswith(sep):\n del comps[i]\n else :\n i +=1\n else :\n i +=1\n \n if not prefix and not comps:\n comps.append(curdir)\n return prefix+sep.join(comps)\n \ndef _abspath_fallback(path):\n ''\n\n\n\n \n \n path=os.fspath(path)\n if not isabs(path):\n if isinstance(path,bytes):\n cwd=os.getcwdb()\n else :\n cwd=os.getcwd()\n path=join(cwd,path)\n return normpath(path)\n \n \ntry :\n from nt import _getfullpathname\n \nexcept ImportError:\n 
abspath=_abspath_fallback\n \nelse :\n def abspath(path):\n ''\n try :\n return normpath(_getfullpathname(path))\n except (OSError,ValueError):\n return _abspath_fallback(path)\n \ntry :\n from nt import _getfinalpathname,readlink as _nt_readlink\nexcept ImportError:\n\n realpath=abspath\nelse :\n def _readlink_deep(path):\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n allowed_winerror=1,2,3,5,21,32,50,67,87,4390,4392,4393\n \n seen=set()\n while normcase(path)not in seen:\n seen.add(normcase(path))\n try :\n old_path=path\n path=_nt_readlink(path)\n \n \n if not isabs(path):\n \n \n \n if not islink(old_path):\n path=old_path\n break\n path=normpath(join(dirname(old_path),path))\n except OSError as ex:\n if ex.winerror in allowed_winerror:\n break\n raise\n except ValueError:\n \n break\n return path\n \n def _getfinalpathname_nonstrict(path):\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n allowed_winerror=1,2,3,5,21,32,50,67,87,123,1920,1921\n \n \n \n tail=''\n while path:\n try :\n path=_getfinalpathname(path)\n return join(path,tail)if tail else path\n except OSError as ex:\n if ex.winerror not in allowed_winerror:\n raise\n try :\n \n \n \n new_path=_readlink_deep(path)\n if new_path !=path:\n return join(new_path,tail)if tail else new_path\n except OSError:\n \n pass\n path,name=split(path)\n \n \n \n if path and not name:\n return path+tail\n tail=join(name,tail)if tail else name\n return tail\n \n def realpath(path):\n path=normpath(path)\n if isinstance(path,bytes):\n prefix=b'\\\\\\\\?\\\\'\n unc_prefix=b'\\\\\\\\?\\\\UNC\\\\'\n new_unc_prefix=b'\\\\\\\\'\n cwd=os.getcwdb()\n \n if normcase(path)==normcase(os.fsencode(devnull)):\n return b'\\\\\\\\.\\\\NUL'\n else :\n prefix='\\\\\\\\?\\\\'\n unc_prefix='\\\\\\\\?\\\\UNC\\\\'\n new_unc_prefix='\\\\\\\\'\n cwd=os.getcwd()\n \n if normcase(path)==normcase(devnull):\n return '\\\\\\\\.\\\\NUL'\n had_prefix=path.startswith(prefix)\n if not had_prefix and not isabs(path):\n path=join(cwd,path)\n try :\n path=_getfinalpathname(path)\n initial_winerror=0\n except OSError as ex:\n initial_winerror=ex.winerror\n path=_getfinalpathname_nonstrict(path)\n \n \n \n if not had_prefix and path.startswith(prefix):\n \n \n if path.startswith(unc_prefix):\n spath=new_unc_prefix+path[len(unc_prefix):]\n else :\n spath=path[len(prefix):]\n \n try :\n if _getfinalpathname(spath)==path:\n path=spath\n except OSError as ex:\n \n \n if ex.winerror ==initial_winerror:\n path=spath\n return path\n \n \n \nsupports_unicode_filenames=(hasattr(sys,\"getwindowsversion\")and\nsys.getwindowsversion()[3]>=2)\n\ndef relpath(path,start=None ):\n ''\n path=os.fspath(path)\n if isinstance(path,bytes):\n sep=b'\\\\'\n curdir=b'.'\n pardir=b'..'\n else :\n sep='\\\\'\n curdir='.'\n pardir='..'\n \n if start is None :\n start=curdir\n \n if not path:\n raise ValueError(\"no path specified\")\n \n start=os.fspath(start)\n try :\n start_abs=abspath(normpath(start))\n path_abs=abspath(normpath(path))\n start_drive,start_rest=splitdrive(start_abs)\n path_drive,path_rest=splitdrive(path_abs)\n if normcase(start_drive)!=normcase(path_drive):\n raise ValueError(\"path is on mount %r, start on mount %r\"%(\n path_drive,start_drive))\n \n start_list=[x for x in start_rest.split(sep)if x]\n path_list=[x for x in path_rest.split(sep)if x]\n \n i=0\n for e1,e2 in zip(start_list,path_list):\n if normcase(e1)!=normcase(e2):\n break\n i +=1\n \n rel_list=[pardir]*(len(start_list)-i)+path_list[i:]\n if not rel_list:\n return curdir\n return join(*rel_list)\n except 
(TypeError,ValueError,AttributeError,BytesWarning,DeprecationWarning):\n genericpath._check_arg_types('relpath',path,start)\n raise\n \n \n \n \n \n \n \n \n \n \n \n \ndef commonpath(paths):\n ''\n \n if not paths:\n raise ValueError('commonpath() arg is an empty sequence')\n \n paths=tuple(map(os.fspath,paths))\n if isinstance(paths[0],bytes):\n sep=b'\\\\'\n altsep=b'/'\n curdir=b'.'\n else :\n sep='\\\\'\n altsep='/'\n curdir='.'\n \n try :\n drivesplits=[splitdrive(p.replace(altsep,sep).lower())for p in paths]\n split_paths=[p.split(sep)for d,p in drivesplits]\n \n try :\n isabs,=set(p[:1]==sep for d,p in drivesplits)\n except ValueError:\n raise ValueError(\"Can't mix absolute and relative paths\")from None\n \n \n \n \n if len(set(d for d,p in drivesplits))!=1:\n raise ValueError(\"Paths don't have the same drive\")\n \n drive,path=splitdrive(paths[0].replace(altsep,sep))\n common=path.split(sep)\n common=[c for c in common if c and c !=curdir]\n \n split_paths=[[c for c in s if c and c !=curdir]for s in split_paths]\n s1=min(split_paths)\n s2=max(split_paths)\n for i,c in enumerate(s1):\n if c !=s2[i]:\n common=common[:i]\n break\n else :\n common=common[:len(s1)]\n \n prefix=drive+sep if isabs else drive\n return prefix+sep.join(common)\n except (TypeError,AttributeError):\n genericpath._check_arg_types('commonpath',*paths)\n raise\n \n \ntry :\n\n\n\n\n from nt import _isdir as isdir\nexcept ImportError:\n\n pass\n", ["genericpath", "nt", "os", "stat", "string", "sys"]], "numbers": [".py", "\n\n\n\"\"\"Abstract Base Classes (ABCs) for numbers, according to PEP 3141.\n\nTODO: Fill out more detailed documentation on the operators.\"\"\"\n\nfrom abc import ABCMeta,abstractmethod\n\n__all__=[\"Number\",\"Complex\",\"Real\",\"Rational\",\"Integral\"]\n\nclass Number(metaclass=ABCMeta):\n ''\n\n\n\n \n __slots__=()\n \n \n __hash__=None\n \n \n \n \n \n \n \n \n \n \nclass Complex(Number):\n ''\n\n\n\n\n\n\n\n \n \n __slots__=()\n \n @abstractmethod\n def __complex__(self):\n ''\n \n def __bool__(self):\n ''\n return self !=0\n \n @property\n @abstractmethod\n def real(self):\n ''\n\n\n \n raise NotImplementedError\n \n @property\n @abstractmethod\n def imag(self):\n ''\n\n\n \n raise NotImplementedError\n \n @abstractmethod\n def __add__(self,other):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __radd__(self,other):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __neg__(self):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __pos__(self):\n ''\n raise NotImplementedError\n \n def __sub__(self,other):\n ''\n return self+-other\n \n def __rsub__(self,other):\n ''\n return -self+other\n \n @abstractmethod\n def __mul__(self,other):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __rmul__(self,other):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __truediv__(self,other):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __rtruediv__(self,other):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __pow__(self,exponent):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __rpow__(self,base):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __abs__(self):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def conjugate(self):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __eq__(self,other):\n ''\n raise NotImplementedError\n \nComplex.register(complex)\n\n\nclass Real(Complex):\n ''\n\n\n\n\n\n \n \n __slots__=()\n \n @abstractmethod\n def __float__(self):\n ''\n\n \n 
raise NotImplementedError\n \n @abstractmethod\n def __trunc__(self):\n ''\n\n\n\n\n\n\n\n \n raise NotImplementedError\n \n @abstractmethod\n def __floor__(self):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __ceil__(self):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __round__(self,ndigits=None ):\n ''\n\n\n\n \n raise NotImplementedError\n \n def __divmod__(self,other):\n ''\n\n\n\n \n return (self //other,self %other)\n \n def __rdivmod__(self,other):\n ''\n\n\n\n \n return (other //self,other %self)\n \n @abstractmethod\n def __floordiv__(self,other):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __rfloordiv__(self,other):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __mod__(self,other):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __rmod__(self,other):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __lt__(self,other):\n ''\n\n \n raise NotImplementedError\n \n @abstractmethod\n def __le__(self,other):\n ''\n raise NotImplementedError\n \n \n def __complex__(self):\n ''\n return complex(float(self))\n \n @property\n def real(self):\n ''\n return +self\n \n @property\n def imag(self):\n ''\n return 0\n \n def conjugate(self):\n ''\n return +self\n \nReal.register(float)\n\n\nclass Rational(Real):\n ''\n \n __slots__=()\n \n @property\n @abstractmethod\n def numerator(self):\n raise NotImplementedError\n \n @property\n @abstractmethod\n def denominator(self):\n raise NotImplementedError\n \n \n def __float__(self):\n ''\n\n\n\n\n\n \n return self.numerator /self.denominator\n \n \nclass Integral(Rational):\n ''\n \n __slots__=()\n \n @abstractmethod\n def __int__(self):\n ''\n raise NotImplementedError\n \n def __index__(self):\n ''\n return int(self)\n \n @abstractmethod\n def __pow__(self,exponent,modulus=None ):\n ''\n\n\n\n\n\n \n raise NotImplementedError\n \n @abstractmethod\n def __lshift__(self,other):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __rlshift__(self,other):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __rshift__(self,other):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __rrshift__(self,other):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __and__(self,other):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __rand__(self,other):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __xor__(self,other):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __rxor__(self,other):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __or__(self,other):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __ror__(self,other):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def __invert__(self):\n ''\n raise NotImplementedError\n \n \n def __float__(self):\n ''\n return float(int(self))\n \n @property\n def numerator(self):\n ''\n return +self\n \n @property\n def denominator(self):\n ''\n return 1\n \nIntegral.register(int)\n", ["abc"]], "opcode": [".py", "\n\"\"\"\nopcode module - potentially shared between dis and other modules which\noperate on bytecodes (e.g. 
peephole optimizers).\n\"\"\"\n\n__all__=[\"cmp_op\",\"hasconst\",\"hasname\",\"hasjrel\",\"hasjabs\",\n\"haslocal\",\"hascompare\",\"hasfree\",\"opname\",\"opmap\",\n\"HAVE_ARGUMENT\",\"EXTENDED_ARG\",\"hasnargs\"]\n\n\n\n\n\n\n\n\ntry :\n from _opcode import stack_effect\n __all__.append('stack_effect')\nexcept ImportError:\n pass\n \ncmp_op=('<','<=','==','!=','>','>=')\n\nhasconst=[]\nhasname=[]\nhasjrel=[]\nhasjabs=[]\nhaslocal=[]\nhascompare=[]\nhasfree=[]\nhasnargs=[]\n\nopmap={}\nopname=['<%r>'%(op,)for op in range(256)]\n\ndef def_op(name,op):\n opname[op]=name\n opmap[name]=op\n \ndef name_op(name,op):\n def_op(name,op)\n hasname.append(op)\n \ndef jrel_op(name,op):\n def_op(name,op)\n hasjrel.append(op)\n \ndef jabs_op(name,op):\n def_op(name,op)\n hasjabs.append(op)\n \n \n \n \ndef_op('POP_TOP',1)\ndef_op('ROT_TWO',2)\ndef_op('ROT_THREE',3)\ndef_op('DUP_TOP',4)\ndef_op('DUP_TOP_TWO',5)\ndef_op('ROT_FOUR',6)\n\ndef_op('NOP',9)\ndef_op('UNARY_POSITIVE',10)\ndef_op('UNARY_NEGATIVE',11)\ndef_op('UNARY_NOT',12)\n\ndef_op('UNARY_INVERT',15)\n\ndef_op('BINARY_MATRIX_MULTIPLY',16)\ndef_op('INPLACE_MATRIX_MULTIPLY',17)\n\ndef_op('BINARY_POWER',19)\ndef_op('BINARY_MULTIPLY',20)\n\ndef_op('BINARY_MODULO',22)\ndef_op('BINARY_ADD',23)\ndef_op('BINARY_SUBTRACT',24)\ndef_op('BINARY_SUBSCR',25)\ndef_op('BINARY_FLOOR_DIVIDE',26)\ndef_op('BINARY_TRUE_DIVIDE',27)\ndef_op('INPLACE_FLOOR_DIVIDE',28)\ndef_op('INPLACE_TRUE_DIVIDE',29)\n\ndef_op('RERAISE',48)\ndef_op('WITH_EXCEPT_START',49)\ndef_op('GET_AITER',50)\ndef_op('GET_ANEXT',51)\ndef_op('BEFORE_ASYNC_WITH',52)\n\ndef_op('END_ASYNC_FOR',54)\ndef_op('INPLACE_ADD',55)\ndef_op('INPLACE_SUBTRACT',56)\ndef_op('INPLACE_MULTIPLY',57)\n\ndef_op('INPLACE_MODULO',59)\ndef_op('STORE_SUBSCR',60)\ndef_op('DELETE_SUBSCR',61)\ndef_op('BINARY_LSHIFT',62)\ndef_op('BINARY_RSHIFT',63)\ndef_op('BINARY_AND',64)\ndef_op('BINARY_XOR',65)\ndef_op('BINARY_OR',66)\ndef_op('INPLACE_POWER',67)\ndef_op('GET_ITER',68)\ndef_op('GET_YIELD_FROM_ITER',69)\n\ndef_op('PRINT_EXPR',70)\ndef_op('LOAD_BUILD_CLASS',71)\ndef_op('YIELD_FROM',72)\ndef_op('GET_AWAITABLE',73)\ndef_op('LOAD_ASSERTION_ERROR',74)\ndef_op('INPLACE_LSHIFT',75)\ndef_op('INPLACE_RSHIFT',76)\ndef_op('INPLACE_AND',77)\ndef_op('INPLACE_XOR',78)\ndef_op('INPLACE_OR',79)\n\ndef_op('LIST_TO_TUPLE',82)\ndef_op('RETURN_VALUE',83)\ndef_op('IMPORT_STAR',84)\ndef_op('SETUP_ANNOTATIONS',85)\ndef_op('YIELD_VALUE',86)\ndef_op('POP_BLOCK',87)\n\ndef_op('POP_EXCEPT',89)\n\nHAVE_ARGUMENT=90\n\nname_op('STORE_NAME',90)\nname_op('DELETE_NAME',91)\ndef_op('UNPACK_SEQUENCE',92)\njrel_op('FOR_ITER',93)\ndef_op('UNPACK_EX',94)\nname_op('STORE_ATTR',95)\nname_op('DELETE_ATTR',96)\nname_op('STORE_GLOBAL',97)\nname_op('DELETE_GLOBAL',98)\ndef_op('LOAD_CONST',100)\nhasconst.append(100)\nname_op('LOAD_NAME',101)\ndef_op('BUILD_TUPLE',102)\ndef_op('BUILD_LIST',103)\ndef_op('BUILD_SET',104)\ndef_op('BUILD_MAP',105)\nname_op('LOAD_ATTR',106)\ndef_op('COMPARE_OP',107)\nhascompare.append(107)\nname_op('IMPORT_NAME',108)\nname_op('IMPORT_FROM',109)\n\njrel_op('JUMP_FORWARD',110)\njabs_op('JUMP_IF_FALSE_OR_POP',111)\njabs_op('JUMP_IF_TRUE_OR_POP',112)\njabs_op('JUMP_ABSOLUTE',113)\njabs_op('POP_JUMP_IF_FALSE',114)\njabs_op('POP_JUMP_IF_TRUE',115)\n\nname_op('LOAD_GLOBAL',116)\n\ndef_op('IS_OP',117)\ndef_op('CONTAINS_OP',118)\n\njabs_op('JUMP_IF_NOT_EXC_MATCH',121)\njrel_op('SETUP_FINALLY',122)\n\ndef_op('LOAD_FAST',124)\nhaslocal.append(124)\ndef_op('STORE_FAST',125)\nhaslocal.append(125)\ndef_op('DELETE_FAST',126)\nhaslocal.append(126)\n\ndef_op
('RAISE_VARARGS',130)\ndef_op('CALL_FUNCTION',131)\ndef_op('MAKE_FUNCTION',132)\ndef_op('BUILD_SLICE',133)\ndef_op('LOAD_CLOSURE',135)\nhasfree.append(135)\ndef_op('LOAD_DEREF',136)\nhasfree.append(136)\ndef_op('STORE_DEREF',137)\nhasfree.append(137)\ndef_op('DELETE_DEREF',138)\nhasfree.append(138)\n\ndef_op('CALL_FUNCTION_KW',141)\ndef_op('CALL_FUNCTION_EX',142)\n\njrel_op('SETUP_WITH',143)\n\ndef_op('LIST_APPEND',145)\ndef_op('SET_ADD',146)\ndef_op('MAP_ADD',147)\n\ndef_op('LOAD_CLASSDEREF',148)\nhasfree.append(148)\n\ndef_op('EXTENDED_ARG',144)\nEXTENDED_ARG=144\n\njrel_op('SETUP_ASYNC_WITH',154)\n\ndef_op('FORMAT_VALUE',155)\ndef_op('BUILD_CONST_KEY_MAP',156)\ndef_op('BUILD_STRING',157)\n\nname_op('LOAD_METHOD',160)\ndef_op('CALL_METHOD',161)\n\ndef_op('LIST_EXTEND',162)\ndef_op('SET_UPDATE',163)\ndef_op('DICT_MERGE',164)\ndef_op('DICT_UPDATE',165)\n\ndel def_op,name_op,jrel_op,jabs_op\n", ["_opcode"]], "operator": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n__all__=['abs','add','and_','attrgetter','concat','contains','countOf',\n'delitem','eq','floordiv','ge','getitem','gt','iadd','iand',\n'iconcat','ifloordiv','ilshift','imatmul','imod','imul',\n'index','indexOf','inv','invert','ior','ipow','irshift',\n'is_','is_not','isub','itemgetter','itruediv','ixor','le',\n'length_hint','lshift','lt','matmul','methodcaller','mod',\n'mul','ne','neg','not_','or_','pos','pow','rshift',\n'setitem','sub','truediv','truth','xor']\n\nfrom builtins import abs as _abs\n\n\n\n\ndef lt(a,b):\n ''\n return a =b\n \ndef gt(a,b):\n ''\n return a >b\n \n \n \ndef not_(a):\n ''\n return not a\n \ndef truth(a):\n ''\n return True if a else False\n \ndef is_(a,b):\n ''\n return a is b\n \ndef is_not(a,b):\n ''\n return a is not b\n \n \n \ndef abs(a):\n ''\n return _abs(a)\n \ndef add(a,b):\n ''\n return a+b\n \ndef and_(a,b):\n ''\n return a&b\n \ndef floordiv(a,b):\n ''\n return a //b\n \ndef index(a):\n ''\n return a.__index__()\n \ndef inv(a):\n ''\n return ~a\ninvert=inv\n\ndef lshift(a,b):\n ''\n return a <>b\n \ndef sub(a,b):\n ''\n return a -b\n \ndef truediv(a,b):\n ''\n return a /b\n \ndef xor(a,b):\n ''\n return a ^b\n \n \n \ndef concat(a,b):\n ''\n if not hasattr(a,'__getitem__'):\n msg=\"'%s' object can't be concatenated\"%type(a).__name__\n raise TypeError(msg)\n return a+b\n \ndef contains(a,b):\n ''\n return b in a\n \ndef countOf(a,b):\n ''\n count=0\n for i in a:\n if i ==b:\n count +=1\n return count\n \ndef delitem(a,b):\n ''\n del a[b]\n \ndef getitem(a,b):\n ''\n return a[b]\n \ndef indexOf(a,b):\n ''\n for i,j in enumerate(a):\n if j ==b:\n return i\n else :\n raise ValueError('sequence.index(x): x not in sequence')\n \ndef setitem(a,b,c):\n ''\n a[b]=c\n \ndef length_hint(obj,default=0):\n ''\n\n\n\n\n\n\n \n if not isinstance(default,int):\n msg=(\"'%s' object cannot be interpreted as an integer\"%\n type(default).__name__)\n raise TypeError(msg)\n \n try :\n return len(obj)\n except TypeError:\n pass\n \n try :\n hint=type(obj).__length_hint__\n except AttributeError:\n return default\n \n try :\n val=hint(obj)\n except TypeError:\n return default\n if val is NotImplemented:\n return default\n if not isinstance(val,int):\n msg=('__length_hint__ must be integer, not %s'%\n type(val).__name__)\n raise TypeError(msg)\n if val <0:\n msg='__length_hint__() should return >= 0'\n raise ValueError(msg)\n return val\n \n \n \nclass attrgetter:\n ''\n\n\n\n\n\n \n __slots__=('_attrs','_call')\n \n def __init__(self,attr,*attrs):\n if not attrs:\n if not isinstance(attr,str):\n raise TypeError('attribute 
name must be a string')\n self._attrs=(attr,)\n names=attr.split('.')\n def func(obj):\n for name in names:\n obj=getattr(obj,name)\n return obj\n self._call=func\n else :\n self._attrs=(attr,)+attrs\n getters=tuple(map(attrgetter,self._attrs))\n def func(obj):\n return tuple(getter(obj)for getter in getters)\n self._call=func\n \n def __call__(self,obj):\n return self._call(obj)\n \n def __repr__(self):\n return '%s.%s(%s)'%(self.__class__.__module__,\n self.__class__.__qualname__,\n ', '.join(map(repr,self._attrs)))\n \n def __reduce__(self):\n return self.__class__,self._attrs\n \nclass itemgetter:\n ''\n\n\n\n \n __slots__=('_items','_call')\n \n def __init__(self,item,*items):\n if not items:\n self._items=(item,)\n def func(obj):\n return obj[item]\n self._call=func\n else :\n self._items=items=(item,)+items\n def func(obj):\n return tuple(obj[i]for i in items)\n self._call=func\n \n def __call__(self,obj):\n return self._call(obj)\n \n def __repr__(self):\n return '%s.%s(%s)'%(self.__class__.__module__,\n self.__class__.__name__,\n ', '.join(map(repr,self._items)))\n \n def __reduce__(self):\n return self.__class__,self._items\n \nclass methodcaller:\n ''\n\n\n\n\n \n __slots__=('_name','_args','_kwargs')\n \n def __init__(self,name,/,*args,**kwargs):\n self._name=name\n if not isinstance(self._name,str):\n raise TypeError('method name must be a string')\n self._args=args\n self._kwargs=kwargs\n \n def __call__(self,obj):\n return getattr(obj,self._name)(*self._args,**self._kwargs)\n \n def __repr__(self):\n args=[repr(self._name)]\n args.extend(map(repr,self._args))\n args.extend('%s=%r'%(k,v)for k,v in self._kwargs.items())\n return '%s.%s(%s)'%(self.__class__.__module__,\n self.__class__.__name__,\n ', '.join(args))\n \n def __reduce__(self):\n if not self._kwargs:\n return self.__class__,(self._name,)+self._args\n else :\n from functools import partial\n return partial(self.__class__,self._name,**self._kwargs),self._args\n \n \n \n \ndef iadd(a,b):\n ''\n a +=b\n return a\n \ndef iand(a,b):\n ''\n a &=b\n return a\n \ndef iconcat(a,b):\n ''\n if not hasattr(a,'__getitem__'):\n msg=\"'%s' object can't be concatenated\"%type(a).__name__\n raise TypeError(msg)\n a +=b\n return a\n \ndef ifloordiv(a,b):\n ''\n a //=b\n return a\n \ndef ilshift(a,b):\n ''\n a <<=b\n return a\n \ndef imod(a,b):\n ''\n a %=b\n return a\n \ndef imul(a,b):\n ''\n a *=b\n return a\n \ndef imatmul(a,b):\n ''\n a @=b\n return a\n \ndef ior(a,b):\n ''\n a |=b\n return a\n \ndef ipow(a,b):\n ''\n a **=b\n return a\n \ndef irshift(a,b):\n ''\n a >>=b\n return a\n \ndef isub(a,b):\n ''\n a -=b\n return a\n \ndef itruediv(a,b):\n ''\n a /=b\n return a\n \ndef ixor(a,b):\n ''\n a ^=b\n return a\n \n \ntry :\n from _operator import *\nexcept ImportError:\n pass\nelse :\n from _operator import __doc__\n \n \n 
\n__lt__=lt\n__le__=le\n__eq__=eq\n__ne__=ne\n__ge__=ge\n__gt__=gt\n__not__=not_\n__abs__=abs\n__add__=add\n__and__=and_\n__floordiv__=floordiv\n__index__=index\n__inv__=inv\n__invert__=invert\n__lshift__=lshift\n__mod__=mod\n__mul__=mul\n__matmul__=matmul\n__neg__=neg\n__or__=or_\n__pos__=pos\n__pow__=pow\n__rshift__=rshift\n__sub__=sub\n__truediv__=truediv\n__xor__=xor\n__concat__=concat\n__contains__=contains\n__delitem__=delitem\n__getitem__=getitem\n__setitem__=setitem\n__iadd__=iadd\n__iand__=iand\n__iconcat__=iconcat\n__ifloordiv__=ifloordiv\n__ilshift__=ilshift\n__imod__=imod\n__imul__=imul\n__imatmul__=imatmul\n__ior__=ior\n__ipow__=ipow\n__irshift__=irshift\n__isub__=isub\n__itruediv__=itruediv\n__ixor__=ixor\n", ["_operator", "builtins", "functools"]], "optparse": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n__version__=\"1.5.3\"\n\n__all__=['Option',\n'make_option',\n'SUPPRESS_HELP',\n'SUPPRESS_USAGE',\n'Values',\n'OptionContainer',\n'OptionGroup',\n'OptionParser',\n'HelpFormatter',\n'IndentedHelpFormatter',\n'TitledHelpFormatter',\n'OptParseError',\n'OptionError',\n'OptionConflictError',\n'OptionValueError',\n'BadOptionError',\n'check_choice']\n\n__copyright__=\"\"\"\nCopyright (c) 2001-2006 Gregory P. Ward. All rights reserved.\nCopyright (c) 2002-2006 Python Software Foundation. All rights reserved.\n\nRedistribution and use in source and binary forms, with or without\nmodification, are permitted provided that the following conditions are\nmet:\n\n * Redistributions of source code must retain the above copyright\n notice, this list of conditions and the following disclaimer.\n\n * Redistributions in binary form must reproduce the above copyright\n notice, this list of conditions and the following disclaimer in the\n documentation and/or other materials provided with the distribution.\n\n * Neither the name of the author nor the names of its\n contributors may be used to endorse or promote products derived from\n this software without specific prior written permission.\n\nTHIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS\nIS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED\nTO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A\nPARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE AUTHOR OR\nCONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,\nEXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,\nPROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR\nPROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF\nLIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING\nNEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS\nSOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\"\"\"\n\nimport sys,os\nimport textwrap\n\ndef _repr(self):\n return \"<%s at 0x%x: %s>\"%(self.__class__.__name__,id(self),self)\n \n \n \n \n \n \n \n \ntry :\n from gettext import gettext,ngettext\nexcept ImportError:\n def gettext(message):\n return message\n \n def ngettext(singular,plural,n):\n if n ==1:\n return singular\n return plural\n \n_=gettext\n\n\nclass OptParseError(Exception):\n def __init__(self,msg):\n self.msg=msg\n \n def __str__(self):\n return self.msg\n \n \nclass OptionError(OptParseError):\n ''\n\n\n \n \n def __init__(self,msg,option):\n self.msg=msg\n self.option_id=str(option)\n \n def __str__(self):\n if self.option_id:\n return \"option %s: %s\"%(self.option_id,self.msg)\n else :\n return self.msg\n \nclass OptionConflictError(OptionError):\n ''\n\n \n \nclass OptionValueError(OptParseError):\n ''\n\n\n \n \nclass BadOptionError(OptParseError):\n ''\n\n \n def __init__(self,opt_str):\n self.opt_str=opt_str\n \n def __str__(self):\n return _(\"no such option: %s\")%self.opt_str\n \nclass AmbiguousOptionError(BadOptionError):\n ''\n\n \n def __init__(self,opt_str,possibilities):\n BadOptionError.__init__(self,opt_str)\n self.possibilities=possibilities\n \n def __str__(self):\n return (_(\"ambiguous option: %s (%s?)\")\n %(self.opt_str,\", \".join(self.possibilities)))\n \n \nclass HelpFormatter:\n\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n NO_DEFAULT_VALUE=\"none\"\n \n def __init__(self,\n indent_increment,\n max_help_position,\n width,\n short_first):\n self.parser=None\n self.indent_increment=indent_increment\n if width is None :\n try :\n width=int(os.environ['COLUMNS'])\n except (KeyError,ValueError):\n width=80\n width -=2\n self.width=width\n self.help_position=self.max_help_position=\\\n min(max_help_position,max(width -20,indent_increment *2))\n self.current_indent=0\n self.level=0\n self.help_width=None\n self.short_first=short_first\n self.default_tag=\"%default\"\n self.option_strings={}\n self._short_opt_fmt=\"%s %s\"\n self._long_opt_fmt=\"%s=%s\"\n \n def set_parser(self,parser):\n self.parser=parser\n \n def set_short_opt_delimiter(self,delim):\n if delim not in (\"\",\" \"):\n raise ValueError(\n \"invalid metavar delimiter for short options: %r\"%delim)\n self._short_opt_fmt=\"%s\"+delim+\"%s\"\n \n def set_long_opt_delimiter(self,delim):\n if delim not in (\"=\",\" \"):\n raise ValueError(\n \"invalid metavar delimiter for long options: %r\"%delim)\n self._long_opt_fmt=\"%s\"+delim+\"%s\"\n \n def indent(self):\n self.current_indent +=self.indent_increment\n self.level +=1\n \n def dedent(self):\n self.current_indent -=self.indent_increment\n assert self.current_indent >=0,\"Indent decreased below 0.\"\n self.level -=1\n \n def format_usage(self,usage):\n raise NotImplementedError(\"subclasses must implement\")\n \n def format_heading(self,heading):\n raise NotImplementedError(\"subclasses must implement\")\n \n def _format_text(self,text):\n ''\n\n\n \n text_width=max(self.width 
-self.current_indent,11)\n indent=\" \"*self.current_indent\n return textwrap.fill(text,\n text_width,\n initial_indent=indent,\n subsequent_indent=indent)\n \n def format_description(self,description):\n if description:\n return self._format_text(description)+\"\\n\"\n else :\n return \"\"\n \n def format_epilog(self,epilog):\n if epilog:\n return \"\\n\"+self._format_text(epilog)+\"\\n\"\n else :\n return \"\"\n \n \n def expand_default(self,option):\n if self.parser is None or not self.default_tag:\n return option.help\n \n default_value=self.parser.defaults.get(option.dest)\n if default_value is NO_DEFAULT or default_value is None :\n default_value=self.NO_DEFAULT_VALUE\n \n return option.help.replace(self.default_tag,str(default_value))\n \n def format_option(self,option):\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n result=[]\n opts=self.option_strings[option]\n opt_width=self.help_position -self.current_indent -2\n if len(opts)>opt_width:\n opts=\"%*s%s\\n\"%(self.current_indent,\"\",opts)\n indent_first=self.help_position\n else :\n opts=\"%*s%-*s \"%(self.current_indent,\"\",opt_width,opts)\n indent_first=0\n result.append(opts)\n if option.help:\n help_text=self.expand_default(option)\n help_lines=textwrap.wrap(help_text,self.help_width)\n result.append(\"%*s%s\\n\"%(indent_first,\"\",help_lines[0]))\n result.extend([\"%*s%s\\n\"%(self.help_position,\"\",line)\n for line in help_lines[1:]])\n elif opts[-1]!=\"\\n\":\n result.append(\"\\n\")\n return \"\".join(result)\n \n def store_option_strings(self,parser):\n self.indent()\n max_len=0\n for opt in parser.option_list:\n strings=self.format_option_strings(opt)\n self.option_strings[opt]=strings\n max_len=max(max_len,len(strings)+self.current_indent)\n self.indent()\n for group in parser.option_groups:\n for opt in group.option_list:\n strings=self.format_option_strings(opt)\n self.option_strings[opt]=strings\n max_len=max(max_len,len(strings)+self.current_indent)\n self.dedent()\n self.dedent()\n self.help_position=min(max_len+2,self.max_help_position)\n self.help_width=max(self.width -self.help_position,11)\n \n def format_option_strings(self,option):\n ''\n if option.takes_value():\n metavar=option.metavar or option.dest.upper()\n short_opts=[self._short_opt_fmt %(sopt,metavar)\n for sopt in option._short_opts]\n long_opts=[self._long_opt_fmt %(lopt,metavar)\n for lopt in option._long_opts]\n else :\n short_opts=option._short_opts\n long_opts=option._long_opts\n \n if self.short_first:\n opts=short_opts+long_opts\n else :\n opts=long_opts+short_opts\n \n return \", \".join(opts)\n \nclass IndentedHelpFormatter(HelpFormatter):\n ''\n \n \n def __init__(self,\n indent_increment=2,\n max_help_position=24,\n width=None ,\n short_first=1):\n HelpFormatter.__init__(\n self,indent_increment,max_help_position,width,short_first)\n \n def format_usage(self,usage):\n return _(\"Usage: %s\\n\")%usage\n \n def format_heading(self,heading):\n return \"%*s%s:\\n\"%(self.current_indent,\"\",heading)\n \n \nclass TitledHelpFormatter(HelpFormatter):\n ''\n \n \n def __init__(self,\n indent_increment=0,\n max_help_position=24,\n width=None ,\n short_first=0):\n HelpFormatter.__init__(\n self,indent_increment,max_help_position,width,short_first)\n \n def format_usage(self,usage):\n return \"%s %s\\n\"%(self.format_heading(_(\"Usage\")),usage)\n \n def format_heading(self,heading):\n return \"%s\\n%s\\n\"%(heading,\"=-\"[self.level]*len(heading))\n \n \ndef _parse_num(val,type):\n if val[:2].lower()==\"0x\":\n radix=16\n elif 
val[:2].lower()==\"0b\":\n radix=2\n val=val[2:]or \"0\"\n elif val[:1]==\"0\":\n radix=8\n else :\n radix=10\n \n return type(val,radix)\n \ndef _parse_int(val):\n return _parse_num(val,int)\n \n_builtin_cvt={\"int\":(_parse_int,_(\"integer\")),\n\"long\":(_parse_int,_(\"integer\")),\n\"float\":(float,_(\"floating-point\")),\n\"complex\":(complex,_(\"complex\"))}\n\ndef check_builtin(option,opt,value):\n (cvt,what)=_builtin_cvt[option.type]\n try :\n return cvt(value)\n except ValueError:\n raise OptionValueError(\n _(\"option %s: invalid %s value: %r\")%(opt,what,value))\n \ndef check_choice(option,opt,value):\n if value in option.choices:\n return value\n else :\n choices=\", \".join(map(repr,option.choices))\n raise OptionValueError(\n _(\"option %s: invalid choice: %r (choose from %s)\")\n %(opt,value,choices))\n \n \n \nNO_DEFAULT=(\"NO\",\"DEFAULT\")\n\n\nclass Option:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n \n \n ATTRS=['action',\n 'type',\n 'dest',\n 'default',\n 'nargs',\n 'const',\n 'choices',\n 'callback',\n 'callback_args',\n 'callback_kwargs',\n 'help',\n 'metavar']\n \n \n \n ACTIONS=(\"store\",\n \"store_const\",\n \"store_true\",\n \"store_false\",\n \"append\",\n \"append_const\",\n \"count\",\n \"callback\",\n \"help\",\n \"version\")\n \n \n \n \n STORE_ACTIONS=(\"store\",\n \"store_const\",\n \"store_true\",\n \"store_false\",\n \"append\",\n \"append_const\",\n \"count\")\n \n \n \n TYPED_ACTIONS=(\"store\",\n \"append\",\n \"callback\")\n \n \n \n ALWAYS_TYPED_ACTIONS=(\"store\",\n \"append\")\n \n \n CONST_ACTIONS=(\"store_const\",\n \"append_const\")\n \n \n \n TYPES=(\"string\",\"int\",\"long\",\"float\",\"complex\",\"choice\")\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n TYPE_CHECKER={\"int\":check_builtin,\n \"long\":check_builtin,\n \"float\":check_builtin,\n \"complex\":check_builtin,\n \"choice\":check_choice,\n }\n \n \n \n \n \n \n \n \n \n \n CHECK_METHODS=None\n \n \n \n \n def __init__(self,*opts,**attrs):\n \n \n self._short_opts=[]\n self._long_opts=[]\n opts=self._check_opt_strings(opts)\n self._set_opt_strings(opts)\n \n \n self._set_attrs(attrs)\n \n \n \n \n \n \n for checker in self.CHECK_METHODS:\n checker(self)\n \n def _check_opt_strings(self,opts):\n \n \n \n opts=[opt for opt in opts if opt]\n if not opts:\n raise TypeError(\"at least one option string must be supplied\")\n return opts\n \n def _set_opt_strings(self,opts):\n for opt in opts:\n if len(opt)<2:\n raise OptionError(\n \"invalid option string %r: \"\n \"must be at least two characters long\"%opt,self)\n elif len(opt)==2:\n if not (opt[0]==\"-\"and opt[1]!=\"-\"):\n raise OptionError(\n \"invalid short option string %r: \"\n \"must be of the form -x, (x any non-dash char)\"%opt,\n self)\n self._short_opts.append(opt)\n else :\n if not (opt[0:2]==\"--\"and opt[2]!=\"-\"):\n raise OptionError(\n \"invalid long option string %r: \"\n \"must start with --, followed by non-dash\"%opt,\n self)\n self._long_opts.append(opt)\n \n def _set_attrs(self,attrs):\n for attr in self.ATTRS:\n if attr in attrs:\n setattr(self,attr,attrs[attr])\n del attrs[attr]\n else :\n if attr =='default':\n setattr(self,attr,NO_DEFAULT)\n else :\n setattr(self,attr,None )\n if attrs:\n attrs=sorted(attrs.keys())\n raise OptionError(\n \"invalid keyword arguments: %s\"%\", \".join(attrs),\n self)\n \n \n \n \n def _check_action(self):\n if self.action is None :\n self.action=\"store\"\n elif self.action not in self.ACTIONS:\n raise OptionError(\"invalid action: %r\"%self.action,self)\n \n def 
_check_type(self):\n if self.type is None :\n if self.action in self.ALWAYS_TYPED_ACTIONS:\n if self.choices is not None :\n \n self.type=\"choice\"\n else :\n \n self.type=\"string\"\n else :\n \n \n if isinstance(self.type,type):\n self.type=self.type.__name__\n \n if self.type ==\"str\":\n self.type=\"string\"\n \n if self.type not in self.TYPES:\n raise OptionError(\"invalid option type: %r\"%self.type,self)\n if self.action not in self.TYPED_ACTIONS:\n raise OptionError(\n \"must not supply a type for action %r\"%self.action,self)\n \n def _check_choice(self):\n if self.type ==\"choice\":\n if self.choices is None :\n raise OptionError(\n \"must supply a list of choices for type 'choice'\",self)\n elif not isinstance(self.choices,(tuple,list)):\n raise OptionError(\n \"choices must be a list of strings ('%s' supplied)\"\n %str(type(self.choices)).split(\"'\")[1],self)\n elif self.choices is not None :\n raise OptionError(\n \"must not supply choices for type %r\"%self.type,self)\n \n def _check_dest(self):\n \n \n takes_value=(self.action in self.STORE_ACTIONS or\n self.type is not None )\n if self.dest is None and takes_value:\n \n \n \n if self._long_opts:\n \n self.dest=self._long_opts[0][2:].replace('-','_')\n else :\n self.dest=self._short_opts[0][1]\n \n def _check_const(self):\n if self.action not in self.CONST_ACTIONS and self.const is not None :\n raise OptionError(\n \"'const' must not be supplied for action %r\"%self.action,\n self)\n \n def _check_nargs(self):\n if self.action in self.TYPED_ACTIONS:\n if self.nargs is None :\n self.nargs=1\n elif self.nargs is not None :\n raise OptionError(\n \"'nargs' must not be supplied for action %r\"%self.action,\n self)\n \n def _check_callback(self):\n if self.action ==\"callback\":\n if not callable(self.callback):\n raise OptionError(\n \"callback not callable: %r\"%self.callback,self)\n if (self.callback_args is not None and\n not isinstance(self.callback_args,tuple)):\n raise OptionError(\n \"callback_args, if supplied, must be a tuple: not %r\"\n %self.callback_args,self)\n if (self.callback_kwargs is not None and\n not isinstance(self.callback_kwargs,dict)):\n raise OptionError(\n \"callback_kwargs, if supplied, must be a dict: not %r\"\n %self.callback_kwargs,self)\n else :\n if self.callback is not None :\n raise OptionError(\n \"callback supplied (%r) for non-callback option\"\n %self.callback,self)\n if self.callback_args is not None :\n raise OptionError(\n \"callback_args supplied for non-callback option\",self)\n if self.callback_kwargs is not None :\n raise OptionError(\n \"callback_kwargs supplied for non-callback option\",self)\n \n \n CHECK_METHODS=[_check_action,\n _check_type,\n _check_choice,\n _check_dest,\n _check_const,\n _check_nargs,\n _check_callback]\n \n \n \n \n def __str__(self):\n return \"/\".join(self._short_opts+self._long_opts)\n \n __repr__=_repr\n \n def takes_value(self):\n return self.type is not None\n \n def get_opt_string(self):\n if self._long_opts:\n return self._long_opts[0]\n else :\n return self._short_opts[0]\n \n \n \n \n def check_value(self,opt,value):\n checker=self.TYPE_CHECKER.get(self.type)\n if checker is None :\n return value\n else :\n return checker(self,opt,value)\n \n def convert_value(self,opt,value):\n if value is not None :\n if self.nargs ==1:\n return self.check_value(opt,value)\n else :\n return tuple([self.check_value(opt,v)for v in value])\n \n def process(self,opt,value,values,parser):\n \n \n \n value=self.convert_value(opt,value)\n \n \n \n \n return 
self.take_action(\n self.action,self.dest,opt,value,values,parser)\n \n def take_action(self,action,dest,opt,value,values,parser):\n if action ==\"store\":\n setattr(values,dest,value)\n elif action ==\"store_const\":\n setattr(values,dest,self.const)\n elif action ==\"store_true\":\n setattr(values,dest,True )\n elif action ==\"store_false\":\n setattr(values,dest,False )\n elif action ==\"append\":\n values.ensure_value(dest,[]).append(value)\n elif action ==\"append_const\":\n values.ensure_value(dest,[]).append(self.const)\n elif action ==\"count\":\n setattr(values,dest,values.ensure_value(dest,0)+1)\n elif action ==\"callback\":\n args=self.callback_args or ()\n kwargs=self.callback_kwargs or {}\n self.callback(self,opt,value,parser,*args,**kwargs)\n elif action ==\"help\":\n parser.print_help()\n parser.exit()\n elif action ==\"version\":\n parser.print_version()\n parser.exit()\n else :\n raise ValueError(\"unknown action %r\"%self.action)\n \n return 1\n \n \n \n \nSUPPRESS_HELP=\"SUPPRESS\"+\"HELP\"\nSUPPRESS_USAGE=\"SUPPRESS\"+\"USAGE\"\n\nclass Values:\n\n def __init__(self,defaults=None ):\n if defaults:\n for (attr,val)in defaults.items():\n setattr(self,attr,val)\n \n def __str__(self):\n return str(self.__dict__)\n \n __repr__=_repr\n \n def __eq__(self,other):\n if isinstance(other,Values):\n return self.__dict__ ==other.__dict__\n elif isinstance(other,dict):\n return self.__dict__ ==other\n else :\n return NotImplemented\n \n def _update_careful(self,dict):\n ''\n\n\n\n\n \n for attr in dir(self):\n if attr in dict:\n dval=dict[attr]\n if dval is not None :\n setattr(self,attr,dval)\n \n def _update_loose(self,dict):\n ''\n\n\n\n \n self.__dict__.update(dict)\n \n def _update(self,dict,mode):\n if mode ==\"careful\":\n self._update_careful(dict)\n elif mode ==\"loose\":\n self._update_loose(dict)\n else :\n raise ValueError(\"invalid update mode: %r\"%mode)\n \n def read_module(self,modname,mode=\"careful\"):\n __import__(modname)\n mod=sys.modules[modname]\n self._update(vars(mod),mode)\n \n def read_file(self,filename,mode=\"careful\"):\n vars={}\n exec(open(filename).read(),vars)\n self._update(vars,mode)\n \n def ensure_value(self,attr,value):\n if not hasattr(self,attr)or getattr(self,attr)is None :\n setattr(self,attr,value)\n return getattr(self,attr)\n \n \nclass OptionContainer:\n\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def __init__(self,option_class,conflict_handler,description):\n \n \n \n \n self._create_option_list()\n \n self.option_class=option_class\n self.set_conflict_handler(conflict_handler)\n self.set_description(description)\n \n def _create_option_mappings(self):\n \n \n \n self._short_opt={}\n self._long_opt={}\n self.defaults={}\n \n \n def _share_option_mappings(self,parser):\n \n \n self._short_opt=parser._short_opt\n self._long_opt=parser._long_opt\n self.defaults=parser.defaults\n \n def set_conflict_handler(self,handler):\n if handler not in (\"error\",\"resolve\"):\n raise ValueError(\"invalid conflict_resolution value %r\"%handler)\n self.conflict_handler=handler\n \n def set_description(self,description):\n self.description=description\n \n def get_description(self):\n return self.description\n \n \n def destroy(self):\n ''\n del self._short_opt\n del self._long_opt\n del self.defaults\n \n \n \n \n def _check_conflict(self,option):\n conflict_opts=[]\n for opt in option._short_opts:\n if opt in self._short_opt:\n conflict_opts.append((opt,self._short_opt[opt]))\n for opt in option._long_opts:\n if opt in 
self._long_opt:\n conflict_opts.append((opt,self._long_opt[opt]))\n \n if conflict_opts:\n handler=self.conflict_handler\n if handler ==\"error\":\n raise OptionConflictError(\n \"conflicting option string(s): %s\"\n %\", \".join([co[0]for co in conflict_opts]),\n option)\n elif handler ==\"resolve\":\n for (opt,c_option)in conflict_opts:\n if opt.startswith(\"--\"):\n c_option._long_opts.remove(opt)\n del self._long_opt[opt]\n else :\n c_option._short_opts.remove(opt)\n del self._short_opt[opt]\n if not (c_option._short_opts or c_option._long_opts):\n c_option.container.option_list.remove(c_option)\n \n def add_option(self,*args,**kwargs):\n ''\n\n \n if isinstance(args[0],str):\n option=self.option_class(*args,**kwargs)\n elif len(args)==1 and not kwargs:\n option=args[0]\n if not isinstance(option,Option):\n raise TypeError(\"not an Option instance: %r\"%option)\n else :\n raise TypeError(\"invalid arguments\")\n \n self._check_conflict(option)\n \n self.option_list.append(option)\n option.container=self\n for opt in option._short_opts:\n self._short_opt[opt]=option\n for opt in option._long_opts:\n self._long_opt[opt]=option\n \n if option.dest is not None :\n if option.default is not NO_DEFAULT:\n self.defaults[option.dest]=option.default\n elif option.dest not in self.defaults:\n self.defaults[option.dest]=None\n \n return option\n \n def add_options(self,option_list):\n for option in option_list:\n self.add_option(option)\n \n \n \n def get_option(self,opt_str):\n return (self._short_opt.get(opt_str)or\n self._long_opt.get(opt_str))\n \n def has_option(self,opt_str):\n return (opt_str in self._short_opt or\n opt_str in self._long_opt)\n \n def remove_option(self,opt_str):\n option=self._short_opt.get(opt_str)\n if option is None :\n option=self._long_opt.get(opt_str)\n if option is None :\n raise ValueError(\"no such option %r\"%opt_str)\n \n for opt in option._short_opts:\n del self._short_opt[opt]\n for opt in option._long_opts:\n del self._long_opt[opt]\n option.container.option_list.remove(option)\n \n \n \n \n def format_option_help(self,formatter):\n if not self.option_list:\n return \"\"\n result=[]\n for option in self.option_list:\n if not option.help is SUPPRESS_HELP:\n result.append(formatter.format_option(option))\n return \"\".join(result)\n \n def format_description(self,formatter):\n return formatter.format_description(self.get_description())\n \n def format_help(self,formatter):\n result=[]\n if self.description:\n result.append(self.format_description(formatter))\n if self.option_list:\n result.append(self.format_option_help(formatter))\n return \"\\n\".join(result)\n \n \nclass OptionGroup(OptionContainer):\n\n def __init__(self,parser,title,description=None ):\n self.parser=parser\n OptionContainer.__init__(\n self,parser.option_class,parser.conflict_handler,description)\n self.title=title\n \n def _create_option_list(self):\n self.option_list=[]\n self._share_option_mappings(self.parser)\n \n def set_title(self,title):\n self.title=title\n \n def destroy(self):\n ''\n OptionContainer.destroy(self)\n del self.option_list\n \n \n \n def format_help(self,formatter):\n result=formatter.format_heading(self.title)\n formatter.indent()\n result +=OptionContainer.format_help(self,formatter)\n formatter.dedent()\n return result\n \n \nclass OptionParser(OptionContainer):\n\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n standard_option_list=[]\n \n def __init__(self,\n 
usage=None ,\n option_list=None ,\n option_class=Option,\n version=None ,\n conflict_handler=\"error\",\n description=None ,\n formatter=None ,\n add_help_option=True ,\n prog=None ,\n epilog=None ):\n OptionContainer.__init__(\n self,option_class,conflict_handler,description)\n self.set_usage(usage)\n self.prog=prog\n self.version=version\n self.allow_interspersed_args=True\n self.process_default_values=True\n if formatter is None :\n formatter=IndentedHelpFormatter()\n self.formatter=formatter\n self.formatter.set_parser(self)\n self.epilog=epilog\n \n \n \n \n \n self._populate_option_list(option_list,\n add_help=add_help_option)\n \n self._init_parsing_state()\n \n \n def destroy(self):\n ''\n\n\n\n\n \n OptionContainer.destroy(self)\n for group in self.option_groups:\n group.destroy()\n del self.option_list\n del self.option_groups\n del self.formatter\n \n \n \n \n \n def _create_option_list(self):\n self.option_list=[]\n self.option_groups=[]\n self._create_option_mappings()\n \n def _add_help_option(self):\n self.add_option(\"-h\",\"--help\",\n action=\"help\",\n help=_(\"show this help message and exit\"))\n \n def _add_version_option(self):\n self.add_option(\"--version\",\n action=\"version\",\n help=_(\"show program's version number and exit\"))\n \n def _populate_option_list(self,option_list,add_help=True ):\n if self.standard_option_list:\n self.add_options(self.standard_option_list)\n if option_list:\n self.add_options(option_list)\n if self.version:\n self._add_version_option()\n if add_help:\n self._add_help_option()\n \n def _init_parsing_state(self):\n \n self.rargs=None\n self.largs=None\n self.values=None\n \n \n \n \n def set_usage(self,usage):\n if usage is None :\n self.usage=_(\"%prog [options]\")\n elif usage is SUPPRESS_USAGE:\n self.usage=None\n \n elif usage.lower().startswith(\"usage: \"):\n self.usage=usage[7:]\n else :\n self.usage=usage\n \n def enable_interspersed_args(self):\n ''\n\n\n\n \n self.allow_interspersed_args=True\n \n def disable_interspersed_args(self):\n ''\n\n\n\n \n self.allow_interspersed_args=False\n \n def set_process_default_values(self,process):\n self.process_default_values=process\n \n def set_default(self,dest,value):\n self.defaults[dest]=value\n \n def set_defaults(self,**kwargs):\n self.defaults.update(kwargs)\n \n def _get_all_options(self):\n options=self.option_list[:]\n for group in self.option_groups:\n options.extend(group.option_list)\n return options\n \n def get_default_values(self):\n if not self.process_default_values:\n \n return Values(self.defaults)\n \n defaults=self.defaults.copy()\n for option in self._get_all_options():\n default=defaults.get(option.dest)\n if isinstance(default,str):\n opt_str=option.get_opt_string()\n defaults[option.dest]=option.check_value(opt_str,default)\n \n return Values(defaults)\n \n \n \n \n def add_option_group(self,*args,**kwargs):\n \n if isinstance(args[0],str):\n group=OptionGroup(self,*args,**kwargs)\n elif len(args)==1 and not kwargs:\n group=args[0]\n if not isinstance(group,OptionGroup):\n raise TypeError(\"not an OptionGroup instance: %r\"%group)\n if group.parser is not self:\n raise ValueError(\"invalid OptionGroup (wrong parser)\")\n else :\n raise TypeError(\"invalid arguments\")\n \n self.option_groups.append(group)\n return group\n \n def get_option_group(self,opt_str):\n option=(self._short_opt.get(opt_str)or\n self._long_opt.get(opt_str))\n if option and option.container is not self:\n return option.container\n return None\n \n \n \n \n def _get_args(self,args):\n if 
args is None :\n return sys.argv[1:]\n else :\n return args[:]\n \n def parse_args(self,args=None ,values=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n rargs=self._get_args(args)\n if values is None :\n values=self.get_default_values()\n \n \n \n \n \n \n \n \n \n \n self.rargs=rargs\n self.largs=largs=[]\n self.values=values\n \n try :\n stop=self._process_args(largs,rargs,values)\n except (BadOptionError,OptionValueError)as err:\n self.error(str(err))\n \n args=largs+rargs\n return self.check_values(values,args)\n \n def check_values(self,values,args):\n ''\n\n\n\n\n\n\n\n\n \n return (values,args)\n \n def _process_args(self,largs,rargs,values):\n ''\n\n\n\n\n\n\n\n \n while rargs:\n arg=rargs[0]\n \n \n \n if arg ==\"--\":\n del rargs[0]\n return\n elif arg[0:2]==\"--\":\n \n self._process_long_opt(rargs,values)\n elif arg[:1]==\"-\"and len(arg)>1:\n \n \n self._process_short_opts(rargs,values)\n elif self.allow_interspersed_args:\n largs.append(arg)\n del rargs[0]\n else :\n return\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n def _match_long_opt(self,opt):\n ''\n\n\n\n\n \n return _match_abbrev(opt,self._long_opt)\n \n def _process_long_opt(self,rargs,values):\n arg=rargs.pop(0)\n \n \n \n if \"=\"in arg:\n (opt,next_arg)=arg.split(\"=\",1)\n rargs.insert(0,next_arg)\n had_explicit_value=True\n else :\n opt=arg\n had_explicit_value=False\n \n opt=self._match_long_opt(opt)\n option=self._long_opt[opt]\n if option.takes_value():\n nargs=option.nargs\n if len(rargs)\".format(self.path)\n return \"\"\n \n def add_dll_directory(path):\n ''\n\n\n\n\n\n\n\n \n import nt\n cookie=nt._add_dll_directory(path)\n return _AddedDllDirectory(\n path,\n cookie,\n nt._remove_dll_directory\n )\n \n \ndef scandir(*args,**kw):\n raise NotImplementedError\n \ndef waitstatus_to_exitcode(status):\n return status >>8\n \n_set=set()\n\nsupports_dir_fd=_set\n\nsupports_effective_ids=_set\n\nsupports_fd=_set\n\nsupports_follow_symlinks=_set\n\n", ["abc", "nt", "os.path", "posix", "posixpath", "sys"]], "pathlib": [".py", "import fnmatch\nimport functools\nimport io\nimport ntpath\nimport os\nimport posixpath\nimport re\nimport sys\nfrom _collections_abc import Sequence\nfrom errno import EINVAL,ENOENT,ENOTDIR,EBADF,ELOOP\nfrom operator import attrgetter\nfrom stat import S_ISDIR,S_ISLNK,S_ISREG,S_ISSOCK,S_ISBLK,S_ISCHR,S_ISFIFO\nfrom urllib.parse import quote_from_bytes as urlquote_from_bytes\n\n\nsupports_symlinks=True\nif os.name =='nt':\n import nt\n if sys.getwindowsversion()[:2]>=(6,0):\n from nt import _getfinalpathname\n else :\n supports_symlinks=False\n _getfinalpathname=None\nelse :\n nt=None\n \n \n__all__=[\n\"PurePath\",\"PurePosixPath\",\"PureWindowsPath\",\n\"Path\",\"PosixPath\",\"WindowsPath\",\n]\n\n\n\n\n\n\n_IGNORED_ERROS=(ENOENT,ENOTDIR,EBADF,ELOOP)\n\n_IGNORED_WINERRORS=(\n21,\n1921,\n)\n\ndef _ignore_error(exception):\n return (getattr(exception,'errno',None )in _IGNORED_ERROS or\n getattr(exception,'winerror',None )in _IGNORED_WINERRORS)\n \n \ndef _is_wildcard_pattern(pat):\n\n\n return \"*\"in pat or \"?\"in pat or \"[\"in pat\n \n \nclass _Flavour(object):\n ''\n \n \n def __init__(self):\n self.join=self.sep.join\n \n def parse_parts(self,parts):\n parsed=[]\n sep=self.sep\n altsep=self.altsep\n drv=root=''\n it=reversed(parts)\n for part in it:\n if not part:\n continue\n if altsep:\n part=part.replace(altsep,sep)\n drv,root,rel=self.splitroot(part)\n if sep in rel:\n for x in reversed(rel.split(sep)):\n if x and x !='.':\n parsed.append(sys.intern(x))\n else :\n if rel 
and rel !='.':\n parsed.append(sys.intern(rel))\n if drv or root:\n if not drv:\n \n \n \n for part in it:\n if not part:\n continue\n if altsep:\n part=part.replace(altsep,sep)\n drv=self.splitroot(part)[0]\n if drv:\n break\n break\n if drv or root:\n parsed.append(drv+root)\n parsed.reverse()\n return drv,root,parsed\n \n def join_parsed_parts(self,drv,root,parts,drv2,root2,parts2):\n ''\n\n\n \n if root2:\n if not drv2 and drv:\n return drv,root2,[drv+root2]+parts2[1:]\n elif drv2:\n if drv2 ==drv or self.casefold(drv2)==self.casefold(drv):\n \n return drv,root,parts+parts2[1:]\n else :\n \n return drv,root,parts+parts2\n return drv2,root2,parts2\n \n \nclass _WindowsFlavour(_Flavour):\n\n\n\n sep='\\\\'\n altsep='/'\n has_drv=True\n pathmod=ntpath\n \n is_supported=(os.name =='nt')\n \n drive_letters=set('abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ')\n ext_namespace_prefix='\\\\\\\\?\\\\'\n \n reserved_names=(\n {'CON','PRN','AUX','NUL'}|\n {'COM%d'%i for i in range(1,10)}|\n {'LPT%d'%i for i in range(1,10)}\n )\n \n \n \n \n \n \n \n def splitroot(self,part,sep=sep):\n first=part[0:1]\n second=part[1:2]\n if (second ==sep and first ==sep):\n \n \n prefix,part=self._split_extended_path(part)\n first=part[0:1]\n second=part[1:2]\n else :\n prefix=''\n third=part[2:3]\n if (second ==sep and first ==sep and third !=sep):\n \n \n \n \n index=part.find(sep,2)\n if index !=-1:\n index2=part.find(sep,index+1)\n \n \n if index2 !=index+1:\n if index2 ==-1:\n index2=len(part)\n if prefix:\n return prefix+part[1:index2],sep,part[index2+1:]\n else :\n return part[:index2],sep,part[index2+1:]\n drv=root=''\n if second ==':'and first in self.drive_letters:\n drv=part[:2]\n part=part[2:]\n first=third\n if first ==sep:\n root=first\n part=part.lstrip(sep)\n return prefix+drv,root,part\n \n def casefold(self,s):\n return s.lower()\n \n def casefold_parts(self,parts):\n return [p.lower()for p in parts]\n \n def compile_pattern(self,pattern):\n return re.compile(fnmatch.translate(pattern),re.IGNORECASE).fullmatch\n \n def resolve(self,path,strict=False ):\n s=str(path)\n if not s:\n return os.getcwd()\n previous_s=None\n if _getfinalpathname is not None :\n if strict:\n return self._ext_to_normal(_getfinalpathname(s))\n else :\n tail_parts=[]\n while True :\n try :\n s=self._ext_to_normal(_getfinalpathname(s))\n except FileNotFoundError:\n previous_s=s\n s,tail=os.path.split(s)\n tail_parts.append(tail)\n if previous_s ==s:\n return path\n else :\n return os.path.join(s,*reversed(tail_parts))\n \n return None\n \n def _split_extended_path(self,s,ext_prefix=ext_namespace_prefix):\n prefix=''\n if s.startswith(ext_prefix):\n prefix=s[:4]\n s=s[4:]\n if s.startswith('UNC\\\\'):\n prefix +=s[:3]\n s='\\\\'+s[3:]\n return prefix,s\n \n def _ext_to_normal(self,s):\n \n return self._split_extended_path(s)[1]\n \n def is_reserved(self,parts):\n \n \n \n \n if not parts:\n return False\n if parts[0].startswith('\\\\\\\\'):\n \n return False\n return parts[-1].partition('.')[0].upper()in self.reserved_names\n \n def make_uri(self,path):\n \n drive=path.drive\n if len(drive)==2 and drive[1]==':':\n \n rest=path.as_posix()[2:].lstrip('/')\n return 'file:///%s/%s'%(\n drive,urlquote_from_bytes(rest.encode('utf-8')))\n else :\n \n return 'file:'+urlquote_from_bytes(path.as_posix().encode('utf-8'))\n \n def gethomedir(self,username):\n if 'USERPROFILE'in os.environ:\n userhome=os.environ['USERPROFILE']\n elif 'HOMEPATH'in os.environ:\n try :\n drv=os.environ['HOMEDRIVE']\n except KeyError:\n drv=''\n 
userhome=drv+os.environ['HOMEPATH']\n else :\n raise RuntimeError(\"Can't determine home directory\")\n \n if username:\n \n \n \n \n if os.environ['USERNAME']!=username:\n drv,root,parts=self.parse_parts((userhome,))\n if parts[-1]!=os.environ['USERNAME']:\n raise RuntimeError(\"Can't determine home directory \"\n \"for %r\"%username)\n parts[-1]=username\n if drv or root:\n userhome=drv+root+self.join(parts[1:])\n else :\n userhome=self.join(parts)\n return userhome\n \nclass _PosixFlavour(_Flavour):\n sep='/'\n altsep=''\n has_drv=False\n pathmod=posixpath\n \n is_supported=(os.name !='nt')\n \n def splitroot(self,part,sep=sep):\n if part and part[0]==sep:\n stripped_part=part.lstrip(sep)\n \n \n \n \n \n if len(part)-len(stripped_part)==2:\n return '',sep *2,stripped_part\n else :\n return '',sep,stripped_part\n else :\n return '','',part\n \n def casefold(self,s):\n return s\n \n def casefold_parts(self,parts):\n return parts\n \n def compile_pattern(self,pattern):\n return re.compile(fnmatch.translate(pattern)).fullmatch\n \n def resolve(self,path,strict=False ):\n sep=self.sep\n accessor=path._accessor\n seen={}\n def _resolve(path,rest):\n if rest.startswith(sep):\n path=''\n \n for name in rest.split(sep):\n if not name or name =='.':\n \n continue\n if name =='..':\n \n path,_,_=path.rpartition(sep)\n continue\n if path.endswith(sep):\n newpath=path+name\n else :\n newpath=path+sep+name\n if newpath in seen:\n \n path=seen[newpath]\n if path is not None :\n \n continue\n \n raise RuntimeError(\"Symlink loop from %r\"%newpath)\n \n try :\n target=accessor.readlink(newpath)\n except OSError as e:\n if e.errno !=EINVAL and strict:\n raise\n \n \n path=newpath\n else :\n seen[newpath]=None\n path=_resolve(path,target)\n seen[newpath]=path\n \n return path\n \n \n base=''if path.is_absolute()else os.getcwd()\n return _resolve(base,str(path))or sep\n \n def is_reserved(self,parts):\n return False\n \n def make_uri(self,path):\n \n \n bpath=bytes(path)\n return 'file://'+urlquote_from_bytes(bpath)\n \n def gethomedir(self,username):\n if not username:\n try :\n return os.environ['HOME']\n except KeyError:\n import pwd\n return pwd.getpwuid(os.getuid()).pw_dir\n else :\n import pwd\n try :\n return pwd.getpwnam(username).pw_dir\n except KeyError:\n raise RuntimeError(\"Can't determine home directory \"\n \"for %r\"%username)\n \n \n_windows_flavour=_WindowsFlavour()\n_posix_flavour=_PosixFlavour()\n\n\nclass _Accessor:\n ''\n \n \n \nclass _NormalAccessor(_Accessor):\n\n stat=os.stat\n \n lstat=os.lstat\n \n open=os.open\n \n listdir=os.listdir\n \n scandir=os.scandir\n \n chmod=os.chmod\n \n if hasattr(os,\"lchmod\"):\n lchmod=os.lchmod\n else :\n def lchmod(self,pathobj,mode):\n raise NotImplementedError(\"lchmod() not available on this system\")\n \n mkdir=os.mkdir\n \n unlink=os.unlink\n \n if hasattr(os,\"link\"):\n link_to=os.link\n else :\n @staticmethod\n def link_to(self,target):\n raise NotImplementedError(\"os.link() not available on this system\")\n \n rmdir=os.rmdir\n \n rename=os.rename\n \n replace=os.replace\n \n if nt:\n if supports_symlinks:\n symlink=os.symlink\n else :\n def symlink(a,b,target_is_directory):\n raise NotImplementedError(\"symlink() not available on this system\")\n else :\n \n @staticmethod\n def symlink(a,b,target_is_directory):\n return os.symlink(a,b)\n \n utime=os.utime\n \n \n def readlink(self,path):\n return os.readlink(path)\n \n def owner(self,path):\n try :\n import pwd\n return pwd.getpwuid(self.stat(path).st_uid).pw_name\n except 
ImportError:\n raise NotImplementedError(\"Path.owner() is unsupported on this system\")\n \n def group(self,path):\n try :\n import grp\n return grp.getgrgid(self.stat(path).st_gid).gr_name\n except ImportError:\n raise NotImplementedError(\"Path.group() is unsupported on this system\")\n \n \n_normal_accessor=_NormalAccessor()\n\n\n\n\n\n\ndef _make_selector(pattern_parts,flavour):\n pat=pattern_parts[0]\n child_parts=pattern_parts[1:]\n if pat =='**':\n cls=_RecursiveWildcardSelector\n elif '**'in pat:\n raise ValueError(\"Invalid pattern: '**' can only be an entire path component\")\n elif _is_wildcard_pattern(pat):\n cls=_WildcardSelector\n else :\n cls=_PreciseSelector\n return cls(pat,child_parts,flavour)\n \nif hasattr(functools,\"lru_cache\"):\n _make_selector=functools.lru_cache()(_make_selector)\n \n \nclass _Selector:\n ''\n \n \n def __init__(self,child_parts,flavour):\n self.child_parts=child_parts\n if child_parts:\n self.successor=_make_selector(child_parts,flavour)\n self.dironly=True\n else :\n self.successor=_TerminatingSelector()\n self.dironly=False\n \n def select_from(self,parent_path):\n ''\n \n path_cls=type(parent_path)\n is_dir=path_cls.is_dir\n exists=path_cls.exists\n scandir=parent_path._accessor.scandir\n if not is_dir(parent_path):\n return iter([])\n return self._select_from(parent_path,is_dir,exists,scandir)\n \n \nclass _TerminatingSelector:\n\n def _select_from(self,parent_path,is_dir,exists,scandir):\n yield parent_path\n \n \nclass _PreciseSelector(_Selector):\n\n def __init__(self,name,child_parts,flavour):\n self.name=name\n _Selector.__init__(self,child_parts,flavour)\n \n def _select_from(self,parent_path,is_dir,exists,scandir):\n try :\n path=parent_path._make_child_relpath(self.name)\n if (is_dir if self.dironly else exists)(path):\n for p in self.successor._select_from(path,is_dir,exists,scandir):\n yield p\n except PermissionError:\n return\n \n \nclass _WildcardSelector(_Selector):\n\n def __init__(self,pat,child_parts,flavour):\n self.match=flavour.compile_pattern(pat)\n _Selector.__init__(self,child_parts,flavour)\n \n def _select_from(self,parent_path,is_dir,exists,scandir):\n try :\n with scandir(parent_path)as scandir_it:\n entries=list(scandir_it)\n for entry in entries:\n if self.dironly:\n try :\n \n \n \n if not entry.is_dir():\n continue\n except OSError as e:\n if not _ignore_error(e):\n raise\n continue\n name=entry.name\n if self.match(name):\n path=parent_path._make_child_relpath(name)\n for p in self.successor._select_from(path,is_dir,exists,scandir):\n yield p\n except PermissionError:\n return\n \n \nclass _RecursiveWildcardSelector(_Selector):\n\n def __init__(self,pat,child_parts,flavour):\n _Selector.__init__(self,child_parts,flavour)\n \n def _iterate_directories(self,parent_path,is_dir,scandir):\n yield parent_path\n try :\n with scandir(parent_path)as scandir_it:\n entries=list(scandir_it)\n for entry in entries:\n entry_is_dir=False\n try :\n entry_is_dir=entry.is_dir()\n except OSError as e:\n if not _ignore_error(e):\n raise\n if entry_is_dir and not entry.is_symlink():\n path=parent_path._make_child_relpath(entry.name)\n for p in self._iterate_directories(path,is_dir,scandir):\n yield p\n except PermissionError:\n return\n \n def _select_from(self,parent_path,is_dir,exists,scandir):\n try :\n yielded=set()\n try :\n successor_select=self.successor._select_from\n for starting_point in self._iterate_directories(parent_path,is_dir,scandir):\n for p in successor_select(starting_point,is_dir,exists,scandir):\n if p not in 
yielded:\n yield p\n yielded.add(p)\n finally :\n yielded.clear()\n except PermissionError:\n return\n \n \n \n \n \n \nclass _PathParents(Sequence):\n ''\n \n __slots__=('_pathcls','_drv','_root','_parts')\n \n def __init__(self,path):\n \n self._pathcls=type(path)\n self._drv=path._drv\n self._root=path._root\n self._parts=path._parts\n \n def __len__(self):\n if self._drv or self._root:\n return len(self._parts)-1\n else :\n return len(self._parts)\n \n def __getitem__(self,idx):\n if idx <0 or idx >=len(self):\n raise IndexError(idx)\n return self._pathcls._from_parsed_parts(self._drv,self._root,\n self._parts[:-idx -1])\n \n def __repr__(self):\n return \"<{}.parents>\".format(self._pathcls.__name__)\n \n \nclass PurePath(object):\n ''\n\n\n\n\n\n\n \n __slots__=(\n '_drv','_root','_parts',\n '_str','_hash','_pparts','_cached_cparts',\n )\n \n def __new__(cls,*args):\n ''\n\n\n\n \n if cls is PurePath:\n cls=PureWindowsPath if os.name =='nt'else PurePosixPath\n return cls._from_parts(args)\n \n def __reduce__(self):\n \n \n return (self.__class__,tuple(self._parts))\n \n @classmethod\n def _parse_args(cls,args):\n \n \n parts=[]\n for a in args:\n if isinstance(a,PurePath):\n parts +=a._parts\n else :\n a=os.fspath(a)\n if isinstance(a,str):\n \n parts.append(str(a))\n else :\n raise TypeError(\n \"argument should be a str object or an os.PathLike \"\n \"object returning str, not %r\"\n %type(a))\n return cls._flavour.parse_parts(parts)\n \n @classmethod\n def _from_parts(cls,args,init=True ):\n \n \n self=object.__new__(cls)\n drv,root,parts=self._parse_args(args)\n self._drv=drv\n self._root=root\n self._parts=parts\n if init:\n self._init()\n return self\n \n @classmethod\n def _from_parsed_parts(cls,drv,root,parts,init=True ):\n self=object.__new__(cls)\n self._drv=drv\n self._root=root\n self._parts=parts\n if init:\n self._init()\n return self\n \n @classmethod\n def _format_parsed_parts(cls,drv,root,parts):\n if drv or root:\n return drv+root+cls._flavour.join(parts[1:])\n else :\n return cls._flavour.join(parts)\n \n def _init(self):\n \n pass\n \n def _make_child(self,args):\n drv,root,parts=self._parse_args(args)\n drv,root,parts=self._flavour.join_parsed_parts(\n self._drv,self._root,self._parts,drv,root,parts)\n return self._from_parsed_parts(drv,root,parts)\n \n def __str__(self):\n ''\n \n try :\n return self._str\n except AttributeError:\n self._str=self._format_parsed_parts(self._drv,self._root,\n self._parts)or '.'\n return self._str\n \n def __fspath__(self):\n return str(self)\n \n def as_posix(self):\n ''\n \n f=self._flavour\n return str(self).replace(f.sep,'/')\n \n def __bytes__(self):\n ''\n \n return os.fsencode(self)\n \n def __repr__(self):\n return \"{}({!r})\".format(self.__class__.__name__,self.as_posix())\n \n def as_uri(self):\n ''\n if not self.is_absolute():\n raise ValueError(\"relative path can't be expressed as a file URI\")\n return self._flavour.make_uri(self)\n \n @property\n def _cparts(self):\n \n try :\n return self._cached_cparts\n except AttributeError:\n self._cached_cparts=self._flavour.casefold_parts(self._parts)\n return self._cached_cparts\n \n def __eq__(self,other):\n if not isinstance(other,PurePath):\n return NotImplemented\n return self._cparts ==other._cparts and self._flavour is other._flavour\n \n def __hash__(self):\n try :\n return self._hash\n except AttributeError:\n self._hash=hash(tuple(self._cparts))\n return self._hash\n \n def __lt__(self,other):\n if not isinstance(other,PurePath)or self._flavour is not 
other._flavour:\n return NotImplemented\n return self._cparts other._cparts\n \n def __ge__(self,other):\n if not isinstance(other,PurePath)or self._flavour is not other._flavour:\n return NotImplemented\n return self._cparts >=other._cparts\n \n def __class_getitem__(cls,type):\n return cls\n \n drive=property(attrgetter('_drv'),\n doc=\"\"\"The drive prefix (letter or UNC path), if any.\"\"\")\n \n root=property(attrgetter('_root'),\n doc=\"\"\"The root of the path, if any.\"\"\")\n \n @property\n def anchor(self):\n ''\n anchor=self._drv+self._root\n return anchor\n \n @property\n def name(self):\n ''\n parts=self._parts\n if len(parts)==(1 if (self._drv or self._root)else 0):\n return ''\n return parts[-1]\n \n @property\n def suffix(self):\n ''\n\n\n\n \n name=self.name\n i=name.rfind('.')\n if 0 len(parts):\n return False\n for part,pat in zip(reversed(parts),reversed(pat_parts)):\n if not fnmatch.fnmatchcase(part,pat):\n return False\n return True\n \n \n \nos.PathLike.register(PurePath)\n\n\nclass PurePosixPath(PurePath):\n ''\n\n\n\n \n _flavour=_posix_flavour\n __slots__=()\n \n \nclass PureWindowsPath(PurePath):\n ''\n\n\n\n \n _flavour=_windows_flavour\n __slots__=()\n \n \n \n \n \nclass Path(PurePath):\n ''\n\n\n\n\n\n\n \n __slots__=(\n '_accessor',\n )\n \n def __new__(cls,*args,**kwargs):\n if cls is Path:\n cls=WindowsPath if os.name =='nt'else PosixPath\n self=cls._from_parts(args,init=False )\n if not self._flavour.is_supported:\n raise NotImplementedError(\"cannot instantiate %r on your system\"\n %(cls.__name__,))\n self._init()\n return self\n \n def _init(self,\n \n template=None ,\n ):\n if template is not None :\n self._accessor=template._accessor\n else :\n self._accessor=_normal_accessor\n \n def _make_child_relpath(self,part):\n \n \n parts=self._parts+[part]\n return self._from_parsed_parts(self._drv,self._root,parts)\n \n def __enter__(self):\n return self\n \n def __exit__(self,t,v,tb):\n \n \n \n \n \n \n \n \n pass\n \n def _opener(self,name,flags,mode=0o666):\n \n return self._accessor.open(self,flags,mode)\n \n def _raw_open(self,flags,mode=0o777):\n ''\n\n\n \n return self._accessor.open(self,flags,mode)\n \n \n \n @classmethod\n def cwd(cls):\n ''\n\n \n return cls(os.getcwd())\n \n @classmethod\n def home(cls):\n ''\n\n \n return cls(cls()._flavour.gethomedir(None ))\n \n def samefile(self,other_path):\n ''\n\n \n st=self.stat()\n try :\n other_st=other_path.stat()\n except AttributeError:\n other_st=self._accessor.stat(other_path)\n return os.path.samestat(st,other_st)\n \n def iterdir(self):\n ''\n\n \n for name in self._accessor.listdir(self):\n if name in {'.','..'}:\n \n continue\n yield self._make_child_relpath(name)\n \n def glob(self,pattern):\n ''\n\n \n sys.audit(\"pathlib.Path.glob\",self,pattern)\n if not pattern:\n raise ValueError(\"Unacceptable pattern: {!r}\".format(pattern))\n drv,root,pattern_parts=self._flavour.parse_parts((pattern,))\n if drv or root:\n raise NotImplementedError(\"Non-relative patterns are unsupported\")\n selector=_make_selector(tuple(pattern_parts),self._flavour)\n for p in selector.select_from(self):\n yield p\n \n def rglob(self,pattern):\n ''\n\n\n \n sys.audit(\"pathlib.Path.rglob\",self,pattern)\n drv,root,pattern_parts=self._flavour.parse_parts((pattern,))\n if drv or root:\n raise NotImplementedError(\"Non-relative patterns are unsupported\")\n selector=_make_selector((\"**\",)+tuple(pattern_parts),self._flavour)\n for p in selector.select_from(self):\n yield p\n \n def absolute(self):\n ''\n\n\n\n\n \n \n 
if self.is_absolute():\n return self\n \n \n obj=self._from_parts([os.getcwd()]+self._parts,init=False )\n obj._init(template=self)\n return obj\n \n def resolve(self,strict=False ):\n ''\n\n\n\n \n s=self._flavour.resolve(self,strict=strict)\n if s is None :\n \n \n self.stat()\n s=str(self.absolute())\n \n normed=self._flavour.pathmod.normpath(s)\n obj=self._from_parts((normed,),init=False )\n obj._init(template=self)\n return obj\n \n def stat(self):\n ''\n\n\n \n return self._accessor.stat(self)\n \n def owner(self):\n ''\n\n \n return self._accessor.owner(self)\n \n def group(self):\n ''\n\n \n return self._accessor.group(self)\n \n def open(self,mode='r',buffering=-1,encoding=None ,\n errors=None ,newline=None ):\n ''\n\n\n \n return io.open(self,mode,buffering,encoding,errors,newline,\n opener=self._opener)\n \n def read_bytes(self):\n ''\n\n \n with self.open(mode='rb')as f:\n return f.read()\n \n def read_text(self,encoding=None ,errors=None ):\n ''\n\n \n with self.open(mode='r',encoding=encoding,errors=errors)as f:\n return f.read()\n \n def write_bytes(self,data):\n ''\n\n \n \n view=memoryview(data)\n with self.open(mode='wb')as f:\n return f.write(view)\n \n def write_text(self,data,encoding=None ,errors=None ):\n ''\n\n \n if not isinstance(data,str):\n raise TypeError('data must be str, not %s'%\n data.__class__.__name__)\n with self.open(mode='w',encoding=encoding,errors=errors)as f:\n return f.write(data)\n \n def readlink(self):\n ''\n\n \n path=self._accessor.readlink(self)\n obj=self._from_parts((path,),init=False )\n obj._init(template=self)\n return obj\n \n def touch(self,mode=0o666,exist_ok=True ):\n ''\n\n \n if exist_ok:\n \n \n \n try :\n self._accessor.utime(self,None )\n except OSError:\n \n pass\n else :\n return\n flags=os.O_CREAT |os.O_WRONLY\n if not exist_ok:\n flags |=os.O_EXCL\n fd=self._raw_open(flags,mode)\n os.close(fd)\n \n def mkdir(self,mode=0o777,parents=False ,exist_ok=False ):\n ''\n\n \n try :\n self._accessor.mkdir(self,mode)\n except FileNotFoundError:\n if not parents or self.parent ==self:\n raise\n self.parent.mkdir(parents=True ,exist_ok=True )\n self.mkdir(mode,parents=False ,exist_ok=exist_ok)\n except OSError:\n \n \n if not exist_ok or not self.is_dir():\n raise\n \n def chmod(self,mode):\n ''\n\n \n self._accessor.chmod(self,mode)\n \n def lchmod(self,mode):\n ''\n\n\n \n self._accessor.lchmod(self,mode)\n \n def unlink(self,missing_ok=False ):\n ''\n\n\n \n try :\n self._accessor.unlink(self)\n except FileNotFoundError:\n if not missing_ok:\n raise\n \n def rmdir(self):\n ''\n\n \n self._accessor.rmdir(self)\n \n def lstat(self):\n ''\n\n\n \n return self._accessor.lstat(self)\n \n def link_to(self,target):\n ''\n\n \n self._accessor.link_to(self,target)\n \n def rename(self,target):\n ''\n\n\n \n self._accessor.rename(self,target)\n return self.__class__(target)\n \n def replace(self,target):\n ''\n\n\n\n \n self._accessor.replace(self,target)\n return self.__class__(target)\n \n def symlink_to(self,target,target_is_directory=False ):\n ''\n\n\n \n self._accessor.symlink(target,self,target_is_directory)\n \n \n \n def exists(self):\n ''\n\n \n try :\n self.stat()\n except OSError as e:\n if not _ignore_error(e):\n raise\n return False\n except ValueError:\n \n return False\n return True\n \n def is_dir(self):\n ''\n\n \n try :\n return S_ISDIR(self.stat().st_mode)\n except OSError as e:\n if not _ignore_error(e):\n raise\n \n \n return False\n except ValueError:\n \n return False\n \n def is_file(self):\n ''\n\n\n \n try :\n 
return S_ISREG(self.stat().st_mode)\n except OSError as e:\n if not _ignore_error(e):\n raise\n \n \n return False\n except ValueError:\n \n return False\n \n def is_mount(self):\n ''\n\n \n \n if not self.exists()or not self.is_dir():\n return False\n \n try :\n parent_dev=self.parent.stat().st_dev\n except OSError:\n return False\n \n dev=self.stat().st_dev\n if dev !=parent_dev:\n return True\n ino=self.stat().st_ino\n parent_ino=self.parent.stat().st_ino\n return ino ==parent_ino\n \n def is_symlink(self):\n ''\n\n \n try :\n return S_ISLNK(self.lstat().st_mode)\n except OSError as e:\n if not _ignore_error(e):\n raise\n \n return False\n except ValueError:\n \n return False\n \n def is_block_device(self):\n ''\n\n \n try :\n return S_ISBLK(self.stat().st_mode)\n except OSError as e:\n if not _ignore_error(e):\n raise\n \n \n return False\n except ValueError:\n \n return False\n \n def is_char_device(self):\n ''\n\n \n try :\n return S_ISCHR(self.stat().st_mode)\n except OSError as e:\n if not _ignore_error(e):\n raise\n \n \n return False\n except ValueError:\n \n return False\n \n def is_fifo(self):\n ''\n\n \n try :\n return S_ISFIFO(self.stat().st_mode)\n except OSError as e:\n if not _ignore_error(e):\n raise\n \n \n return False\n except ValueError:\n \n return False\n \n def is_socket(self):\n ''\n\n \n try :\n return S_ISSOCK(self.stat().st_mode)\n except OSError as e:\n if not _ignore_error(e):\n raise\n \n \n return False\n except ValueError:\n \n return False\n \n def expanduser(self):\n ''\n\n \n if (not (self._drv or self._root)and\n self._parts and self._parts[0][:1]=='~'):\n homedir=self._flavour.gethomedir(self._parts[0][1:])\n return self._from_parts([homedir]+self._parts[1:])\n \n return self\n \n \nclass PosixPath(Path,PurePosixPath):\n ''\n\n\n \n __slots__=()\n \nclass WindowsPath(Path,PureWindowsPath):\n ''\n\n\n \n __slots__=()\n \n def is_mount(self):\n raise NotImplementedError(\"Path.is_mount() is unsupported on this system\")\n", ["_collections_abc", "errno", "fnmatch", "functools", "grp", "io", "nt", "ntpath", "operator", "os", "posixpath", "pwd", "re", "stat", "sys", "urllib.parse"]], "pdb": [".py", "#! /usr/bin/env python3\n\n\"\"\"\nThe Python Debugger Pdb\n=======================\n\nTo use the debugger in its simplest form:\n\n >>> import pdb\n >>> pdb.run('')\n\nThe debugger's prompt is '(Pdb) '. This will stop in the first\nfunction call in .\n\nAlternatively, if a statement terminated with an unhandled exception,\nyou can use pdb's post-mortem facility to inspect the contents of the\ntraceback:\n\n >>> \n \n >>> import pdb\n >>> pdb.pm()\n\nThe commands recognized by the debugger are listed in the next\nsection. Most can be abbreviated as indicated; e.g., h(elp) means\nthat 'help' can be typed as 'h' or 'help' (but not as 'he' or 'hel',\nnor as 'H' or 'Help' or 'HELP'). Optional arguments are enclosed in\nsquare brackets. Alternatives in the command syntax are separated\nby a vertical bar (|).\n\nA blank line repeats the previous command literally, except for\n'list', where it lists the next 11 lines.\n\nCommands that the debugger doesn't recognize are assumed to be Python\nstatements and are executed in the context of the program being\ndebugged. Python statements can also be prefixed with an exclamation\npoint ('!'). 
This is a powerful way to inspect the program being\ndebugged; it is even possible to change variables or call functions.\nWhen an exception occurs in such a statement, the exception name is\nprinted but the debugger's state is not changed.\n\nThe debugger supports aliases, which can save typing. And aliases can\nhave parameters (see the alias help entry) which allows one a certain\nlevel of adaptability to the context under examination.\n\nMultiple commands may be entered on a single line, separated by the\npair ';;'. No intelligence is applied to separating the commands; the\ninput is split at the first ';;', even if it is in the middle of a\nquoted string.\n\nIf a file \".pdbrc\" exists in your home directory or in the current\ndirectory, it is read in and executed as if it had been typed at the\ndebugger prompt. This is particularly useful for aliases. If both\nfiles exist, the one in the home directory is read first and aliases\ndefined there can be overridden by the local file. This behavior can be\ndisabled by passing the \"readrc=False\" argument to the Pdb constructor.\n\nAside from aliases, the debugger is not directly programmable; but it\nis implemented as a class from which you can derive your own debugger\nclass, which you can make as fancy as you like.\n\n\nDebugger commands\n=================\n\n\"\"\"\n\n\n\nimport os\nimport re\nimport sys\nimport cmd\nimport bdb\nimport dis\nimport code\nimport glob\nimport pprint\nimport signal\nimport inspect\nimport traceback\nimport linecache\n\n\nclass Restart(Exception):\n ''\n pass\n \n__all__=[\"run\",\"pm\",\"Pdb\",\"runeval\",\"runctx\",\"runcall\",\"set_trace\",\n\"post_mortem\",\"help\"]\n\ndef find_function(funcname,filename):\n cre=re.compile(r'def\\s+%s\\s*[(]'%re.escape(funcname))\n try :\n fp=open(filename)\n except OSError:\n return None\n \n with fp:\n for lineno,line in enumerate(fp,start=1):\n if cre.match(line):\n return funcname,filename,lineno\n return None\n \ndef getsourcelines(obj):\n lines,lineno=inspect.findsource(obj)\n if inspect.isframe(obj)and obj.f_globals is obj.f_locals:\n \n return lines,1\n elif inspect.ismodule(obj):\n return lines,1\n return inspect.getblock(lines[lineno:]),lineno+1\n \ndef lasti2lineno(code,lasti):\n linestarts=list(dis.findlinestarts(code))\n linestarts.reverse()\n for i,lineno in linestarts:\n if lasti >=i:\n return lineno\n return 0\n \n \nclass _rstr(str):\n ''\n def __repr__(self):\n return self\n \n \n \n \n \n \n \nline_prefix='\\n-> '\n\nclass Pdb(bdb.Bdb,cmd.Cmd):\n\n _previous_sigint_handler=None\n \n def __init__(self,completekey='tab',stdin=None ,stdout=None ,skip=None ,\n nosigint=False ,readrc=True ):\n bdb.Bdb.__init__(self,skip=skip)\n cmd.Cmd.__init__(self,completekey,stdin,stdout)\n sys.audit(\"pdb.Pdb\")\n if stdout:\n self.use_rawinput=0\n self.prompt='(Pdb) '\n self.aliases={}\n self.displaying={}\n self.mainpyfile=''\n self._wait_for_mainpyfile=False\n self.tb_lineno={}\n \n try :\n import readline\n \n readline.set_completer_delims(' \\t\\n`@#$%^&*()=+[{]}\\\\|;:\\'\",<>?')\n except ImportError:\n pass\n self.allow_kbdint=False\n self.nosigint=nosigint\n \n \n self.rcLines=[]\n if readrc:\n try :\n with open(os.path.expanduser('~/.pdbrc'))as rcFile:\n self.rcLines.extend(rcFile)\n except OSError:\n pass\n try :\n with open(\".pdbrc\")as rcFile:\n self.rcLines.extend(rcFile)\n except OSError:\n pass\n \n self.commands={}\n self.commands_doprompt={}\n \n self.commands_silent={}\n \n self.commands_defining=False\n \n self.commands_bnum=None\n \n \n def 
sigint_handler(self,signum,frame):\n if self.allow_kbdint:\n raise KeyboardInterrupt\n self.message(\"\\nProgram interrupted. (Use 'cont' to resume).\")\n self.set_step()\n self.set_trace(frame)\n \n def reset(self):\n bdb.Bdb.reset(self)\n self.forget()\n \n def forget(self):\n self.lineno=None\n self.stack=[]\n self.curindex=0\n self.curframe=None\n self.tb_lineno.clear()\n \n def setup(self,f,tb):\n self.forget()\n self.stack,self.curindex=self.get_stack(f,tb)\n while tb:\n \n \n \n lineno=lasti2lineno(tb.tb_frame.f_code,tb.tb_lasti)\n self.tb_lineno[tb.tb_frame]=lineno\n tb=tb.tb_next\n self.curframe=self.stack[self.curindex][0]\n \n \n \n self.curframe_locals=self.curframe.f_locals\n return self.execRcLines()\n \n \n def execRcLines(self):\n if not self.rcLines:\n return\n \n rcLines=self.rcLines\n rcLines.reverse()\n \n self.rcLines=[]\n while rcLines:\n line=rcLines.pop().strip()\n if line and line[0]!='#':\n if self.onecmd(line):\n \n \n \n self.rcLines +=reversed(rcLines)\n return True\n \n \n \n def user_call(self,frame,argument_list):\n ''\n \n if self._wait_for_mainpyfile:\n return\n if self.stop_here(frame):\n self.message('--Call--')\n self.interaction(frame,None )\n \n def user_line(self,frame):\n ''\n if self._wait_for_mainpyfile:\n if (self.mainpyfile !=self.canonic(frame.f_code.co_filename)\n or frame.f_lineno <=0):\n return\n self._wait_for_mainpyfile=False\n if self.bp_commands(frame):\n self.interaction(frame,None )\n \n def bp_commands(self,frame):\n ''\n\n\n\n \n \n if getattr(self,\"currentbp\",False )and\\\n self.currentbp in self.commands:\n currentbp=self.currentbp\n self.currentbp=0\n lastcmd_back=self.lastcmd\n self.setup(frame,None )\n for line in self.commands[currentbp]:\n self.onecmd(line)\n self.lastcmd=lastcmd_back\n if not self.commands_silent[currentbp]:\n self.print_stack_entry(self.stack[self.curindex])\n if self.commands_doprompt[currentbp]:\n self._cmdloop()\n self.forget()\n return\n return 1\n \n def user_return(self,frame,return_value):\n ''\n if self._wait_for_mainpyfile:\n return\n frame.f_locals['__return__']=return_value\n self.message('--Return--')\n self.interaction(frame,None )\n \n def user_exception(self,frame,exc_info):\n ''\n \n if self._wait_for_mainpyfile:\n return\n exc_type,exc_value,exc_traceback=exc_info\n frame.f_locals['__exception__']=exc_type,exc_value\n \n \n \n \n \n \n prefix='Internal 'if (not exc_traceback\n and exc_type is StopIteration)else ''\n self.message('%s%s'%(prefix,\n traceback.format_exception_only(exc_type,exc_value)[-1].strip()))\n self.interaction(frame,exc_traceback)\n \n \n def _cmdloop(self):\n while True :\n try :\n \n \n self.allow_kbdint=True\n self.cmdloop()\n self.allow_kbdint=False\n break\n except KeyboardInterrupt:\n self.message('--KeyboardInterrupt--')\n \n \n def preloop(self):\n displaying=self.displaying.get(self.curframe)\n if displaying:\n for expr,oldvalue in displaying.items():\n newvalue=self._getval_except(expr)\n \n \n \n if newvalue is not oldvalue and newvalue !=oldvalue:\n displaying[expr]=newvalue\n self.message('display %s: %r [old: %r]'%\n (expr,newvalue,oldvalue))\n \n def interaction(self,frame,traceback):\n \n if Pdb._previous_sigint_handler:\n try :\n signal.signal(signal.SIGINT,Pdb._previous_sigint_handler)\n except ValueError:\n pass\n else :\n Pdb._previous_sigint_handler=None\n if self.setup(frame,traceback):\n \n \n self.forget()\n return\n self.print_stack_entry(self.stack[self.curindex])\n self._cmdloop()\n self.forget()\n \n def displayhook(self,obj):\n ''\n\n \n \n 
if obj is not None :\n self.message(repr(obj))\n \n def default(self,line):\n if line[:1]=='!':line=line[1:]\n locals=self.curframe_locals\n globals=self.curframe.f_globals\n try :\n code=compile(line+'\\n','','single')\n save_stdout=sys.stdout\n save_stdin=sys.stdin\n save_displayhook=sys.displayhook\n try :\n sys.stdin=self.stdin\n sys.stdout=self.stdout\n sys.displayhook=self.displayhook\n exec(code,globals,locals)\n finally :\n sys.stdout=save_stdout\n sys.stdin=save_stdin\n sys.displayhook=save_displayhook\n except :\n exc_info=sys.exc_info()[:2]\n self.error(traceback.format_exception_only(*exc_info)[-1].strip())\n \n def precmd(self,line):\n ''\n if not line.strip():\n return line\n args=line.split()\n while args[0]in self.aliases:\n line=self.aliases[args[0]]\n ii=1\n for tmpArg in args[1:]:\n line=line.replace(\"%\"+str(ii),\n tmpArg)\n ii +=1\n line=line.replace(\"%*\",' '.join(args[1:]))\n args=line.split()\n \n \n if args[0]!='alias':\n marker=line.find(';;')\n if marker >=0:\n \n next=line[marker+2:].lstrip()\n self.cmdqueue.append(next)\n line=line[:marker].rstrip()\n return line\n \n def onecmd(self,line):\n ''\n\n\n\n\n \n if not self.commands_defining:\n return cmd.Cmd.onecmd(self,line)\n else :\n return self.handle_command_def(line)\n \n def handle_command_def(self,line):\n ''\n cmd,arg,line=self.parseline(line)\n if not cmd:\n return\n if cmd =='silent':\n self.commands_silent[self.commands_bnum]=True\n return\n elif cmd =='end':\n self.cmdqueue=[]\n return 1\n cmdlist=self.commands[self.commands_bnum]\n if arg:\n cmdlist.append(cmd+' '+arg)\n else :\n cmdlist.append(cmd)\n \n try :\n func=getattr(self,'do_'+cmd)\n except AttributeError:\n func=self.default\n \n if func.__name__ in self.commands_resuming:\n self.commands_doprompt[self.commands_bnum]=False\n self.cmdqueue=[]\n return 1\n return\n \n \n \n def message(self,msg):\n print(msg,file=self.stdout)\n \n def error(self,msg):\n print('***',msg,file=self.stdout)\n \n \n \n \n def _complete_location(self,text,line,begidx,endidx):\n \n if line.strip().endswith((':',',')):\n \n return []\n \n try :\n ret=self._complete_expression(text,line,begidx,endidx)\n except Exception:\n ret=[]\n \n globs=glob.glob(text+'*')\n for fn in globs:\n if os.path.isdir(fn):\n ret.append(fn+'/')\n elif os.path.isfile(fn)and fn.lower().endswith(('.py','.pyw')):\n ret.append(fn+':')\n return ret\n \n def _complete_bpnumber(self,text,line,begidx,endidx):\n \n \n \n return [str(i)for i,bp in enumerate(bdb.Breakpoint.bpbynumber)\n if bp is not None and str(i).startswith(text)]\n \n def _complete_expression(self,text,line,begidx,endidx):\n \n if not self.curframe:\n return []\n \n \n \n ns={**self.curframe.f_globals,**self.curframe_locals}\n if '.'in text:\n \n \n \n dotted=text.split('.')\n try :\n obj=ns[dotted[0]]\n for part in dotted[1:-1]:\n obj=getattr(obj,part)\n except (KeyError,AttributeError):\n return []\n prefix='.'.join(dotted[:-1])+'.'\n return [prefix+n for n in dir(obj)if n.startswith(dotted[-1])]\n else :\n \n return [n for n in ns.keys()if n.startswith(text)]\n \n \n \n \n \n def do_commands(self,arg):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if not arg:\n bnum=len(bdb.Breakpoint.bpbynumber)-1\n else :\n try :\n bnum=int(arg)\n except :\n self.error(\"Usage: commands [bnum]\\n ...\\n end\")\n return\n self.commands_bnum=bnum\n \n if bnum in self.commands:\n old_command_defs=(self.commands[bnum],\n self.commands_doprompt[bnum],\n self.commands_silent[bnum])\n else :\n old_command_defs=None\n 
self.commands[bnum]=[]\n self.commands_doprompt[bnum]=True\n self.commands_silent[bnum]=False\n \n prompt_back=self.prompt\n self.prompt='(com) '\n self.commands_defining=True\n try :\n self.cmdloop()\n except KeyboardInterrupt:\n \n if old_command_defs:\n self.commands[bnum]=old_command_defs[0]\n self.commands_doprompt[bnum]=old_command_defs[1]\n self.commands_silent[bnum]=old_command_defs[2]\n else :\n del self.commands[bnum]\n del self.commands_doprompt[bnum]\n del self.commands_silent[bnum]\n self.error('command definition aborted, old commands restored')\n finally :\n self.commands_defining=False\n self.prompt=prompt_back\n \n complete_commands=_complete_bpnumber\n \n def do_break(self,arg,temporary=0):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n if not arg:\n if self.breaks:\n self.message(\"Num Type Disp Enb Where\")\n for bp in bdb.Breakpoint.bpbynumber:\n if bp:\n self.message(bp.bpformat())\n return\n \n \n filename=None\n lineno=None\n cond=None\n comma=arg.find(',')\n if comma >0:\n \n cond=arg[comma+1:].lstrip()\n arg=arg[:comma].rstrip()\n \n colon=arg.rfind(':')\n funcname=None\n if colon >=0:\n filename=arg[:colon].rstrip()\n f=self.lookupmodule(filename)\n if not f:\n self.error('%r not found from sys.path'%filename)\n return\n else :\n filename=f\n arg=arg[colon+1:].lstrip()\n try :\n lineno=int(arg)\n except ValueError:\n self.error('Bad lineno: %s'%arg)\n return\n else :\n \n try :\n lineno=int(arg)\n except ValueError:\n try :\n func=eval(arg,\n self.curframe.f_globals,\n self.curframe_locals)\n except :\n func=arg\n try :\n if hasattr(func,'__func__'):\n func=func.__func__\n code=func.__code__\n \n \n funcname=code.co_name\n lineno=code.co_firstlineno\n filename=code.co_filename\n except :\n \n (ok,filename,ln)=self.lineinfo(arg)\n if not ok:\n self.error('The specified object %r is not a function '\n 'or was not found along sys.path.'%arg)\n return\n funcname=ok\n lineno=int(ln)\n if not filename:\n filename=self.defaultFile()\n \n line=self.checkline(filename,lineno)\n if line:\n \n err=self.set_break(filename,line,temporary,cond,funcname)\n if err:\n self.error(err)\n else :\n bp=self.get_breaks(filename,line)[-1]\n self.message(\"Breakpoint %d at %s:%d\"%\n (bp.number,bp.file,bp.line))\n \n \n def defaultFile(self):\n ''\n filename=self.curframe.f_code.co_filename\n if filename ==''and self.mainpyfile:\n filename=self.mainpyfile\n return filename\n \n do_b=do_break\n \n complete_break=_complete_location\n complete_b=_complete_location\n \n def do_tbreak(self,arg):\n ''\n\n\n \n self.do_break(arg,1)\n \n complete_tbreak=_complete_location\n \n def lineinfo(self,identifier):\n failed=(None ,None ,None )\n \n idstring=identifier.split(\"'\")\n if len(idstring)==1:\n \n id=idstring[0].strip()\n elif len(idstring)==3:\n \n id=idstring[1].strip()\n else :\n return failed\n if id =='':return failed\n parts=id.split('.')\n \n if parts[0]=='self':\n del parts[0]\n if len(parts)==0:\n return failed\n \n fname=self.defaultFile()\n if len(parts)==1:\n item=parts[0]\n else :\n \n \n f=self.lookupmodule(parts[0])\n if f:\n fname=f\n item=parts[1]\n answer=find_function(item,fname)\n return answer or failed\n \n def checkline(self,filename,lineno):\n ''\n\n\n\n \n \n \n globs=self.curframe.f_globals if hasattr(self,'curframe')else None\n line=linecache.getline(filename,lineno,globs)\n if not line:\n self.message('End of file')\n return 0\n line=line.strip()\n \n if (not line or (line[0]=='#')or\n (line[:3]=='\"\"\"')or line[:3]==\"'''\"):\n self.error('Blank or comment')\n return 0\n 
return lineno\n \n def do_enable(self,arg):\n ''\n\n\n \n args=arg.split()\n for i in args:\n try :\n bp=self.get_bpbynumber(i)\n except ValueError as err:\n self.error(err)\n else :\n bp.enable()\n self.message('Enabled %s'%bp)\n \n complete_enable=_complete_bpnumber\n \n def do_disable(self,arg):\n ''\n\n\n\n\n\n \n args=arg.split()\n for i in args:\n try :\n bp=self.get_bpbynumber(i)\n except ValueError as err:\n self.error(err)\n else :\n bp.disable()\n self.message('Disabled %s'%bp)\n \n complete_disable=_complete_bpnumber\n \n def do_condition(self,arg):\n ''\n\n\n\n\n \n args=arg.split(' ',1)\n try :\n cond=args[1]\n except IndexError:\n cond=None\n try :\n bp=self.get_bpbynumber(args[0].strip())\n except IndexError:\n self.error('Breakpoint number expected')\n except ValueError as err:\n self.error(err)\n else :\n bp.cond=cond\n if not cond:\n self.message('Breakpoint %d is now unconditional.'%bp.number)\n else :\n self.message('New condition set for breakpoint %d.'%bp.number)\n \n complete_condition=_complete_bpnumber\n \n def do_ignore(self,arg):\n ''\n\n\n\n\n\n\n \n args=arg.split()\n try :\n count=int(args[1].strip())\n except :\n count=0\n try :\n bp=self.get_bpbynumber(args[0].strip())\n except IndexError:\n self.error('Breakpoint number expected')\n except ValueError as err:\n self.error(err)\n else :\n bp.ignore=count\n if count >0:\n if count >1:\n countstr='%d crossings'%count\n else :\n countstr='1 crossing'\n self.message('Will ignore next %s of breakpoint %d.'%\n (countstr,bp.number))\n else :\n self.message('Will stop next time breakpoint %d is reached.'\n %bp.number)\n \n complete_ignore=_complete_bpnumber\n \n def do_clear(self,arg):\n ''\n\n\n\n\n \n if not arg:\n try :\n reply=input('Clear all breaks? ')\n except EOFError:\n reply='no'\n reply=reply.strip().lower()\n if reply in ('y','yes'):\n bplist=[bp for bp in bdb.Breakpoint.bpbynumber if bp]\n self.clear_all_breaks()\n for bp in bplist:\n self.message('Deleted %s'%bp)\n return\n if ':'in arg:\n \n i=arg.rfind(':')\n filename=arg[:i]\n arg=arg[i+1:]\n try :\n lineno=int(arg)\n except ValueError:\n err=\"Invalid line number (%s)\"%arg\n else :\n bplist=self.get_breaks(filename,lineno)\n err=self.clear_break(filename,lineno)\n if err:\n self.error(err)\n else :\n for bp in bplist:\n self.message('Deleted %s'%bp)\n return\n numberlist=arg.split()\n for i in numberlist:\n try :\n bp=self.get_bpbynumber(i)\n except ValueError as err:\n self.error(err)\n else :\n self.clear_bpbynumber(i)\n self.message('Deleted %s'%bp)\n do_cl=do_clear\n \n complete_clear=_complete_location\n complete_cl=_complete_location\n \n def do_where(self,arg):\n ''\n\n\n\n \n self.print_stack_trace()\n do_w=do_where\n do_bt=do_where\n \n def _select_frame(self,number):\n assert 0 <=number =2:\n self.error('No help for %r; please do not run Python with -OO '\n 'if you need command help'%arg)\n return\n self.message(command.__doc__.rstrip())\n \n do_h=do_help\n \n def help_exec(self):\n ''\n\n\n\n\n\n\n\n \n self.message((self.help_exec.__doc__ or '').strip())\n \n def help_pdb(self):\n help()\n \n \n \n def lookupmodule(self,filename):\n ''\n\n\n\n \n if os.path.isabs(filename)and os.path.exists(filename):\n return filename\n f=os.path.join(sys.path[0],filename)\n if os.path.exists(f)and self.canonic(f)==self.mainpyfile:\n return f\n root,ext=os.path.splitext(filename)\n if ext =='':\n filename=filename+'.py'\n if os.path.isabs(filename):\n return filename\n for dirname in sys.path:\n while os.path.islink(dirname):\n 
dirname=os.readlink(dirname)\n fullname=os.path.join(dirname,filename)\n if os.path.exists(fullname):\n return fullname\n return None\n \n def _runmodule(self,module_name):\n self._wait_for_mainpyfile=True\n self._user_requested_quit=False\n import runpy\n mod_name,mod_spec,code=runpy._get_module_details(module_name)\n self.mainpyfile=self.canonic(code.co_filename)\n import __main__\n __main__.__dict__.clear()\n __main__.__dict__.update({\n \"__name__\":\"__main__\",\n \"__file__\":self.mainpyfile,\n \"__package__\":mod_spec.parent,\n \"__loader__\":mod_spec.loader,\n \"__spec__\":mod_spec,\n \"__builtins__\":__builtins__,\n })\n self.run(code)\n \n def _runscript(self,filename):\n \n \n \n \n \n import __main__\n __main__.__dict__.clear()\n __main__.__dict__.update({\"__name__\":\"__main__\",\n \"__file__\":filename,\n \"__builtins__\":__builtins__,\n })\n \n \n \n \n \n \n self._wait_for_mainpyfile=True\n self.mainpyfile=self.canonic(filename)\n self._user_requested_quit=False\n with open(filename,\"rb\")as fp:\n statement=\"exec(compile(%r, %r, 'exec'))\"%\\\n (fp.read(),self.mainpyfile)\n self.run(statement)\n \n \n \nif __doc__ is not None :\n\n _help_order=[\n 'help','where','down','up','break','tbreak','clear','disable',\n 'enable','ignore','condition','commands','step','next','until',\n 'jump','return','retval','run','continue','list','longlist',\n 'args','p','pp','whatis','source','display','undisplay',\n 'interact','alias','unalias','debug','quit',\n ]\n \n for _command in _help_order:\n __doc__ +=getattr(Pdb,'do_'+_command).__doc__.strip()+'\\n\\n'\n __doc__ +=Pdb.help_exec.__doc__\n \n del _help_order,_command\n \n \n \n \ndef run(statement,globals=None ,locals=None ):\n Pdb().run(statement,globals,locals)\n \ndef runeval(expression,globals=None ,locals=None ):\n return Pdb().runeval(expression,globals,locals)\n \ndef runctx(statement,globals,locals):\n\n run(statement,globals,locals)\n \ndef runcall(*args,**kwds):\n return Pdb().runcall(*args,**kwds)\n \ndef set_trace(*,header=None ):\n pdb=Pdb()\n if header is not None :\n pdb.message(header)\n pdb.set_trace(sys._getframe().f_back)\n \n \n \ndef post_mortem(t=None ):\n\n if t is None :\n \n \n t=sys.exc_info()[2]\n if t is None :\n raise ValueError(\"A valid traceback must be passed if no \"\n \"exception is being handled\")\n \n p=Pdb()\n p.reset()\n p.interaction(None ,t)\n \ndef pm():\n post_mortem(sys.last_traceback)\n \n \n \n \nTESTCMD='import x; x.main()'\n\ndef test():\n run(TESTCMD)\n \n \ndef help():\n import pydoc\n pydoc.pager(__doc__)\n \n_usage=\"\"\"\\\nusage: pdb.py [-c command] ... [-m module | pyfile] [arg] ...\n\nDebug the Python program given by pyfile. Alternatively,\nan executable module or package to debug can be specified using\nthe -m switch.\n\nInitial commands are read from .pdbrc files in your home directory\nand in the current directory, if they exist. 
Commands supplied with\n-c are executed after commands from .pdbrc files.\n\nTo let the script run until an exception occurs, use \"-c continue\".\nTo let the script run up to a given line X in the debugged file, use\n\"-c 'until X'\".\"\"\"\n\ndef main():\n import getopt\n \n opts,args=getopt.getopt(sys.argv[1:],'mhc:',['help','command='])\n \n if not args:\n print(_usage)\n sys.exit(2)\n \n commands=[]\n run_as_module=False\n for opt,optarg in opts:\n if opt in ['-h','--help']:\n print(_usage)\n sys.exit()\n elif opt in ['-c','--command']:\n commands.append(optarg)\n elif opt in ['-m']:\n run_as_module=True\n \n mainpyfile=args[0]\n if not run_as_module and not os.path.exists(mainpyfile):\n print('Error:',mainpyfile,'does not exist')\n sys.exit(1)\n \n sys.argv[:]=args\n \n \n if not run_as_module:\n sys.path[0]=os.path.dirname(mainpyfile)\n \n \n \n \n \n pdb=Pdb()\n pdb.rcLines.extend(commands)\n while True :\n try :\n if run_as_module:\n pdb._runmodule(mainpyfile)\n else :\n pdb._runscript(mainpyfile)\n if pdb._user_requested_quit:\n break\n print(\"The program finished and will be restarted\")\n except Restart:\n print(\"Restarting\",mainpyfile,\"with arguments:\")\n print(\"\\t\"+\" \".join(args))\n except SystemExit:\n \n print(\"The program exited via sys.exit(). Exit status:\",end=' ')\n print(sys.exc_info()[1])\n except SyntaxError:\n traceback.print_exc()\n sys.exit(1)\n except :\n traceback.print_exc()\n print(\"Uncaught exception. Entering post mortem debugging\")\n print(\"Running 'cont' or 'step' will restart the program\")\n t=sys.exc_info()[2]\n pdb.interaction(None ,t)\n print(\"Post mortem debugger finished. The \"+mainpyfile+\n \" will be restarted\")\n \n \n \nif __name__ =='__main__':\n import pdb\n pdb.main()\n", ["__main__", "bdb", "cmd", "code", "dis", "getopt", "glob", "inspect", "linecache", "os", "pdb", "pprint", "pydoc", "re", "readline", "runpy", "shlex", "signal", "sys", "traceback"]], "pickle": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nfrom types import FunctionType\nfrom copyreg import dispatch_table\nfrom copyreg import _extension_registry,_inverted_registry,_extension_cache\nfrom itertools import islice\nfrom functools import partial\nimport sys\nfrom sys import maxsize\nfrom struct import pack,unpack\nimport re\nimport io\nimport codecs\nimport _compat_pickle\n\n__all__=[\"PickleError\",\"PicklingError\",\"UnpicklingError\",\"Pickler\",\n\"Unpickler\",\"dump\",\"dumps\",\"load\",\"loads\"]\n\ntry :\n from _pickle import PickleBuffer\n __all__.append(\"PickleBuffer\")\n _HAVE_PICKLE_BUFFER=True\nexcept ImportError:\n _HAVE_PICKLE_BUFFER=False\n \n \n \nbytes_types=(bytes,bytearray)\n\n\nformat_version=\"4.0\"\ncompatible_formats=[\"1.0\",\n\"1.1\",\n\"1.2\",\n\"1.3\",\n\"2.0\",\n\"3.0\",\n\"4.0\",\n\"5.0\",\n]\n\n\nHIGHEST_PROTOCOL=5\n\n\n\n\nDEFAULT_PROTOCOL=4\n\nclass PickleError(Exception):\n ''\n pass\n \nclass PicklingError(PickleError):\n ''\n\n\n \n pass\n \nclass UnpicklingError(PickleError):\n ''\n\n\n\n\n\n\n \n pass\n \n \n \nclass _Stop(Exception):\n def __init__(self,value):\n self.value=value\n \n \ntry :\n from org.python.core import PyStringMap\nexcept ImportError:\n PyStringMap=None\n \n \n \n \n 
\nMARK=b'('\nSTOP=b'.'\nPOP=b'0'\nPOP_MARK=b'1'\nDUP=b'2'\nFLOAT=b'F'\nINT=b'I'\nBININT=b'J'\nBININT1=b'K'\nLONG=b'L'\nBININT2=b'M'\nNONE=b'N'\nPERSID=b'P'\nBINPERSID=b'Q'\nREDUCE=b'R'\nSTRING=b'S'\nBINSTRING=b'T'\nSHORT_BINSTRING=b'U'\nUNICODE=b'V'\nBINUNICODE=b'X'\nAPPEND=b'a'\nBUILD=b'b'\nGLOBAL=b'c'\nDICT=b'd'\nEMPTY_DICT=b'}'\nAPPENDS=b'e'\nGET=b'g'\nBINGET=b'h'\nINST=b'i'\nLONG_BINGET=b'j'\nLIST=b'l'\nEMPTY_LIST=b']'\nOBJ=b'o'\nPUT=b'p'\nBINPUT=b'q'\nLONG_BINPUT=b'r'\nSETITEM=b's'\nTUPLE=b't'\nEMPTY_TUPLE=b')'\nSETITEMS=b'u'\nBINFLOAT=b'G'\n\nTRUE=b'I01\\n'\nFALSE=b'I00\\n'\n\n\n\nPROTO=b'\\x80'\nNEWOBJ=b'\\x81'\nEXT1=b'\\x82'\nEXT2=b'\\x83'\nEXT4=b'\\x84'\nTUPLE1=b'\\x85'\nTUPLE2=b'\\x86'\nTUPLE3=b'\\x87'\nNEWTRUE=b'\\x88'\nNEWFALSE=b'\\x89'\nLONG1=b'\\x8a'\nLONG4=b'\\x8b'\n\n_tuplesize2code=[EMPTY_TUPLE,TUPLE1,TUPLE2,TUPLE3]\n\n\n\nBINBYTES=b'B'\nSHORT_BINBYTES=b'C'\n\n\n\nSHORT_BINUNICODE=b'\\x8c'\nBINUNICODE8=b'\\x8d'\nBINBYTES8=b'\\x8e'\nEMPTY_SET=b'\\x8f'\nADDITEMS=b'\\x90'\nFROZENSET=b'\\x91'\nNEWOBJ_EX=b'\\x92'\nSTACK_GLOBAL=b'\\x93'\nMEMOIZE=b'\\x94'\nFRAME=b'\\x95'\n\n\n\nBYTEARRAY8=b'\\x96'\nNEXT_BUFFER=b'\\x97'\nREADONLY_BUFFER=b'\\x98'\n\n__all__.extend([x for x in dir()if re.match(\"[A-Z][A-Z0-9_]+$\",x)])\n\n\nclass _Framer:\n\n _FRAME_SIZE_MIN=4\n _FRAME_SIZE_TARGET=64 *1024\n \n def __init__(self,file_write):\n self.file_write=file_write\n self.current_frame=None\n \n def start_framing(self):\n self.current_frame=io.BytesIO()\n \n def end_framing(self):\n if self.current_frame and self.current_frame.tell()>0:\n self.commit_frame(force=True )\n self.current_frame=None\n \n def commit_frame(self,force=False ):\n if self.current_frame:\n f=self.current_frame\n if f.tell()>=self._FRAME_SIZE_TARGET or force:\n data=f.getbuffer()\n write=self.file_write\n if len(data)>=self._FRAME_SIZE_MIN:\n \n \n \n \n write(FRAME+pack(\"':\n raise AttributeError(\"Can't get local attribute {!r} on {!r}\"\n .format(name,obj))\n try :\n parent=obj\n obj=getattr(obj,subpath)\n except AttributeError:\n raise AttributeError(\"Can't get attribute {!r} on {!r}\"\n .format(name,obj))from None\n return obj,parent\n \ndef whichmodule(obj,name):\n ''\n module_name=getattr(obj,'__module__',None )\n if module_name is not None :\n return module_name\n \n \n for module_name,module in sys.modules.copy().items():\n if module_name =='__main__'or module is None :\n continue\n try :\n if _getattribute(module,name)[0]is obj:\n return module_name\n except AttributeError:\n pass\n return '__main__'\n \ndef encode_long(x):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if x ==0:\n return b''\n nbytes=(x.bit_length()>>3)+1\n result=x.to_bytes(nbytes,byteorder='little',signed=True )\n if x <0 and nbytes >1:\n if result[-1]==0xff and (result[-2]&0x80)!=0:\n result=result[:-1]\n return result\n \ndef decode_long(data):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n return int.from_bytes(data,byteorder='little',signed=True )\n \n \n \n \nclass _Pickler:\n\n def __init__(self,file,protocol=None ,*,fix_imports=True ,\n buffer_callback=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if protocol is None :\n protocol=DEFAULT_PROTOCOL\n if protocol <0:\n protocol=HIGHEST_PROTOCOL\n elif not 0 <=protocol <=HIGHEST_PROTOCOL:\n raise ValueError(\"pickle protocol must be <= %d\"%HIGHEST_PROTOCOL)\n if buffer_callback is not None and protocol <5:\n raise ValueError(\"buffer_callback needs protocol >= 5\")\n self._buffer_callback=buffer_callback\n try :\n self._file_write=file.write\n except 
AttributeError:\n raise TypeError(\"file must have a 'write' attribute\")\n self.framer=_Framer(self._file_write)\n self.write=self.framer.write\n self._write_large_bytes=self.framer.write_large_bytes\n self.memo={}\n self.proto=int(protocol)\n self.bin=protocol >=1\n self.fast=0\n self.fix_imports=fix_imports and protocol <3\n \n def clear_memo(self):\n ''\n\n\n\n\n\n \n self.memo.clear()\n \n def dump(self,obj):\n ''\n \n \n if not hasattr(self,\"_file_write\"):\n raise PicklingError(\"Pickler.__init__() was not called by \"\n \"%s.__init__()\"%(self.__class__.__name__,))\n if self.proto >=2:\n self.write(PROTO+pack(\"=4:\n self.framer.start_framing()\n self.save(obj)\n self.write(STOP)\n self.framer.end_framing()\n \n def memoize(self,obj):\n ''\n \n \n \n \n \n \n \n \n \n \n \n \n \n if self.fast:\n return\n assert id(obj)not in self.memo\n idx=len(self.memo)\n self.write(self.put(idx))\n self.memo[id(obj)]=idx,obj\n \n \n def put(self,idx):\n if self.proto >=4:\n return MEMOIZE\n elif self.bin:\n if idx <256:\n return BINPUT+pack(\"=2 and func_name ==\"__newobj_ex__\":\n cls,args,kwargs=args\n if not hasattr(cls,\"__new__\"):\n raise PicklingError(\"args[0] from {} args has no __new__\"\n .format(func_name))\n if obj is not None and cls is not obj.__class__:\n raise PicklingError(\"args[0] from {} args has the wrong class\"\n .format(func_name))\n if self.proto >=4:\n save(cls)\n save(args)\n save(kwargs)\n write(NEWOBJ_EX)\n else :\n func=partial(cls.__new__,cls,*args,**kwargs)\n save(func)\n save(())\n write(REDUCE)\n elif self.proto >=2 and func_name ==\"__newobj__\":\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n cls=args[0]\n if not hasattr(cls,\"__new__\"):\n raise PicklingError(\n \"args[0] from __newobj__ args has no __new__\")\n if obj is not None and cls is not obj.__class__:\n raise PicklingError(\n \"args[0] from __newobj__ args has the wrong class\")\n args=args[1:]\n save(cls)\n save(args)\n write(NEWOBJ)\n else :\n save(func)\n save(args)\n write(REDUCE)\n \n if obj is not None :\n \n \n \n if id(obj)in self.memo:\n write(POP+self.get(self.memo[id(obj)][0]))\n else :\n self.memoize(obj)\n \n \n \n \n \n \n if listitems is not None :\n self._batch_appends(listitems)\n \n if dictitems is not None :\n self._batch_setitems(dictitems)\n \n if state is not None :\n if state_setter is None :\n save(state)\n write(BUILD)\n else :\n \n \n \n \n save(state_setter)\n save(obj)\n save(state)\n write(TUPLE2)\n \n write(REDUCE)\n \n \n \n \n write(POP)\n \n \n \n dispatch={}\n \n def save_none(self,obj):\n self.write(NONE)\n dispatch[type(None )]=save_none\n \n def save_bool(self,obj):\n if self.proto >=2:\n self.write(NEWTRUE if obj else NEWFALSE)\n else :\n self.write(TRUE if obj else FALSE)\n dispatch[bool]=save_bool\n \n def save_long(self,obj):\n if self.bin:\n \n \n \n \n if obj >=0:\n if obj <=0xff:\n self.write(BININT1+pack(\"=2:\n encoded=encode_long(obj)\n n=len(encoded)\n if n <256:\n self.write(LONG1+pack(\"d',obj))\n else :\n self.write(FLOAT+repr(obj).encode(\"ascii\")+b'\\n')\n dispatch[float]=save_float\n \n def save_bytes(self,obj):\n if self.proto <3:\n if not obj:\n self.save_reduce(bytes,(),obj=obj)\n else :\n self.save_reduce(codecs.encode,\n (str(obj,'latin1'),'latin1'),obj=obj)\n return\n n=len(obj)\n if n <=0xff:\n self.write(SHORT_BINBYTES+pack(\"0xffffffff and self.proto >=4:\n self._write_large_bytes(BINBYTES8+pack(\"=self.framer._FRAME_SIZE_TARGET:\n 
self._write_large_bytes(BINBYTES+pack(\"=self.framer._FRAME_SIZE_TARGET:\n self._write_large_bytes(BYTEARRAY8+pack(\"= 5\")\n with obj.raw()as m:\n if not m.contiguous:\n raise PicklingError(\"PickleBuffer can not be pickled when \"\n \"pointing to a non-contiguous buffer\")\n in_band=True\n if self._buffer_callback is not None :\n in_band=bool(self._buffer_callback(obj))\n if in_band:\n \n \n if m.readonly:\n self.save_bytes(m.tobytes())\n else :\n self.save_bytearray(m.tobytes())\n else :\n \n self.write(NEXT_BUFFER)\n if m.readonly:\n self.write(READONLY_BUFFER)\n \n dispatch[PickleBuffer]=save_picklebuffer\n \n def save_str(self,obj):\n if self.bin:\n encoded=obj.encode('utf-8','surrogatepass')\n n=len(encoded)\n if n <=0xff and self.proto >=4:\n self.write(SHORT_BINUNICODE+pack(\"0xffffffff and self.proto >=4:\n self._write_large_bytes(BINUNICODE8+pack(\"=self.framer._FRAME_SIZE_TARGET:\n self._write_large_bytes(BINUNICODE+pack(\"=2:\n for element in obj:\n save(element)\n \n if id(obj)in memo:\n get=self.get(memo[id(obj)][0])\n self.write(POP *n+get)\n else :\n self.write(_tuplesize2code[n])\n self.memoize(obj)\n return\n \n \n \n write=self.write\n write(MARK)\n for element in obj:\n save(element)\n \n if id(obj)in memo:\n \n \n \n \n \n \n \n get=self.get(memo[id(obj)][0])\n if self.bin:\n write(POP_MARK+get)\n else :\n write(POP *(n+1)+get)\n return\n \n \n write(TUPLE)\n self.memoize(obj)\n \n dispatch[tuple]=save_tuple\n \n def save_list(self,obj):\n if self.bin:\n self.write(EMPTY_LIST)\n else :\n self.write(MARK+LIST)\n \n self.memoize(obj)\n self._batch_appends(obj)\n \n dispatch[list]=save_list\n \n _BATCHSIZE=1000\n \n def _batch_appends(self,items):\n \n save=self.save\n write=self.write\n \n if not self.bin:\n for x in items:\n save(x)\n write(APPEND)\n return\n \n it=iter(items)\n while True :\n tmp=list(islice(it,self._BATCHSIZE))\n n=len(tmp)\n if n >1:\n write(MARK)\n for x in tmp:\n save(x)\n write(APPENDS)\n elif n:\n save(tmp[0])\n write(APPEND)\n \n if n 1:\n write(MARK)\n for k,v in tmp:\n save(k)\n save(v)\n write(SETITEMS)\n elif n:\n k,v=tmp[0]\n save(k)\n save(v)\n write(SETITEM)\n \n if n 0:\n write(MARK)\n for item in batch:\n save(item)\n write(ADDITEMS)\n if n =2:\n code=_extension_registry.get((module_name,name))\n if code:\n assert code >0\n if code <=0xff:\n write(EXT1+pack(\"=4:\n self.save(module_name)\n self.save(name)\n write(STACK_GLOBAL)\n elif parent is not module:\n self.save_reduce(getattr,(parent,lastname))\n elif self.proto >=3:\n write(GLOBAL+bytes(module_name,\"utf-8\")+b'\\n'+\n bytes(name,\"utf-8\")+b'\\n')\n else :\n if self.fix_imports:\n r_name_mapping=_compat_pickle.REVERSE_NAME_MAPPING\n r_import_mapping=_compat_pickle.REVERSE_IMPORT_MAPPING\n if (module_name,name)in r_name_mapping:\n module_name,name=r_name_mapping[(module_name,name)]\n elif module_name in r_import_mapping:\n module_name=r_import_mapping[module_name]\n try :\n write(GLOBAL+bytes(module_name,\"ascii\")+b'\\n'+\n bytes(name,\"ascii\")+b'\\n')\n except UnicodeEncodeError:\n raise PicklingError(\n \"can't pickle global identifier '%s.%s' using \"\n \"pickle protocol %i\"%(module,name,self.proto))from None\n \n self.memoize(obj)\n \n def save_type(self,obj):\n if obj is type(None ):\n return self.save_reduce(type,(None ,),obj=obj)\n elif obj is type(NotImplemented):\n return self.save_reduce(type,(NotImplemented,),obj=obj)\n elif obj is type(...):\n return self.save_reduce(type,(...,),obj=obj)\n return self.save_global(obj)\n \n dispatch[FunctionType]=save_global\n 
dispatch[type]=save_type\n \n \n \n \nclass _Unpickler:\n\n def __init__(self,file,*,fix_imports=True ,\n encoding=\"ASCII\",errors=\"strict\",buffers=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n self._buffers=iter(buffers)if buffers is not None else None\n self._file_readline=file.readline\n self._file_read=file.read\n self.memo={}\n self.encoding=encoding\n self.errors=errors\n self.proto=0\n self.fix_imports=fix_imports\n \n def load(self):\n ''\n\n\n \n \n \n if not hasattr(self,\"_file_read\"):\n raise UnpicklingError(\"Unpickler.__init__() was not called by \"\n \"%s.__init__()\"%(self.__class__.__name__,))\n self._unframer=_Unframer(self._file_read,self._file_readline)\n self.read=self._unframer.read\n self.readinto=self._unframer.readinto\n self.readline=self._unframer.readline\n self.metastack=[]\n self.stack=[]\n self.append=self.stack.append\n self.proto=0\n read=self.read\n dispatch=self.dispatch\n try :\n while True :\n key=read(1)\n if not key:\n raise EOFError\n assert isinstance(key,bytes_types)\n dispatch[key[0]](self)\n except _Stop as stopinst:\n return stopinst.value\n \n \n def pop_mark(self):\n items=self.stack\n self.stack=self.metastack.pop()\n self.append=self.stack.append\n return items\n \n def persistent_load(self,pid):\n raise UnpicklingError(\"unsupported persistent id encountered\")\n \n dispatch={}\n \n def load_proto(self):\n proto=self.read(1)[0]\n if not 0 <=proto <=HIGHEST_PROTOCOL:\n raise ValueError(\"unsupported pickle protocol: %d\"%proto)\n self.proto=proto\n dispatch[PROTO[0]]=load_proto\n \n def load_frame(self):\n frame_size,=unpack('sys.maxsize:\n raise ValueError(\"frame size > sys.maxsize: %d\"%frame_size)\n self._unframer.load_frame(frame_size)\n dispatch[FRAME[0]]=load_frame\n \n def load_persid(self):\n try :\n pid=self.readline()[:-1].decode(\"ascii\")\n except UnicodeDecodeError:\n raise UnpicklingError(\n \"persistent IDs in protocol 0 must be ASCII strings\")\n self.append(self.persistent_load(pid))\n dispatch[PERSID[0]]=load_persid\n \n def load_binpersid(self):\n pid=self.stack.pop()\n self.append(self.persistent_load(pid))\n dispatch[BINPERSID[0]]=load_binpersid\n \n def load_none(self):\n self.append(None )\n dispatch[NONE[0]]=load_none\n \n def load_false(self):\n self.append(False )\n dispatch[NEWFALSE[0]]=load_false\n \n def load_true(self):\n self.append(True )\n dispatch[NEWTRUE[0]]=load_true\n \n def load_int(self):\n data=self.readline()\n if data ==FALSE[1:]:\n val=False\n elif data ==TRUE[1:]:\n val=True\n else :\n val=int(data,0)\n self.append(val)\n dispatch[INT[0]]=load_int\n \n def load_binint(self):\n self.append(unpack('d',self.read(8))[0])\n dispatch[BINFLOAT[0]]=load_binfloat\n \n def _decode_string(self,value):\n \n \n \n if self.encoding ==\"bytes\":\n return value\n else :\n return value.decode(self.encoding,self.errors)\n \n def load_string(self):\n data=self.readline()[:-1]\n \n if len(data)>=2 and data[0]==data[-1]and data[0]in b'\"\\'':\n data=data[1:-1]\n else :\n raise UnpicklingError(\"the STRING opcode argument must be quoted\")\n self.append(self._decode_string(codecs.escape_decode(data)[0]))\n dispatch[STRING[0]]=load_string\n \n def load_binstring(self):\n \n len,=unpack('maxsize:\n raise UnpicklingError(\"BINBYTES exceeds system's maximum size \"\n \"of %d bytes\"%maxsize)\n self.append(self.read(len))\n dispatch[BINBYTES[0]]=load_binbytes\n \n def load_unicode(self):\n self.append(str(self.readline()[:-1],'raw-unicode-escape'))\n 
dispatch[UNICODE[0]]=load_unicode\n \n def load_binunicode(self):\n len,=unpack('maxsize:\n raise UnpicklingError(\"BINUNICODE exceeds system's maximum size \"\n \"of %d bytes\"%maxsize)\n self.append(str(self.read(len),'utf-8','surrogatepass'))\n dispatch[BINUNICODE[0]]=load_binunicode\n \n def load_binunicode8(self):\n len,=unpack('maxsize:\n raise UnpicklingError(\"BINUNICODE8 exceeds system's maximum size \"\n \"of %d bytes\"%maxsize)\n self.append(str(self.read(len),'utf-8','surrogatepass'))\n dispatch[BINUNICODE8[0]]=load_binunicode8\n \n def load_binbytes8(self):\n len,=unpack('maxsize:\n raise UnpicklingError(\"BINBYTES8 exceeds system's maximum size \"\n \"of %d bytes\"%maxsize)\n self.append(self.read(len))\n dispatch[BINBYTES8[0]]=load_binbytes8\n \n def load_bytearray8(self):\n len,=unpack('maxsize:\n raise UnpicklingError(\"BYTEARRAY8 exceeds system's maximum size \"\n \"of %d bytes\"%maxsize)\n b=bytearray(len)\n self.readinto(b)\n self.append(b)\n dispatch[BYTEARRAY8[0]]=load_bytearray8\n \n def load_next_buffer(self):\n if self._buffers is None :\n raise UnpicklingError(\"pickle stream refers to out-of-band data \"\n \"but no *buffers* argument was given\")\n try :\n buf=next(self._buffers)\n except StopIteration:\n raise UnpicklingError(\"not enough out-of-band buffers\")\n self.append(buf)\n dispatch[NEXT_BUFFER[0]]=load_next_buffer\n \n def load_readonly_buffer(self):\n buf=self.stack[-1]\n with memoryview(buf)as m:\n if not m.readonly:\n self.stack[-1]=m.toreadonly()\n dispatch[READONLY_BUFFER[0]]=load_readonly_buffer\n \n def load_short_binstring(self):\n len=self.read(1)[0]\n data=self.read(len)\n self.append(self._decode_string(data))\n dispatch[SHORT_BINSTRING[0]]=load_short_binstring\n \n def load_short_binbytes(self):\n len=self.read(1)[0]\n self.append(self.read(len))\n dispatch[SHORT_BINBYTES[0]]=load_short_binbytes\n \n def load_short_binunicode(self):\n len=self.read(1)[0]\n self.append(str(self.read(len),'utf-8','surrogatepass'))\n dispatch[SHORT_BINUNICODE[0]]=load_short_binunicode\n \n def load_tuple(self):\n items=self.pop_mark()\n self.append(tuple(items))\n dispatch[TUPLE[0]]=load_tuple\n \n def load_empty_tuple(self):\n self.append(())\n dispatch[EMPTY_TUPLE[0]]=load_empty_tuple\n \n def load_tuple1(self):\n self.stack[-1]=(self.stack[-1],)\n dispatch[TUPLE1[0]]=load_tuple1\n \n def load_tuple2(self):\n self.stack[-2:]=[(self.stack[-2],self.stack[-1])]\n dispatch[TUPLE2[0]]=load_tuple2\n \n def load_tuple3(self):\n self.stack[-3:]=[(self.stack[-3],self.stack[-2],self.stack[-1])]\n dispatch[TUPLE3[0]]=load_tuple3\n \n def load_empty_list(self):\n self.append([])\n dispatch[EMPTY_LIST[0]]=load_empty_list\n \n def load_empty_dictionary(self):\n self.append({})\n dispatch[EMPTY_DICT[0]]=load_empty_dictionary\n \n def load_empty_set(self):\n self.append(set())\n dispatch[EMPTY_SET[0]]=load_empty_set\n \n def load_frozenset(self):\n items=self.pop_mark()\n self.append(frozenset(items))\n dispatch[FROZENSET[0]]=load_frozenset\n \n def load_list(self):\n items=self.pop_mark()\n self.append(items)\n dispatch[LIST[0]]=load_list\n \n def load_dict(self):\n items=self.pop_mark()\n d={items[i]:items[i+1]\n for i in range(0,len(items),2)}\n self.append(d)\n dispatch[DICT[0]]=load_dict\n \n \n \n \n \n \n def _instantiate(self,klass,args):\n if (args or not isinstance(klass,type)or\n hasattr(klass,\"__getinitargs__\")):\n try :\n value=klass(*args)\n except TypeError as err:\n raise TypeError(\"in constructor for %s: %s\"%\n 
(klass.__name__,str(err)),sys.exc_info()[2])\n else :\n value=klass.__new__(klass)\n self.append(value)\n \n def load_inst(self):\n module=self.readline()[:-1].decode(\"ascii\")\n name=self.readline()[:-1].decode(\"ascii\")\n klass=self.find_class(module,name)\n self._instantiate(klass,self.pop_mark())\n dispatch[INST[0]]=load_inst\n \n def load_obj(self):\n \n args=self.pop_mark()\n cls=args.pop(0)\n self._instantiate(cls,args)\n dispatch[OBJ[0]]=load_obj\n \n def load_newobj(self):\n args=self.stack.pop()\n cls=self.stack.pop()\n obj=cls.__new__(cls,*args)\n self.append(obj)\n dispatch[NEWOBJ[0]]=load_newobj\n \n def load_newobj_ex(self):\n kwargs=self.stack.pop()\n args=self.stack.pop()\n cls=self.stack.pop()\n obj=cls.__new__(cls,*args,**kwargs)\n self.append(obj)\n dispatch[NEWOBJ_EX[0]]=load_newobj_ex\n \n def load_global(self):\n module=self.readline()[:-1].decode(\"utf-8\")\n name=self.readline()[:-1].decode(\"utf-8\")\n klass=self.find_class(module,name)\n self.append(klass)\n dispatch[GLOBAL[0]]=load_global\n \n def load_stack_global(self):\n name=self.stack.pop()\n module=self.stack.pop()\n if type(name)is not str or type(module)is not str:\n raise UnpicklingError(\"STACK_GLOBAL requires str\")\n self.append(self.find_class(module,name))\n dispatch[STACK_GLOBAL[0]]=load_stack_global\n \n def load_ext1(self):\n code=self.read(1)[0]\n self.get_extension(code)\n dispatch[EXT1[0]]=load_ext1\n \n def load_ext2(self):\n code,=unpack('=4:\n return _getattribute(sys.modules[module],name)[0]\n else :\n return getattr(sys.modules[module],name)\n \n def load_reduce(self):\n stack=self.stack\n args=stack.pop()\n func=stack[-1]\n stack[-1]=func(*args)\n dispatch[REDUCE[0]]=load_reduce\n \n def load_pop(self):\n if self.stack:\n del self.stack[-1]\n else :\n self.pop_mark()\n dispatch[POP[0]]=load_pop\n \n def load_pop_mark(self):\n self.pop_mark()\n dispatch[POP_MARK[0]]=load_pop_mark\n \n def load_dup(self):\n self.append(self.stack[-1])\n dispatch[DUP[0]]=load_dup\n \n def load_get(self):\n i=int(self.readline()[:-1])\n try :\n self.append(self.memo[i])\n except KeyError:\n msg=f'Memo value not found at index {i}'\n raise UnpicklingError(msg)from None\n dispatch[GET[0]]=load_get\n \n def load_binget(self):\n i=self.read(1)[0]\n try :\n self.append(self.memo[i])\n except KeyError as exc:\n msg=f'Memo value not found at index {i}'\n raise UnpicklingError(msg)from None\n dispatch[BINGET[0]]=load_binget\n \n def load_long_binget(self):\n i,=unpack('maxsize:\n raise ValueError(\"negative LONG_BINPUT argument\")\n self.memo[i]=self.stack[-1]\n dispatch[LONG_BINPUT[0]]=load_long_binput\n \n def load_memoize(self):\n memo=self.memo\n memo[len(memo)]=self.stack[-1]\n dispatch[MEMOIZE[0]]=load_memoize\n \n def load_append(self):\n stack=self.stack\n value=stack.pop()\n list=stack[-1]\n list.append(value)\n dispatch[APPEND[0]]=load_append\n \n def load_appends(self):\n items=self.pop_mark()\n list_obj=self.stack[-1]\n try :\n extend=list_obj.extend\n except AttributeError:\n pass\n else :\n extend(items)\n return\n \n \n \n append=list_obj.append\n for item in items:\n append(item)\n dispatch[APPENDS[0]]=load_appends\n \n def load_setitem(self):\n stack=self.stack\n value=stack.pop()\n key=stack.pop()\n dict=stack[-1]\n dict[key]=value\n dispatch[SETITEM[0]]=load_setitem\n \n def load_setitems(self):\n items=self.pop_mark()\n dict=self.stack[-1]\n for i in range(0,len(items),2):\n dict[items[i]]=items[i+1]\n dispatch[SETITEMS[0]]=load_setitems\n \n def load_additems(self):\n items=self.pop_mark()\n 
set_obj=self.stack[-1]\n if isinstance(set_obj,set):\n set_obj.update(items)\n else :\n add=set_obj.add\n for item in items:\n add(item)\n dispatch[ADDITEMS[0]]=load_additems\n \n def load_build(self):\n stack=self.stack\n state=stack.pop()\n inst=stack[-1]\n setstate=getattr(inst,\"__setstate__\",None )\n if setstate is not None :\n setstate(state)\n return\n slotstate=None\n if isinstance(state,tuple)and len(state)==2:\n state,slotstate=state\n if state:\n inst_dict=inst.__dict__\n intern=sys.intern\n for k,v in state.items():\n if type(k)is str:\n inst_dict[intern(k)]=v\n else :\n inst_dict[k]=v\n if slotstate:\n for k,v in slotstate.items():\n setattr(inst,k,v)\n dispatch[BUILD[0]]=load_build\n \n def load_mark(self):\n self.metastack.append(self.stack)\n self.stack=[]\n self.append=self.stack.append\n dispatch[MARK[0]]=load_mark\n \n def load_stop(self):\n value=self.stack.pop()\n raise _Stop(value)\n dispatch[STOP[0]]=load_stop\n \n \n \n \ndef _dump(obj,file,protocol=None ,*,fix_imports=True ,buffer_callback=None ):\n _Pickler(file,protocol,fix_imports=fix_imports,\n buffer_callback=buffer_callback).dump(obj)\n \ndef _dumps(obj,protocol=None ,*,fix_imports=True ,buffer_callback=None ):\n f=io.BytesIO()\n _Pickler(f,protocol,fix_imports=fix_imports,\n buffer_callback=buffer_callback).dump(obj)\n res=f.getvalue()\n assert isinstance(res,bytes_types)\n return res\n \ndef _load(file,*,fix_imports=True ,encoding=\"ASCII\",errors=\"strict\",\nbuffers=None ):\n return _Unpickler(file,fix_imports=fix_imports,buffers=buffers,\n encoding=encoding,errors=errors).load()\n \ndef _loads(s,/,*,fix_imports=True ,encoding=\"ASCII\",errors=\"strict\",\nbuffers=None ):\n if isinstance(s,str):\n raise TypeError(\"Can't load pickle from unicode string\")\n file=io.BytesIO(s)\n return _Unpickler(file,fix_imports=fix_imports,buffers=buffers,\n encoding=encoding,errors=errors).load()\n \n \ntry :\n from _pickle import (\n PickleError,\n PicklingError,\n UnpicklingError,\n Pickler,\n Unpickler,\n dump,\n dumps,\n load,\n loads\n )\nexcept ImportError:\n Pickler,Unpickler=_Pickler,_Unpickler\n dump,dumps,load,loads=_dump,_dumps,_load,_loads\n \n \ndef _test():\n import doctest\n return doctest.testmod()\n \nif __name__ ==\"__main__\":\n import argparse\n parser=argparse.ArgumentParser(\n description='display contents of the pickle files')\n parser.add_argument(\n 'pickle_file',type=argparse.FileType('br'),\n nargs='*',help='the pickle file')\n parser.add_argument(\n '-t','--test',action='store_true',\n help='run self-test suite')\n parser.add_argument(\n '-v',action='store_true',\n help='run verbosely; only affects self-test run')\n args=parser.parse_args()\n if args.test:\n _test()\n else :\n if not args.pickle_file:\n parser.print_help()\n else :\n import pprint\n for f in args.pickle_file:\n obj=load(f)\n pprint.pprint(obj)\n", ["_compat_pickle", "_pickle", "argparse", "codecs", "copyreg", "doctest", "functools", "io", "itertools", "org.python.core", "pprint", "re", "struct", "sys", "types"]], "pkgutil": [".py", "''\n\nfrom collections import namedtuple\nfrom functools import singledispatch as simplegeneric\nimport importlib\nimport importlib.util\nimport importlib.machinery\nimport os\nimport os.path\nimport re\nimport sys\nfrom types import ModuleType\nimport 
warnings\n\n__all__=[\n'get_importer','iter_importers','get_loader','find_loader',\n'walk_packages','iter_modules','get_data',\n'ImpImporter','ImpLoader','read_code','extend_path',\n'ModuleInfo',\n]\n\n\nModuleInfo=namedtuple('ModuleInfo','module_finder name ispkg')\nModuleInfo.__doc__='A namedtuple with minimal info about a module.'\n\n\ndef _get_spec(finder,name):\n ''\n \n try :\n find_spec=finder.find_spec\n except AttributeError:\n loader=finder.find_module(name)\n if loader is None :\n return None\n return importlib.util.spec_from_loader(name,loader)\n else :\n return find_spec(name)\n \n \ndef read_code(stream):\n\n\n import marshal\n \n magic=stream.read(4)\n if magic !=importlib.util.MAGIC_NUMBER:\n return None\n \n stream.read(12)\n return marshal.load(stream)\n \n \ndef walk_packages(path=None ,prefix='',onerror=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def seen(p,m={}):\n if p in m:\n return True\n m[p]=True\n \n for info in iter_modules(path,prefix):\n yield info\n \n if info.ispkg:\n try :\n __import__(info.name)\n except ImportError:\n if onerror is not None :\n onerror(info.name)\n except Exception:\n if onerror is not None :\n onerror(info.name)\n else :\n raise\n else :\n path=getattr(sys.modules[info.name],'__path__',None )or []\n \n \n path=[p for p in path if not seen(p)]\n \n yield from walk_packages(path,info.name+'.',onerror)\n \n \ndef iter_modules(path=None ,prefix=''):\n ''\n\n\n\n\n\n\n\n \n if path is None :\n importers=iter_importers()\n elif isinstance(path,str):\n raise ValueError(\"path must be None or list of paths to look for \"\n \"modules in\")\n else :\n importers=map(get_importer,path)\n \n yielded={}\n for i in importers:\n for name,ispkg in iter_importer_modules(i,prefix):\n if name not in yielded:\n yielded[name]=1\n yield ModuleInfo(i,name,ispkg)\n \n \n@simplegeneric\ndef iter_importer_modules(importer,prefix=''):\n if not hasattr(importer,'iter_modules'):\n return []\n return importer.iter_modules(prefix)\n \n \n \ndef _iter_file_finder_modules(importer,prefix=''):\n if importer.path is None or not os.path.isdir(importer.path):\n return\n \n yielded={}\n import inspect\n try :\n filenames=os.listdir(importer.path)\n except OSError:\n \n filenames=[]\n filenames.sort()\n \n for fn in filenames:\n modname=inspect.getmodulename(fn)\n if modname =='__init__'or modname in yielded:\n continue\n \n path=os.path.join(importer.path,fn)\n ispkg=False\n \n if not modname and os.path.isdir(path)and '.'not in fn:\n modname=fn\n try :\n dircontents=os.listdir(path)\n except OSError:\n \n dircontents=[]\n for fn in dircontents:\n subname=inspect.getmodulename(fn)\n if subname =='__init__':\n ispkg=True\n break\n else :\n continue\n \n if modname and '.'not in modname:\n yielded[modname]=1\n yield prefix+modname,ispkg\n \niter_importer_modules.register(\nimportlib.machinery.FileFinder,_iter_file_finder_modules)\n\n\ndef _import_imp():\n global imp\n with warnings.catch_warnings():\n warnings.simplefilter('ignore',DeprecationWarning)\n imp=importlib.import_module('imp')\n \nclass ImpImporter:\n ''\n\n\n\n\n\n\n\n \n \n def __init__(self,path=None ):\n global imp\n warnings.warn(\"This emulation is deprecated, use 'importlib' instead\",\n DeprecationWarning)\n _import_imp()\n self.path=path\n \n def find_module(self,fullname,path=None ):\n \n subname=fullname.split(\".\")[-1]\n if subname !=fullname and self.path is None :\n return None\n if self.path is None :\n path=None\n else :\n path=[os.path.realpath(self.path)]\n try :\n 
file,filename,etc=imp.find_module(subname,path)\n except ImportError:\n return None\n return ImpLoader(fullname,file,filename,etc)\n \n def iter_modules(self,prefix=''):\n if self.path is None or not os.path.isdir(self.path):\n return\n \n yielded={}\n import inspect\n try :\n filenames=os.listdir(self.path)\n except OSError:\n \n filenames=[]\n filenames.sort()\n \n for fn in filenames:\n modname=inspect.getmodulename(fn)\n if modname =='__init__'or modname in yielded:\n continue\n \n path=os.path.join(self.path,fn)\n ispkg=False\n \n if not modname and os.path.isdir(path)and '.'not in fn:\n modname=fn\n try :\n dircontents=os.listdir(path)\n except OSError:\n \n dircontents=[]\n for fn in dircontents:\n subname=inspect.getmodulename(fn)\n if subname =='__init__':\n ispkg=True\n break\n else :\n continue\n \n if modname and '.'not in modname:\n yielded[modname]=1\n yield prefix+modname,ispkg\n \n \nclass ImpLoader:\n ''\n \n code=source=None\n \n def __init__(self,fullname,file,filename,etc):\n warnings.warn(\"This emulation is deprecated, use 'importlib' instead\",\n DeprecationWarning)\n _import_imp()\n self.file=file\n self.filename=filename\n self.fullname=fullname\n self.etc=etc\n \n def load_module(self,fullname):\n self._reopen()\n try :\n mod=imp.load_module(fullname,self.file,self.filename,self.etc)\n finally :\n if self.file:\n self.file.close()\n \n \n return mod\n \n def get_data(self,pathname):\n with open(pathname,\"rb\")as file:\n return file.read()\n \n def _reopen(self):\n if self.file and self.file.closed:\n mod_type=self.etc[2]\n if mod_type ==imp.PY_SOURCE:\n self.file=open(self.filename,'r')\n elif mod_type in (imp.PY_COMPILED,imp.C_EXTENSION):\n self.file=open(self.filename,'rb')\n \n def _fix_name(self,fullname):\n if fullname is None :\n fullname=self.fullname\n elif fullname !=self.fullname:\n raise ImportError(\"Loader for module %s cannot handle \"\n \"module %s\"%(self.fullname,fullname))\n return fullname\n \n def is_package(self,fullname):\n fullname=self._fix_name(fullname)\n return self.etc[2]==imp.PKG_DIRECTORY\n \n def get_code(self,fullname=None ):\n fullname=self._fix_name(fullname)\n if self.code is None :\n mod_type=self.etc[2]\n if mod_type ==imp.PY_SOURCE:\n source=self.get_source(fullname)\n self.code=compile(source,self.filename,'exec')\n elif mod_type ==imp.PY_COMPILED:\n self._reopen()\n try :\n self.code=read_code(self.file)\n finally :\n self.file.close()\n elif mod_type ==imp.PKG_DIRECTORY:\n self.code=self._get_delegate().get_code()\n return self.code\n \n def get_source(self,fullname=None ):\n fullname=self._fix_name(fullname)\n if self.source is None :\n mod_type=self.etc[2]\n if mod_type ==imp.PY_SOURCE:\n self._reopen()\n try :\n self.source=self.file.read()\n finally :\n self.file.close()\n elif mod_type ==imp.PY_COMPILED:\n if os.path.exists(self.filename[:-1]):\n with open(self.filename[:-1],'r')as f:\n self.source=f.read()\n elif mod_type ==imp.PKG_DIRECTORY:\n self.source=self._get_delegate().get_source()\n return self.source\n \n def _get_delegate(self):\n finder=ImpImporter(self.filename)\n spec=_get_spec(finder,'__init__')\n return spec.loader\n \n def get_filename(self,fullname=None ):\n fullname=self._fix_name(fullname)\n mod_type=self.etc[2]\n if mod_type ==imp.PKG_DIRECTORY:\n return self._get_delegate().get_filename()\n elif mod_type in (imp.PY_SOURCE,imp.PY_COMPILED,imp.C_EXTENSION):\n return self.filename\n return None\n \n \ntry :\n import zipimport\n from zipimport import zipimporter\n \n def 
iter_zipimport_modules(importer,prefix=''):\n dirlist=sorted(zipimport._zip_directory_cache[importer.archive])\n _prefix=importer.prefix\n plen=len(_prefix)\n yielded={}\n import inspect\n for fn in dirlist:\n if not fn.startswith(_prefix):\n continue\n \n fn=fn[plen:].split(os.sep)\n \n if len(fn)==2 and fn[1].startswith('__init__.py'):\n if fn[0]not in yielded:\n yielded[fn[0]]=1\n yield prefix+fn[0],True\n \n if len(fn)!=1:\n continue\n \n modname=inspect.getmodulename(fn[0])\n if modname =='__init__':\n continue\n \n if modname and '.'not in modname and modname not in yielded:\n yielded[modname]=1\n yield prefix+modname,False\n \n iter_importer_modules.register(zipimporter,iter_zipimport_modules)\n \nexcept ImportError:\n pass\n \n \ndef get_importer(path_item):\n ''\n\n\n\n\n\n\n \n try :\n importer=sys.path_importer_cache[path_item]\n except KeyError:\n for path_hook in sys.path_hooks:\n try :\n importer=path_hook(path_item)\n sys.path_importer_cache.setdefault(path_item,importer)\n break\n except ImportError:\n pass\n else :\n importer=None\n return importer\n \n \ndef iter_importers(fullname=\"\"):\n ''\n\n\n\n\n\n\n\n\n\n \n if fullname.startswith('.'):\n msg=\"Relative module name {!r} not supported\".format(fullname)\n raise ImportError(msg)\n if '.'in fullname:\n \n pkg_name=fullname.rpartition(\".\")[0]\n pkg=importlib.import_module(pkg_name)\n path=getattr(pkg,'__path__',None )\n if path is None :\n return\n else :\n yield from sys.meta_path\n path=sys.path\n for item in path:\n yield get_importer(item)\n \n \ndef get_loader(module_or_name):\n ''\n\n\n\n\n \n if module_or_name in sys.modules:\n module_or_name=sys.modules[module_or_name]\n if module_or_name is None :\n return None\n if isinstance(module_or_name,ModuleType):\n module=module_or_name\n loader=getattr(module,'__loader__',None )\n if loader is not None :\n return loader\n if getattr(module,'__spec__',None )is None :\n return None\n fullname=module.__name__\n else :\n fullname=module_or_name\n return find_loader(fullname)\n \n \ndef find_loader(fullname):\n ''\n\n\n\n\n \n if fullname.startswith('.'):\n msg=\"Relative module name {!r} not supported\".format(fullname)\n raise ImportError(msg)\n try :\n spec=importlib.util.find_spec(fullname)\n except (ImportError,AttributeError,TypeError,ValueError)as ex:\n \n \n \n msg=\"Error while finding loader for {!r} ({}: {})\"\n raise ImportError(msg.format(fullname,type(ex),ex))from ex\n return spec.loader if spec is not None else None\n \n \ndef extend_path(path,name):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n if not isinstance(path,list):\n \n \n return path\n \n sname_pkg=name+\".pkg\"\n \n path=path[:]\n \n parent_package,_,final_name=name.rpartition('.')\n if parent_package:\n try :\n search_path=sys.modules[parent_package].__path__\n except (KeyError,AttributeError):\n \n \n return path\n else :\n search_path=sys.path\n \n for dir in search_path:\n if not isinstance(dir,str):\n continue\n \n finder=get_importer(dir)\n if finder is not None :\n portions=[]\n if hasattr(finder,'find_spec'):\n spec=finder.find_spec(final_name)\n if spec is not None :\n portions=spec.submodule_search_locations or []\n \n elif hasattr(finder,'find_loader'):\n _,portions=finder.find_loader(final_name)\n \n for portion in portions:\n \n \n if portion not in path:\n path.append(portion)\n \n \n \n pkgfile=os.path.join(dir,sname_pkg)\n if os.path.isfile(pkgfile):\n try :\n f=open(pkgfile)\n except OSError as msg:\n sys.stderr.write(\"Can't open %s: %s\\n\"%\n 
(pkgfile,msg))\n else :\n with f:\n for line in f:\n line=line.rstrip('\\n')\n if not line or line.startswith('#'):\n continue\n path.append(line)\n \n return path\n \n \ndef get_data(package,resource):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n spec=importlib.util.find_spec(package)\n if spec is None :\n return None\n loader=spec.loader\n if loader is None or not hasattr(loader,'get_data'):\n return None\n \n mod=(sys.modules.get(package)or\n importlib._bootstrap._load(spec))\n if mod is None or not hasattr(mod,'__file__'):\n return None\n \n \n \n \n parts=resource.split('/')\n parts.insert(0,os.path.dirname(mod.__file__))\n resource_name=os.path.join(*parts)\n return loader.get_data(resource_name)\n \n \n_DOTTED_WORDS=r'(?!\\d)(\\w+)(\\.(?!\\d)(\\w+))*'\n_NAME_PATTERN=re.compile(f'^(?P{_DOTTED_WORDS})(?P:(?P{_DOTTED_WORDS})?)?$',re.U)\ndel _DOTTED_WORDS\n\ndef resolve_name(name):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n m=_NAME_PATTERN.match(name)\n if not m:\n raise ValueError(f'invalid format: {name!r}')\n gd=m.groupdict()\n if gd.get('cln'):\n \n mod=importlib.import_module(gd['pkg'])\n parts=gd.get('obj')\n parts=parts.split('.')if parts else []\n else :\n \n parts=name.split('.')\n modname=parts.pop(0)\n \n mod=importlib.import_module(modname)\n while parts:\n p=parts[0]\n s=f'{modname}.{p}'\n try :\n mod=importlib.import_module(s)\n parts.pop(0)\n modname=s\n except ImportError:\n break\n \n \n \n result=mod\n for p in parts:\n result=getattr(result,p)\n return result\n", ["collections", "functools", "importlib", "importlib.machinery", "importlib.util", "inspect", "marshal", "os", "os.path", "re", "sys", "types", "warnings", "zipimport"]], "platform": [".py", "''\n\n\n\nfrom browser import self as window\n\ndef architecture(*args,**kw):\n return \"\",window.navigator.platform\n \ndef machine(*args,**kw):\n return ''\n \ndef node(*args,**kw):\n return ''\n \ndef platform(*args,**kw):\n return window.navigator.platform\n \ndef processor(*args,**kw):\n return ''\n \ndef python_build():\n return ('.'.join(map(str,__BRYTHON__.implementation[:-1])),\n __BRYTHON__.compiled_date)\n \ndef python_compiler():\n return ''\n \ndef python_branch():\n return ''\n \ndef python_implementation():\n return 'Brython'\n \ndef python_revision():\n return ''\n \ndef python_version():\n return '.'.join(map(str,__BRYTHON__.version_info[:3]))\n \ndef python_version_tuple():\n return __BRYTHON__.version_info[:3]\n \ndef release():\n return ''\n \ndef system():\n return window.navigator.platform\n \ndef system_alias(*args,**kw):\n return window.navigator.platform\n \ndef uname():\n from collections import namedtuple\n klass=namedtuple('uname_result',\n 'system node release version machine processor')\n return klass(window.navigator.platform,'','','','','')\n", ["browser", "collections"]], "posixpath": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\ncurdir='.'\npardir='..'\nextsep='.'\nsep='/'\npathsep=':'\ndefpath='/bin:/usr/bin'\naltsep=None\ndevnull='/dev/null'\n\nimport os\nimport sys\nimport stat\nimport genericpath\nfrom genericpath import 
*\n\n__all__=[\"normcase\",\"isabs\",\"join\",\"splitdrive\",\"split\",\"splitext\",\n\"basename\",\"dirname\",\"commonprefix\",\"getsize\",\"getmtime\",\n\"getatime\",\"getctime\",\"islink\",\"exists\",\"lexists\",\"isdir\",\"isfile\",\n\"ismount\",\"expanduser\",\"expandvars\",\"normpath\",\"abspath\",\n\"samefile\",\"sameopenfile\",\"samestat\",\n\"curdir\",\"pardir\",\"sep\",\"pathsep\",\"defpath\",\"altsep\",\"extsep\",\n\"devnull\",\"realpath\",\"supports_unicode_filenames\",\"relpath\",\n\"commonpath\"]\n\n\ndef _get_sep(path):\n if isinstance(path,bytes):\n return b'/'\n else :\n return '/'\n \n \n \n \n \n \ndef normcase(s):\n ''\n return os.fspath(s)\n \n \n \n \n \ndef isabs(s):\n ''\n s=os.fspath(s)\n sep=_get_sep(s)\n return s.startswith(sep)\n \n \n \n \n \n \ndef join(a,*p):\n ''\n\n\n \n a=os.fspath(a)\n sep=_get_sep(a)\n path=a\n try :\n if not p:\n path[:0]+sep\n for b in map(os.fspath,p):\n if b.startswith(sep):\n path=b\n elif not path or path.endswith(sep):\n path +=b\n else :\n path +=sep+b\n except (TypeError,AttributeError,BytesWarning):\n genericpath._check_arg_types('join',a,*p)\n raise\n return path\n \n \n \n \n \n \n \ndef split(p):\n ''\n \n p=os.fspath(p)\n sep=_get_sep(p)\n i=p.rfind(sep)+1\n head,tail=p[:i],p[i:]\n if head and head !=sep *len(head):\n head=head.rstrip(sep)\n return head,tail\n \n \n \n \n \n \n \ndef splitext(p):\n p=os.fspath(p)\n if isinstance(p,bytes):\n sep=b'/'\n extsep=b'.'\n else :\n sep='/'\n extsep='.'\n return genericpath._splitext(p,sep,None ,extsep)\nsplitext.__doc__=genericpath._splitext.__doc__\n\n\n\n\ndef splitdrive(p):\n ''\n \n p=os.fspath(p)\n return p[:0],p\n \n \n \n \ndef basename(p):\n ''\n p=os.fspath(p)\n sep=_get_sep(p)\n i=p.rfind(sep)+1\n return p[i:]\n \n \n \n \ndef dirname(p):\n ''\n p=os.fspath(p)\n sep=_get_sep(p)\n i=p.rfind(sep)+1\n head=p[:i]\n if head and head !=sep *len(head):\n head=head.rstrip(sep)\n return head\n \n \n \n \n \ndef islink(path):\n ''\n try :\n st=os.lstat(path)\n except (OSError,ValueError,AttributeError):\n return False\n return stat.S_ISLNK(st.st_mode)\n \n \n \ndef lexists(path):\n ''\n try :\n os.lstat(path)\n except (OSError,ValueError):\n return False\n return True\n \n \n \n \n \ndef ismount(path):\n ''\n try :\n s1=os.lstat(path)\n except (OSError,ValueError):\n \n return False\n else :\n \n if stat.S_ISLNK(s1.st_mode):\n return False\n \n if isinstance(path,bytes):\n parent=join(path,b'..')\n else :\n parent=join(path,'..')\n parent=realpath(parent)\n try :\n s2=os.lstat(parent)\n except (OSError,ValueError):\n return False\n \n dev1=s1.st_dev\n dev2=s2.st_dev\n if dev1 !=dev2:\n return True\n ino1=s1.st_ino\n ino2=s2.st_ino\n if ino1 ==ino2:\n return True\n return False\n \n \n \n \n \n \n \n \n \n \n \ndef expanduser(path):\n ''\n \n path=os.fspath(path)\n if isinstance(path,bytes):\n tilde=b'~'\n else :\n tilde='~'\n if not path.startswith(tilde):\n return path\n sep=_get_sep(path)\n i=path.find(sep,1)\n if i <0:\n i=len(path)\n if i ==1:\n if 'HOME'not in os.environ:\n import pwd\n try :\n userhome=pwd.getpwuid(os.getuid()).pw_dir\n except KeyError:\n \n \n return path\n else :\n userhome=os.environ['HOME']\n else :\n import pwd\n name=path[1:i]\n if isinstance(name,bytes):\n name=str(name,'ASCII')\n try :\n pwent=pwd.getpwnam(name)\n except KeyError:\n \n \n return path\n userhome=pwent.pw_dir\n if isinstance(path,bytes):\n userhome=os.fsencode(userhome)\n root=b'/'\n else :\n root='/'\n userhome=userhome.rstrip(root)\n return (userhome+path[i:])or root\n \n \n \n \n \n 
\n_varprog=None\n_varprogb=None\n\ndef expandvars(path):\n ''\n \n path=os.fspath(path)\n global _varprog,_varprogb\n if isinstance(path,bytes):\n if b'$'not in path:\n return path\n if not _varprogb:\n import re\n _varprogb=re.compile(br'\\$(\\w+|\\{[^}]*\\})',re.ASCII)\n search=_varprogb.search\n start=b'{'\n end=b'}'\n environ=getattr(os,'environb',None )\n else :\n if '$'not in path:\n return path\n if not _varprog:\n import re\n _varprog=re.compile(r'\\$(\\w+|\\{[^}]*\\})',re.ASCII)\n search=_varprog.search\n start='{'\n end='}'\n environ=os.environ\n i=0\n while True :\n m=search(path,i)\n if not m:\n break\n i,j=m.span(0)\n name=m.group(1)\n if name.startswith(start)and name.endswith(end):\n name=name[1:-1]\n try :\n if environ is None :\n value=os.fsencode(os.environ[os.fsdecode(name)])\n else :\n value=environ[name]\n except KeyError:\n i=j\n else :\n tail=path[j:]\n path=path[:i]+value\n i=len(path)\n path +=tail\n return path\n \n \n \n \n \n \ndef normpath(path):\n ''\n path=os.fspath(path)\n if isinstance(path,bytes):\n sep=b'/'\n empty=b''\n dot=b'.'\n dotdot=b'..'\n else :\n sep='/'\n empty=''\n dot='.'\n dotdot='..'\n if path ==empty:\n return dot\n initial_slashes=path.startswith(sep)\n \n \n if (initial_slashes and\n path.startswith(sep *2)and not path.startswith(sep *3)):\n initial_slashes=2\n comps=path.split(sep)\n new_comps=[]\n for comp in comps:\n if comp in (empty,dot):\n continue\n if (comp !=dotdot or (not initial_slashes and not new_comps)or\n (new_comps and new_comps[-1]==dotdot)):\n new_comps.append(comp)\n elif new_comps:\n new_comps.pop()\n comps=new_comps\n path=sep.join(comps)\n if initial_slashes:\n path=sep *initial_slashes+path\n return path or dot\n \n \ndef abspath(path):\n ''\n path=os.fspath(path)\n if not isabs(path):\n if isinstance(path,bytes):\n cwd=os.getcwdb()\n else :\n cwd=os.getcwd()\n path=join(cwd,path)\n return normpath(path)\n \n \n \n \n \ndef realpath(filename):\n ''\n \n filename=os.fspath(filename)\n path,ok=_joinrealpath(filename[:0],filename,{})\n return abspath(path)\n \n \n \ndef _joinrealpath(path,rest,seen):\n if isinstance(path,bytes):\n sep=b'/'\n curdir=b'.'\n pardir=b'..'\n else :\n sep='/'\n curdir='.'\n pardir='..'\n \n if isabs(rest):\n rest=rest[1:]\n path=sep\n \n while rest:\n name,_,rest=rest.partition(sep)\n if not name or name ==curdir:\n \n continue\n if name ==pardir:\n \n if path:\n path,name=split(path)\n if name ==pardir:\n path=join(path,pardir,pardir)\n else :\n path=pardir\n continue\n newpath=join(path,name)\n if not islink(newpath):\n path=newpath\n continue\n \n if newpath in seen:\n \n path=seen[newpath]\n if path is not None :\n \n continue\n \n \n return join(newpath,rest),False\n seen[newpath]=None\n path,ok=_joinrealpath(path,os.readlink(newpath),seen)\n if not ok:\n return join(path,rest),False\n seen[newpath]=path\n \n return path,True\n \n \nsupports_unicode_filenames=(sys.platform =='darwin')\n\ndef relpath(path,start=None ):\n ''\n \n if not path:\n raise ValueError(\"no path specified\")\n \n path=os.fspath(path)\n if isinstance(path,bytes):\n curdir=b'.'\n sep=b'/'\n pardir=b'..'\n else :\n curdir='.'\n sep='/'\n pardir='..'\n \n if start is None :\n start=curdir\n else :\n start=os.fspath(start)\n \n try :\n start_list=[x for x in abspath(start).split(sep)if x]\n path_list=[x for x in abspath(path).split(sep)if x]\n \n i=len(commonprefix([start_list,path_list]))\n \n rel_list=[pardir]*(len(start_list)-i)+path_list[i:]\n if not rel_list:\n return curdir\n return join(*rel_list)\n except 
(TypeError,AttributeError,BytesWarning,DeprecationWarning):\n genericpath._check_arg_types('relpath',path,start)\n raise\n \n \n \n \n \n \n \ndef commonpath(paths):\n ''\n \n if not paths:\n raise ValueError('commonpath() arg is an empty sequence')\n \n paths=tuple(map(os.fspath,paths))\n if isinstance(paths[0],bytes):\n sep=b'/'\n curdir=b'.'\n else :\n sep='/'\n curdir='.'\n \n try :\n split_paths=[path.split(sep)for path in paths]\n \n try :\n isabs,=set(p[:1]==sep for p in paths)\n except ValueError:\n raise ValueError(\"Can't mix absolute and relative paths\")from None\n \n split_paths=[[c for c in s if c and c !=curdir]for s in split_paths]\n s1=min(split_paths)\n s2=max(split_paths)\n common=s1\n for i,c in enumerate(s1):\n if c !=s2[i]:\n common=s1[:i]\n break\n \n prefix=sep if isabs else sep[:0]\n return prefix+sep.join(common)\n except (TypeError,AttributeError):\n genericpath._check_arg_types('commonpath',*paths)\n raise\n", ["genericpath", "os", "pwd", "re", "stat", "sys"]], "pprint": [".py", "\n\n\n\n\n\n\n\n\n\n\"\"\"Support to pretty-print lists, tuples, & dictionaries recursively.\n\nVery simple, but useful, especially in debugging data structures.\n\nClasses\n-------\n\nPrettyPrinter()\n Handle pretty-printing operations onto a stream using a configured\n set of formatting parameters.\n\nFunctions\n---------\n\npformat()\n Format a Python object into a pretty-printed representation.\n\npprint()\n Pretty-print a Python object to a stream [default is sys.stdout].\n\nsaferepr()\n Generate a 'standard' repr()-like value, but protect against recursive\n data structures.\n\n\"\"\"\n\nimport collections as _collections\nimport re\nimport sys as _sys\nimport types as _types\nfrom io import StringIO as _StringIO\n\n__all__=[\"pprint\",\"pformat\",\"isreadable\",\"isrecursive\",\"saferepr\",\n\"PrettyPrinter\",\"pp\"]\n\n\ndef pprint(object,stream=None ,indent=1,width=80,depth=None ,*,\ncompact=False ,sort_dicts=True ):\n ''\n printer=PrettyPrinter(\n stream=stream,indent=indent,width=width,depth=depth,\n compact=compact,sort_dicts=sort_dicts)\n printer.pprint(object)\n \ndef pformat(object,indent=1,width=80,depth=None ,*,\ncompact=False ,sort_dicts=True ):\n ''\n return PrettyPrinter(indent=indent,width=width,depth=depth,\n compact=compact,sort_dicts=sort_dicts).pformat(object)\n \ndef pp(object,*args,sort_dicts=False ,**kwargs):\n ''\n pprint(object,*args,sort_dicts=sort_dicts,**kwargs)\n \ndef saferepr(object):\n ''\n return _safe_repr(object,{},None ,0,True )[0]\n \ndef isreadable(object):\n ''\n return _safe_repr(object,{},None ,0,True )[1]\n \ndef isrecursive(object):\n ''\n return _safe_repr(object,{},None ,0,True )[2]\n \nclass _safe_key:\n ''\n\n\n\n\n\n\n \n \n __slots__=['obj']\n \n def __init__(self,obj):\n self.obj=obj\n \n def __lt__(self,other):\n try :\n return self.obj = 0')\n if depth is not None and depth <=0:\n raise ValueError('depth must be > 0')\n if not width:\n raise ValueError('width must be != 0')\n self._depth=depth\n self._indent_per_level=indent\n self._width=width\n if stream is not None :\n self._stream=stream\n else :\n self._stream=_sys.stdout\n self._compact=bool(compact)\n self._sort_dicts=sort_dicts\n \n def pprint(self,object):\n self._format(object,self._stream,0,0,{},0)\n self._stream.write(\"\\n\")\n \n def pformat(self,object):\n sio=_StringIO()\n self._format(object,sio,0,0,{},0)\n return sio.getvalue()\n \n def isrecursive(self,object):\n return self.format(object,{},0,0)[2]\n \n def isreadable(self,object):\n 
s,readable,recursive=self.format(object,{},0,0)\n return readable and not recursive\n \n def _format(self,object,stream,indent,allowance,context,level):\n objid=id(object)\n if objid in context:\n stream.write(_recursion(object))\n self._recursive=True\n self._readable=False\n return\n rep=self._repr(object,context,level)\n max_width=self._width -indent -allowance\n if len(rep)>max_width:\n p=self._dispatch.get(type(object).__repr__,None )\n if p is not None :\n context[objid]=1\n p(self,object,stream,indent,allowance,context,level+1)\n del context[objid]\n return\n elif isinstance(object,dict):\n context[objid]=1\n self._pprint_dict(object,stream,indent,allowance,\n context,level+1)\n del context[objid]\n return\n stream.write(rep)\n \n _dispatch={}\n \n def _pprint_dict(self,object,stream,indent,allowance,context,level):\n write=stream.write\n write('{')\n if self._indent_per_level >1:\n write((self._indent_per_level -1)*' ')\n length=len(object)\n if length:\n if self._sort_dicts:\n items=sorted(object.items(),key=_safe_tuple)\n else :\n items=object.items()\n self._format_dict_items(items,stream,indent,allowance+1,\n context,level)\n write('}')\n \n _dispatch[dict.__repr__]=_pprint_dict\n \n def _pprint_ordered_dict(self,object,stream,indent,allowance,context,level):\n if not len(object):\n stream.write(repr(object))\n return\n cls=object.__class__\n stream.write(cls.__name__+'(')\n self._format(list(object.items()),stream,\n indent+len(cls.__name__)+1,allowance+1,\n context,level)\n stream.write(')')\n \n _dispatch[_collections.OrderedDict.__repr__]=_pprint_ordered_dict\n \n def _pprint_list(self,object,stream,indent,allowance,context,level):\n stream.write('[')\n self._format_items(object,stream,indent,allowance+1,\n context,level)\n stream.write(']')\n \n _dispatch[list.__repr__]=_pprint_list\n \n def _pprint_tuple(self,object,stream,indent,allowance,context,level):\n stream.write('(')\n endchar=',)'if len(object)==1 else ')'\n self._format_items(object,stream,indent,allowance+len(endchar),\n context,level)\n stream.write(endchar)\n \n _dispatch[tuple.__repr__]=_pprint_tuple\n \n def _pprint_set(self,object,stream,indent,allowance,context,level):\n if not len(object):\n stream.write(repr(object))\n return\n typ=object.__class__\n if typ is set:\n stream.write('{')\n endchar='}'\n else :\n stream.write(typ.__name__+'({')\n endchar='})'\n indent +=len(typ.__name__)+1\n object=sorted(object,key=_safe_key)\n self._format_items(object,stream,indent,allowance+len(endchar),\n context,level)\n stream.write(endchar)\n \n _dispatch[set.__repr__]=_pprint_set\n _dispatch[frozenset.__repr__]=_pprint_set\n \n def _pprint_str(self,object,stream,indent,allowance,context,level):\n write=stream.write\n if not len(object):\n write(repr(object))\n return\n chunks=[]\n lines=object.splitlines(True )\n if level ==1:\n indent +=1\n allowance +=1\n max_width1=max_width=self._width -indent\n for i,line in enumerate(lines):\n rep=repr(line)\n if i ==len(lines)-1:\n max_width1 -=allowance\n if len(rep)<=max_width1:\n chunks.append(rep)\n else :\n \n parts=re.findall(r'\\S*\\s*',line)\n assert parts\n assert not parts[-1]\n parts.pop()\n max_width2=max_width\n current=''\n for j,part in enumerate(parts):\n candidate=current+part\n if j ==len(parts)-1 and i ==len(lines)-1:\n max_width2 -=allowance\n if len(repr(candidate))>max_width2:\n if current:\n chunks.append(repr(current))\n current=part\n else :\n current=candidate\n if current:\n chunks.append(repr(current))\n if len(chunks)==1:\n write(rep)\n return\n 
if level ==1:\n write('(')\n for i,rep in enumerate(chunks):\n if i >0:\n write('\\n'+' '*indent)\n write(rep)\n if level ==1:\n write(')')\n \n _dispatch[str.__repr__]=_pprint_str\n \n def _pprint_bytes(self,object,stream,indent,allowance,context,level):\n write=stream.write\n if len(object)<=4:\n write(repr(object))\n return\n parens=level ==1\n if parens:\n indent +=1\n allowance +=1\n write('(')\n delim=''\n for rep in _wrap_bytes_repr(object,self._width -indent,allowance):\n write(delim)\n write(rep)\n if not delim:\n delim='\\n'+' '*indent\n if parens:\n write(')')\n \n _dispatch[bytes.__repr__]=_pprint_bytes\n \n def _pprint_bytearray(self,object,stream,indent,allowance,context,level):\n write=stream.write\n write('bytearray(')\n self._pprint_bytes(bytes(object),stream,indent+10,\n allowance+1,context,level+1)\n write(')')\n \n _dispatch[bytearray.__repr__]=_pprint_bytearray\n \n def _pprint_mappingproxy(self,object,stream,indent,allowance,context,level):\n stream.write('mappingproxy(')\n self._format(object.copy(),stream,indent+13,allowance+1,\n context,level)\n stream.write(')')\n \n _dispatch[_types.MappingProxyType.__repr__]=_pprint_mappingproxy\n \n def _pprint_simplenamespace(self,object,stream,indent,allowance,context,level):\n if type(object)is _types.SimpleNamespace:\n \n \n cls_name='namespace'\n else :\n cls_name=object.__class__.__name__\n indent +=len(cls_name)+1\n delimnl=',\\n'+' '*indent\n items=object.__dict__.items()\n last_index=len(items)-1\n \n stream.write(cls_name+'(')\n for i,(key,ent)in enumerate(items):\n stream.write(key)\n stream.write('=')\n \n last=i ==last_index\n self._format(ent,stream,indent+len(key)+1,\n allowance if last else 1,\n context,level)\n if not last:\n stream.write(delimnl)\n stream.write(')')\n \n _dispatch[_types.SimpleNamespace.__repr__]=_pprint_simplenamespace\n \n def _format_dict_items(self,items,stream,indent,allowance,context,\n level):\n write=stream.write\n indent +=self._indent_per_level\n delimnl=',\\n'+' '*indent\n last_index=len(items)-1\n for i,(key,ent)in enumerate(items):\n last=i ==last_index\n rep=self._repr(key,context,level)\n write(rep)\n write(': ')\n self._format(ent,stream,indent+len(rep)+2,\n allowance if last else 1,\n context,level)\n if not last:\n write(delimnl)\n \n def _format_items(self,items,stream,indent,allowance,context,level):\n write=stream.write\n indent +=self._indent_per_level\n if self._indent_per_level >1:\n write((self._indent_per_level -1)*' ')\n delimnl=',\\n'+' '*indent\n delim=''\n width=max_width=self._width -indent+1\n it=iter(items)\n try :\n next_ent=next(it)\n except StopIteration:\n return\n last=False\n while not last:\n ent=next_ent\n try :\n next_ent=next(it)\n except StopIteration:\n last=True\n max_width -=allowance\n width -=allowance\n if self._compact:\n rep=self._repr(ent,context,level)\n w=len(rep)+2\n if width =w:\n width -=w\n write(delim)\n delim=', '\n write(rep)\n continue\n write(delim)\n delim=delimnl\n self._format(ent,stream,indent,\n allowance if last else 1,\n context,level)\n \n def _repr(self,object,context,level):\n repr,readable,recursive=self.format(object,context.copy(),\n self._depth,level)\n if not readable:\n self._readable=False\n if recursive:\n self._recursive=True\n return repr\n \n def format(self,object,context,maxlevels,level):\n ''\n\n\n \n return _safe_repr(object,context,maxlevels,level,self._sort_dicts)\n \n def _pprint_default_dict(self,object,stream,indent,allowance,context,level):\n if not len(object):\n stream.write(repr(object))\n 
return\n rdf=self._repr(object.default_factory,context,level)\n cls=object.__class__\n indent +=len(cls.__name__)+1\n stream.write('%s(%s,\\n%s'%(cls.__name__,rdf,' '*indent))\n self._pprint_dict(object,stream,indent,allowance+1,context,level)\n stream.write(')')\n \n _dispatch[_collections.defaultdict.__repr__]=_pprint_default_dict\n \n def _pprint_counter(self,object,stream,indent,allowance,context,level):\n if not len(object):\n stream.write(repr(object))\n return\n cls=object.__class__\n stream.write(cls.__name__+'({')\n if self._indent_per_level >1:\n stream.write((self._indent_per_level -1)*' ')\n items=object.most_common()\n self._format_dict_items(items,stream,\n indent+len(cls.__name__)+1,allowance+2,\n context,level)\n stream.write('})')\n \n _dispatch[_collections.Counter.__repr__]=_pprint_counter\n \n def _pprint_chain_map(self,object,stream,indent,allowance,context,level):\n if not len(object.maps):\n stream.write(repr(object))\n return\n cls=object.__class__\n stream.write(cls.__name__+'(')\n indent +=len(cls.__name__)+1\n for i,m in enumerate(object.maps):\n if i ==len(object.maps)-1:\n self._format(m,stream,indent,allowance+1,context,level)\n stream.write(')')\n else :\n self._format(m,stream,indent,1,context,level)\n stream.write(',\\n'+' '*indent)\n \n _dispatch[_collections.ChainMap.__repr__]=_pprint_chain_map\n \n def _pprint_deque(self,object,stream,indent,allowance,context,level):\n if not len(object):\n stream.write(repr(object))\n return\n cls=object.__class__\n stream.write(cls.__name__+'(')\n indent +=len(cls.__name__)+1\n stream.write('[')\n if object.maxlen is None :\n self._format_items(object,stream,indent,allowance+2,\n context,level)\n stream.write('])')\n else :\n self._format_items(object,stream,indent,2,\n context,level)\n rml=self._repr(object.maxlen,context,level)\n stream.write('],\\n%smaxlen=%s)'%(' '*indent,rml))\n \n _dispatch[_collections.deque.__repr__]=_pprint_deque\n \n def _pprint_user_dict(self,object,stream,indent,allowance,context,level):\n self._format(object.data,stream,indent,allowance,context,level -1)\n \n _dispatch[_collections.UserDict.__repr__]=_pprint_user_dict\n \n def _pprint_user_list(self,object,stream,indent,allowance,context,level):\n self._format(object.data,stream,indent,allowance,context,level -1)\n \n _dispatch[_collections.UserList.__repr__]=_pprint_user_list\n \n def _pprint_user_string(self,object,stream,indent,allowance,context,level):\n self._format(object.data,stream,indent,allowance,context,level -1)\n \n _dispatch[_collections.UserString.__repr__]=_pprint_user_string\n \n \n \ndef _safe_repr(object,context,maxlevels,level,sort_dicts):\n typ=type(object)\n if typ in _builtin_scalars:\n return repr(object),True ,False\n \n r=getattr(typ,\"__repr__\",None )\n if issubclass(typ,dict)and r is dict.__repr__:\n if not object:\n return \"{}\",True ,False\n objid=id(object)\n if maxlevels and level >=maxlevels:\n return \"{...}\",False ,objid in context\n if objid in context:\n return _recursion(object),False ,True\n context[objid]=1\n readable=True\n recursive=False\n components=[]\n append=components.append\n level +=1\n if sort_dicts:\n items=sorted(object.items(),key=_safe_tuple)\n else :\n items=object.items()\n for k,v in items:\n krepr,kreadable,krecur=_safe_repr(k,context,maxlevels,level,sort_dicts)\n vrepr,vreadable,vrecur=_safe_repr(v,context,maxlevels,level,sort_dicts)\n append(\"%s: %s\"%(krepr,vrepr))\n readable=readable and kreadable and vreadable\n if krecur or vrecur:\n recursive=True\n del context[objid]\n 
return \"{%s}\"%\", \".join(components),readable,recursive\n \n if (issubclass(typ,list)and r is list.__repr__)or\\\n (issubclass(typ,tuple)and r is tuple.__repr__):\n if issubclass(typ,list):\n if not object:\n return \"[]\",True ,False\n format=\"[%s]\"\n elif len(object)==1:\n format=\"(%s,)\"\n else :\n if not object:\n return \"()\",True ,False\n format=\"(%s)\"\n objid=id(object)\n if maxlevels and level >=maxlevels:\n return format %\"...\",False ,objid in context\n if objid in context:\n return _recursion(object),False ,True\n context[objid]=1\n readable=True\n recursive=False\n components=[]\n append=components.append\n level +=1\n for o in object:\n orepr,oreadable,orecur=_safe_repr(o,context,maxlevels,level,sort_dicts)\n append(orepr)\n if not oreadable:\n readable=False\n if orecur:\n recursive=True\n del context[objid]\n return format %\", \".join(components),readable,recursive\n \n rep=repr(object)\n return rep,(rep and not rep.startswith('<')),False\n \n_builtin_scalars=frozenset({str,bytes,bytearray,int,float,complex,\nbool,type(None )})\n\ndef _recursion(object):\n return (\"\"\n %(type(object).__name__,id(object)))\n \n \ndef _perfcheck(object=None ):\n import time\n if object is None :\n object=[(\"string\",(1,2),[3,4],{5:6,7:8})]*100000\n p=PrettyPrinter()\n t1=time.perf_counter()\n _safe_repr(object,{},None ,0,True )\n t2=time.perf_counter()\n p.pformat(object)\n t3=time.perf_counter()\n print(\"_safe_repr:\",t2 -t1)\n print(\"pformat:\",t3 -t2)\n \ndef _wrap_bytes_repr(object,width,allowance):\n current=b''\n last=len(object)//4 *4\n for i in range(0,len(object),4):\n part=object[i:i+4]\n candidate=current+part\n if i ==last:\n width -=allowance\n if len(repr(candidate))>width:\n if current:\n yield repr(current)\n current=part\n else :\n current=candidate\n if current:\n yield repr(current)\n \nif __name__ ==\"__main__\":\n _perfcheck()\n", ["collections", "io", "re", "sys", "time", "types"]], "profile": [".py", "#! 
/usr/bin/env python3\n\n\n\n\n\n\n\n\"\"\"Class for profiling Python code.\"\"\"\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nimport sys\nimport time\nimport marshal\n\n__all__=[\"run\",\"runctx\",\"Profile\"]\n\n\n\n\n\n\n\n\n\nclass _Utils:\n ''\n\n\n \n \n def __init__(self,profiler):\n self.profiler=profiler\n \n def run(self,statement,filename,sort):\n prof=self.profiler()\n try :\n prof.run(statement)\n except SystemExit:\n pass\n finally :\n self._show(prof,filename,sort)\n \n def runctx(self,statement,globals,locals,filename,sort):\n prof=self.profiler()\n try :\n prof.runctx(statement,globals,locals)\n except SystemExit:\n pass\n finally :\n self._show(prof,filename,sort)\n \n def _show(self,prof,filename,sort):\n if filename is not None :\n prof.dump_stats(filename)\n else :\n prof.print_stats(sort)\n \n \n \n \n \n \n \ndef run(statement,filename=None ,sort=-1):\n ''\n\n\n\n\n\n\n\n\n \n return _Utils(Profile).run(statement,filename,sort)\n \ndef runctx(statement,globals,locals,filename=None ,sort=-1):\n ''\n\n\n\n \n return _Utils(Profile).runctx(statement,globals,locals,filename,sort)\n \n \nclass Profile:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n bias=0\n \n def __init__(self,timer=None ,bias=None ):\n self.timings={}\n self.cur=None\n self.cmd=\"\"\n self.c_func_name=\"\"\n \n if bias is None :\n bias=self.bias\n self.bias=bias\n \n if not timer:\n self.timer=self.get_time=time.process_time\n self.dispatcher=self.trace_dispatch_i\n else :\n self.timer=timer\n t=self.timer()\n try :\n length=len(t)\n except TypeError:\n self.get_time=timer\n self.dispatcher=self.trace_dispatch_i\n else :\n if length ==2:\n self.dispatcher=self.trace_dispatch\n else :\n self.dispatcher=self.trace_dispatch_l\n \n \n \n \n \n def get_time_timer(timer=timer,sum=sum):\n return sum(timer())\n self.get_time=get_time_timer\n self.t=self.get_time()\n self.simulate_call('profiler')\n \n \n \n def trace_dispatch(self,frame,event,arg):\n timer=self.timer\n t=timer()\n t=t[0]+t[1]-self.t -self.bias\n \n if event ==\"c_call\":\n self.c_func_name=arg.__name__\n \n if self.dispatch[event](self,frame,t):\n t=timer()\n self.t=t[0]+t[1]\n else :\n r=timer()\n self.t=r[0]+r[1]-t\n \n \n \n \n def trace_dispatch_i(self,frame,event,arg):\n timer=self.timer\n t=timer()-self.t -self.bias\n \n if event ==\"c_call\":\n self.c_func_name=arg.__name__\n \n if self.dispatch[event](self,frame,t):\n self.t=timer()\n else :\n self.t=timer()-t\n \n \n \n \n def trace_dispatch_mac(self,frame,event,arg):\n timer=self.timer\n t=timer()/60.0 -self.t -self.bias\n \n if event ==\"c_call\":\n self.c_func_name=arg.__name__\n \n if self.dispatch[event](self,frame,t):\n self.t=timer()/60.0\n else :\n self.t=timer()/60.0 -t\n \n \n \n def trace_dispatch_l(self,frame,event,arg):\n get_time=self.get_time\n t=get_time()-self.t -self.bias\n \n if event ==\"c_call\":\n self.c_func_name=arg.__name__\n \n if self.dispatch[event](self,frame,t):\n self.t=get_time()\n else :\n self.t=get_time()-t\n \n \n \n \n \n \n \n \n def trace_dispatch_exception(self,frame,t):\n rpt,rit,ret,rfn,rframe,rcur=self.cur\n if (rframe is not frame)and rcur:\n return self.trace_dispatch_return(rframe,t)\n self.cur=rpt,rit+t,ret,rfn,rframe,rcur\n return 1\n \n \n def trace_dispatch_call(self,frame,t):\n if self.cur and frame.f_back is not self.cur[-2]:\n rpt,rit,ret,rfn,rframe,rcur=self.cur\n if not isinstance(rframe,Profile.fake_frame):\n assert rframe.f_back is frame.f_back,(\"Bad call\",rfn,\n rframe,rframe.f_back,\n 
frame,frame.f_back)\n self.trace_dispatch_return(rframe,0)\n assert (self.cur is None or\\\n frame.f_back is self.cur[-2]),(\"Bad call\",\n self.cur[-3])\n fcode=frame.f_code\n fn=(fcode.co_filename,fcode.co_firstlineno,fcode.co_name)\n self.cur=(t,0,0,fn,frame,self.cur)\n timings=self.timings\n if fn in timings:\n cc,ns,tt,ct,callers=timings[fn]\n timings[fn]=cc,ns+1,tt,ct,callers\n else :\n timings[fn]=0,0,0,0,{}\n return 1\n \n def trace_dispatch_c_call(self,frame,t):\n fn=(\"\",0,self.c_func_name)\n self.cur=(t,0,0,fn,frame,self.cur)\n timings=self.timings\n if fn in timings:\n cc,ns,tt,ct,callers=timings[fn]\n timings[fn]=cc,ns+1,tt,ct,callers\n else :\n timings[fn]=0,0,0,0,{}\n return 1\n \n def trace_dispatch_return(self,frame,t):\n if frame is not self.cur[-2]:\n assert frame is self.cur[-2].f_back,(\"Bad return\",self.cur[-3])\n self.trace_dispatch_return(self.cur[-2],0)\n \n \n \n \n rpt,rit,ret,rfn,frame,rcur=self.cur\n rit=rit+t\n frame_total=rit+ret\n \n ppt,pit,pet,pfn,pframe,pcur=rcur\n self.cur=ppt,pit+rpt,pet+frame_total,pfn,pframe,pcur\n \n timings=self.timings\n cc,ns,tt,ct,callers=timings[rfn]\n if not ns:\n \n \n \n \n ct=ct+frame_total\n cc=cc+1\n \n if pfn in callers:\n callers[pfn]=callers[pfn]+1\n \n \n \n else :\n callers[pfn]=1\n \n timings[rfn]=cc,ns -1,tt+rit,ct,callers\n \n return 1\n \n \n dispatch={\n \"call\":trace_dispatch_call,\n \"exception\":trace_dispatch_exception,\n \"return\":trace_dispatch_return,\n \"c_call\":trace_dispatch_c_call,\n \"c_exception\":trace_dispatch_return,\n \"c_return\":trace_dispatch_return,\n }\n \n \n \n \n \n \n \n \n def set_cmd(self,cmd):\n if self.cur[-1]:return\n self.cmd=cmd\n self.simulate_call(cmd)\n \n class fake_code:\n def __init__(self,filename,line,name):\n self.co_filename=filename\n self.co_line=line\n self.co_name=name\n self.co_firstlineno=0\n \n def __repr__(self):\n return repr((self.co_filename,self.co_line,self.co_name))\n \n class fake_frame:\n def __init__(self,code,prior):\n self.f_code=code\n self.f_back=prior\n \n def simulate_call(self,name):\n code=self.fake_code('profile',0,name)\n if self.cur:\n pframe=self.cur[-2]\n else :\n pframe=None\n frame=self.fake_frame(code,pframe)\n self.dispatch['call'](self,frame,0)\n \n \n \n \n def simulate_cmd_complete(self):\n get_time=self.get_time\n t=get_time()-self.t\n while self.cur[-1]:\n \n \n self.dispatch['return'](self,self.cur[-2],t)\n t=0\n self.t=get_time()-t\n \n \n def print_stats(self,sort=-1):\n import pstats\n pstats.Stats(self).strip_dirs().sort_stats(sort).\\\n print_stats()\n \n def dump_stats(self,file):\n with open(file,'wb')as f:\n self.create_stats()\n marshal.dump(self.stats,f)\n \n def create_stats(self):\n self.simulate_cmd_complete()\n self.snapshot_stats()\n \n def snapshot_stats(self):\n self.stats={}\n for func,(cc,ns,tt,ct,callers)in self.timings.items():\n callers=callers.copy()\n nc=0\n for callcnt in callers.values():\n nc +=callcnt\n self.stats[func]=cc,nc,tt,ct,callers\n \n \n \n \n \n def run(self,cmd):\n import __main__\n dict=__main__.__dict__\n return self.runctx(cmd,dict,dict)\n \n def runctx(self,cmd,globals,locals):\n self.set_cmd(cmd)\n sys.setprofile(self.dispatcher)\n try :\n exec(cmd,globals,locals)\n finally :\n sys.setprofile(None )\n return self\n \n \n def runcall(self,func,/,*args,**kw):\n self.set_cmd(repr(func))\n sys.setprofile(self.dispatcher)\n try :\n return func(*args,**kw)\n finally :\n sys.setprofile(None )\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n 
\n \n \n \n \n \n \n \n \n \n def calibrate(self,m,verbose=0):\n if self.__class__ is not Profile:\n raise TypeError(\"Subclasses must override .calibrate().\")\n \n saved_bias=self.bias\n self.bias=0\n try :\n return self._calibrate_inner(m,verbose)\n finally :\n self.bias=saved_bias\n \n def _calibrate_inner(self,m,verbose):\n get_time=self.get_time\n \n \n \n \n \n \n \n def f1(n):\n for i in range(n):\n x=1\n \n def f(m,f1=f1):\n for i in range(m):\n f1(100)\n \n f(m)\n \n \n t0=get_time()\n f(m)\n t1=get_time()\n elapsed_noprofile=t1 -t0\n if verbose:\n print(\"elapsed time without profiling =\",elapsed_noprofile)\n \n \n \n \n p=Profile()\n t0=get_time()\n p.runctx('f(m)',globals(),locals())\n t1=get_time()\n elapsed_profile=t1 -t0\n if verbose:\n print(\"elapsed time with profiling =\",elapsed_profile)\n \n \n total_calls=0.0\n reported_time=0.0\n for (filename,line,funcname),(cc,ns,tt,ct,callers)in\\\n p.timings.items():\n if funcname in (\"f\",\"f1\"):\n total_calls +=cc\n reported_time +=tt\n \n if verbose:\n print(\"'CPU seconds' profiler reported =\",reported_time)\n print(\"total # calls =\",total_calls)\n if total_calls !=m+1:\n raise ValueError(\"internal error: total calls = %d\"%total_calls)\n \n \n \n \n \n mean=(reported_time -elapsed_noprofile)/2.0 /total_calls\n if verbose:\n print(\"mean stopwatch overhead per profile event =\",mean)\n return mean\n \n \n \ndef main():\n import os\n from optparse import OptionParser\n \n usage=\"profile.py [-o output_file_path] [-s sort] [-m module | scriptfile] [arg] ...\"\n parser=OptionParser(usage=usage)\n parser.allow_interspersed_args=False\n parser.add_option('-o','--outfile',dest=\"outfile\",\n help=\"Save stats to \",default=None )\n parser.add_option('-m',dest=\"module\",action=\"store_true\",\n help=\"Profile a library module.\",default=False )\n parser.add_option('-s','--sort',dest=\"sort\",\n help=\"Sort order when printing to stdout, based on pstats.Stats class\",\n default=-1)\n \n if not sys.argv[1:]:\n parser.print_usage()\n sys.exit(2)\n \n (options,args)=parser.parse_args()\n sys.argv[:]=args\n \n if len(args)>0:\n if options.module:\n import runpy\n code=\"run_module(modname, run_name='__main__')\"\n globs={\n 'run_module':runpy.run_module,\n 'modname':args[0]\n }\n else :\n progname=args[0]\n sys.path.insert(0,os.path.dirname(progname))\n with open(progname,'rb')as fp:\n code=compile(fp.read(),progname,'exec')\n globs={\n '__file__':progname,\n '__name__':'__main__',\n '__package__':None ,\n '__cached__':None ,\n }\n runctx(code,globs,None ,options.outfile,options.sort)\n else :\n parser.print_usage()\n return parser\n \n \nif __name__ =='__main__':\n main()\n", ["__main__", "marshal", "optparse", "os", "pstats", "runpy", "sys", "time"]], "pwd": [".py", "\ndef getpwuid():\n pass\n", []], "pydoc": [".py", "#!/usr/bin/env python3\n''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n__all__=['help']\n__author__=\"Ka-Ping Yee \"\n__date__=\"26 February 2001\"\n\n__credits__=\"\"\"Guido van Rossum, for an excellent programming language.\nTommy Burnette, the original creator of manpy.\nPaul Prescod, for all his work on onlinehelp.\nRichard Chamberlain, for the first implementation of textdoc.\n\"\"\"\n\n\n\n\n\n\n\n\nimport builtins\nimport importlib._bootstrap\nimport importlib._bootstrap_external\nimport importlib.machinery\nimport importlib.util\nimport inspect\nimport io\nimport os\nimport pkgutil\nimport platform\nimport re\nimport sys\nimport sysconfig\nimport time\nimport 
tokenize\nimport urllib.parse\nimport warnings\nfrom collections import deque\nfrom reprlib import Repr\nfrom traceback import format_exception_only\n\n\n\n\ndef pathdirs():\n ''\n dirs=[]\n normdirs=[]\n for dir in sys.path:\n dir=os.path.abspath(dir or '.')\n normdir=os.path.normcase(dir)\n if normdir not in normdirs and os.path.isdir(dir):\n dirs.append(dir)\n normdirs.append(normdir)\n return dirs\n \ndef _findclass(func):\n cls=sys.modules.get(func.__module__)\n if cls is None :\n return None\n for name in func.__qualname__.split('.')[:-1]:\n cls=getattr(cls,name)\n if not inspect.isclass(cls):\n return None\n return cls\n \ndef _finddoc(obj):\n if inspect.ismethod(obj):\n name=obj.__func__.__name__\n self=obj.__self__\n if (inspect.isclass(self)and\n getattr(getattr(self,name,None ),'__func__')is obj.__func__):\n \n cls=self\n else :\n cls=self.__class__\n elif inspect.isfunction(obj):\n name=obj.__name__\n cls=_findclass(obj)\n if cls is None or getattr(cls,name)is not obj:\n return None\n elif inspect.isbuiltin(obj):\n name=obj.__name__\n self=obj.__self__\n if (inspect.isclass(self)and\n self.__qualname__+'.'+name ==obj.__qualname__):\n \n cls=self\n else :\n cls=self.__class__\n \n elif isinstance(obj,property):\n func=obj.fget\n name=func.__name__\n cls=_findclass(func)\n if cls is None or getattr(cls,name)is not obj:\n return None\n elif inspect.ismethoddescriptor(obj)or inspect.isdatadescriptor(obj):\n name=obj.__name__\n cls=obj.__objclass__\n if getattr(cls,name)is not obj:\n return None\n if inspect.ismemberdescriptor(obj):\n slots=getattr(cls,'__slots__',None )\n if isinstance(slots,dict)and name in slots:\n return slots[name]\n else :\n return None\n for base in cls.__mro__:\n try :\n doc=_getowndoc(getattr(base,name))\n except AttributeError:\n continue\n if doc is not None :\n return doc\n return None\n \ndef _getowndoc(obj):\n ''\n \n try :\n doc=object.__getattribute__(obj,'__doc__')\n if doc is None :\n return None\n if obj is not type:\n typedoc=type(obj).__doc__\n if isinstance(typedoc,str)and typedoc ==doc:\n return None\n return doc\n except AttributeError:\n return None\n \ndef _getdoc(object):\n ''\n\n\n\n \n doc=_getowndoc(object)\n if doc is None :\n try :\n doc=_finddoc(object)\n except (AttributeError,TypeError):\n return None\n if not isinstance(doc,str):\n return None\n return inspect.cleandoc(doc)\n \ndef getdoc(object):\n ''\n result=_getdoc(object)or inspect.getcomments(object)\n return result and re.sub('^ *\\n','',result.rstrip())or ''\n \ndef splitdoc(doc):\n ''\n lines=doc.strip().split('\\n')\n if len(lines)==1:\n return lines[0],''\n elif len(lines)>=2 and not lines[1].rstrip():\n return lines[0],'\\n'.join(lines[2:])\n return '','\\n'.join(lines)\n \ndef classname(object,modname):\n ''\n name=object.__name__\n if object.__module__ !=modname:\n name=object.__module__+'.'+name\n return name\n \ndef isdata(object):\n ''\n return not (inspect.ismodule(object)or inspect.isclass(object)or\n inspect.isroutine(object)or inspect.isframe(object)or\n inspect.istraceback(object)or inspect.iscode(object))\n \ndef replace(text,*pairs):\n ''\n while pairs:\n text=pairs[1].join(text.split(pairs[0]))\n pairs=pairs[2:]\n return text\n \ndef cram(text,maxlen):\n ''\n if len(text)>maxlen:\n pre=max(0,(maxlen -3)//2)\n post=max(0,maxlen -3 -pre)\n return text[:pre]+'...'+text[len(text)-post:]\n return text\n \n_re_stripid=re.compile(r' at 0x[0-9a-f]{6,16}(>+)$',re.IGNORECASE)\ndef stripid(text):\n ''\n \n return _re_stripid.sub(r'\\1',text)\n \ndef 
_is_bound_method(fn):\n ''\n\n\n \n if inspect.ismethod(fn):\n return True\n if inspect.isbuiltin(fn):\n self=getattr(fn,'__self__',None )\n return not (inspect.ismodule(self)or (self is None ))\n return False\n \n \ndef allmethods(cl):\n methods={}\n for key,value in inspect.getmembers(cl,inspect.isroutine):\n methods[key]=1\n for base in cl.__bases__:\n methods.update(allmethods(base))\n for key in methods.keys():\n methods[key]=getattr(cl,key)\n return methods\n \ndef _split_list(s,predicate):\n ''\n\n\n\n\n \n \n yes=[]\n no=[]\n for x in s:\n if predicate(x):\n yes.append(x)\n else :\n no.append(x)\n return yes,no\n \ndef visiblename(name,all=None ,obj=None ):\n ''\n \n \n if name in {'__author__','__builtins__','__cached__','__credits__',\n '__date__','__doc__','__file__','__spec__',\n '__loader__','__module__','__name__','__package__',\n '__path__','__qualname__','__slots__','__version__'}:\n return 0\n \n if name.startswith('__')and name.endswith('__'):return 1\n \n if name.startswith('_')and hasattr(obj,'_fields'):\n return True\n if all is not None :\n \n return name in all\n else :\n return not name.startswith('_')\n \ndef classify_class_attrs(object):\n ''\n results=[]\n for (name,kind,cls,value)in inspect.classify_class_attrs(object):\n if inspect.isdatadescriptor(value):\n kind='data descriptor'\n if isinstance(value,property)and value.fset is None :\n kind='readonly property'\n results.append((name,kind,cls,value))\n return results\n \ndef sort_attributes(attrs,object):\n ''\n \n \n fields=getattr(object,'_fields',[])\n try :\n field_order={name:i -len(fields)for (i,name)in enumerate(fields)}\n except TypeError:\n field_order={}\n keyfunc=lambda attr:(field_order.get(attr[0],0),attr[0])\n attrs.sort(key=keyfunc)\n \n \n \ndef ispackage(path):\n ''\n if os.path.isdir(path):\n for ext in ('.py','.pyc'):\n if os.path.isfile(os.path.join(path,'__init__'+ext)):\n return True\n return False\n \ndef source_synopsis(file):\n line=file.readline()\n while line[:1]=='#'or not line.strip():\n line=file.readline()\n if not line:break\n line=line.strip()\n if line[:4]=='r\"\"\"':line=line[1:]\n if line[:3]=='\"\"\"':\n line=line[3:]\n if line[-1:]=='\\\\':line=line[:-1]\n while not line.strip():\n line=file.readline()\n if not line:break\n result=line.split('\"\"\"')[0].strip()\n else :result=None\n return result\n \ndef synopsis(filename,cache={}):\n ''\n mtime=os.stat(filename).st_mtime\n lastupdate,result=cache.get(filename,(None ,None ))\n if lastupdate is None or lastupdate ','>')\n \n def repr(self,object):\n return Repr.repr(self,object)\n \n def repr1(self,x,level):\n if hasattr(type(x),'__name__'):\n methodname='repr_'+'_'.join(type(x).__name__.split())\n if hasattr(self,methodname):\n return getattr(self,methodname)(x,level)\n return self.escape(cram(stripid(repr(x)),self.maxother))\n \n def repr_string(self,x,level):\n test=cram(x,self.maxstring)\n testrepr=repr(test)\n if '\\\\'in test and '\\\\'not in replace(testrepr,r'\\\\',''):\n \n \n return 'r'+testrepr[0]+self.escape(test)+testrepr[0]\n return re.sub(r'((\\\\[\\\\abfnrtv\\'\"]|\\\\[0-9]..|\\\\x..|\\\\u....)+)',\n r'\\1',\n self.escape(testrepr))\n \n repr_str=repr_string\n \n def repr_instance(self,x,level):\n try :\n return self.escape(cram(stripid(repr(x)),self.maxstring))\n except :\n return self.escape('<%s instance>'%x.__class__.__name__)\n \n repr_unicode=repr_string\n \nclass HTMLDoc(Doc):\n ''\n \n \n \n _repr_instance=HTMLRepr()\n repr=_repr_instance.repr\n escape=_repr_instance.escape\n \n def 
page(self,title,contents):\n ''\n return '''\\\n\nPython: %s\n\n\n%s\n'''%(title,contents)\n \n def heading(self,title,fgcol,bgcol,extras=''):\n ''\n return '''\n\n\n
 
\n 
%s
%s
\n '''%(bgcol,fgcol,title,fgcol,extras or ' ')\n \n def section(self,title,fgcol,bgcol,contents,width=6,\n prelude='',marginalia=None ,gap=' '):\n ''\n if marginalia is None :\n marginalia=''+' '*width+''\n result='''

\n\n\n\n '''%(bgcol,fgcol,title)\n if prelude:\n result=result+'''\n\n\n'''%(bgcol,marginalia,prelude,gap)\n else :\n result=result+'''\n'''%(bgcol,marginalia,gap)\n \n return result+'\\n
 
\n%s
%s%s
%s
%s%s%s
'%contents\n \n def bigsection(self,title,*args):\n ''\n title='%s'%title\n return self.section(title,*args)\n \n def preformat(self,text):\n ''\n text=self.escape(text.expandtabs())\n return replace(text,'\\n\\n','\\n \\n','\\n\\n','\\n \\n',\n ' ',' ','\\n','
\\n')\n \n def multicolumn(self,list,format,cols=4):\n ''\n result=''\n rows=(len(list)+cols -1)//cols\n for col in range(cols):\n result=result+''%(100 //cols)\n for i in range(rows *col,rows *col+rows):\n if i \\n'\n result=result+''\n return '%s
'%result\n \n def grey(self,text):return '%s'%text\n \n def namelink(self,name,*dicts):\n ''\n for dict in dicts:\n if name in dict:\n return '
%s'%(dict[name],name)\n return name\n \n def classlink(self,object,modname):\n ''\n name,module=object.__name__,sys.modules.get(object.__module__)\n if hasattr(module,name)and getattr(module,name)is object:\n return '%s'%(\n module.__name__,name,classname(object,modname))\n return classname(object,modname)\n \n def modulelink(self,object):\n ''\n return '%s'%(object.__name__,object.__name__)\n \n def modpkglink(self,modpkginfo):\n ''\n name,path,ispackage,shadowed=modpkginfo\n if shadowed:\n return self.grey(name)\n if path:\n url='%s.%s.html'%(path,name)\n else :\n url='%s.html'%name\n if ispackage:\n text='%s (package)'%name\n else :\n text=name\n return '%s'%(url,text)\n \n def filelink(self,url,path):\n ''\n return '%s'%(url,path)\n \n def markup(self,text,escape=None ,funcs={},classes={},methods={}):\n ''\n \n escape=escape or self.escape\n results=[]\n here=0\n pattern=re.compile(r'\\b((http|https|ftp)://\\S+[\\w/]|'\n r'RFC[- ]?(\\d+)|'\n r'PEP[- ]?(\\d+)|'\n r'(self\\.)?(\\w+))')\n while True :\n match=pattern.search(text,here)\n if not match:break\n start,end=match.span()\n results.append(escape(text[here:start]))\n \n all,scheme,rfc,pep,selfdot,name=match.groups()\n if scheme:\n url=escape(all).replace('\"','"')\n results.append('%s'%(url,url))\n elif rfc:\n url='http://www.rfc-editor.org/rfc/rfc%d.txt'%int(rfc)\n results.append('%s'%(url,escape(all)))\n elif pep:\n url='http://www.python.org/dev/peps/pep-%04d/'%int(pep)\n results.append('%s'%(url,escape(all)))\n elif selfdot:\n \n \n if text[end:end+1]=='(':\n results.append('self.'+self.namelink(name,methods))\n else :\n results.append('self.%s'%name)\n elif text[end:end+1]=='(':\n results.append(self.namelink(name,methods,funcs,classes))\n else :\n results.append(self.namelink(name,classes))\n here=end\n results.append(escape(text[here:]))\n return ''.join(results)\n \n \n \n def formattree(self,tree,modname,parent=None ):\n ''\n result=''\n for entry in tree:\n if type(entry)is type(()):\n c,bases=entry\n result=result+'

'\n result=result+self.classlink(c,modname)\n if bases and bases !=(parent,):\n parents=[]\n for base in bases:\n parents.append(self.classlink(base,modname))\n result=result+'('+', '.join(parents)+')'\n result=result+'\\n
'\n elif type(entry)is type([]):\n result=result+'
\\n%s
\\n'%self.formattree(\n entry,modname,c)\n return '
\\n%s
\\n'%result\n \n def docmodule(self,object,name=None ,mod=None ,*ignored):\n ''\n name=object.__name__\n try :\n all=object.__all__\n except AttributeError:\n all=None\n parts=name.split('.')\n links=[]\n for i in range(len(parts)-1):\n links.append(\n '%s'%\n ('.'.join(parts[:i+1]),parts[i]))\n linkedname='.'.join(links+parts[-1:])\n head='%s'%linkedname\n try :\n path=inspect.getabsfile(object)\n url=urllib.parse.quote(path)\n filelink=self.filelink(url,path)\n except TypeError:\n filelink='(built-in)'\n info=[]\n if hasattr(object,'__version__'):\n version=str(object.__version__)\n if version[:11]=='$'+'Revision: 'and version[-1:]=='$':\n version=version[11:-1].strip()\n info.append('version %s'%self.escape(version))\n if hasattr(object,'__date__'):\n info.append(self.escape(str(object.__date__)))\n if info:\n head=head+' (%s)'%', '.join(info)\n docloc=self.getdocloc(object)\n if docloc is not None :\n docloc='
Module Reference'%locals()\n else :\n docloc=''\n result=self.heading(\n head,'#ffffff','#7799ee',\n 'index
'+filelink+docloc)\n \n modules=inspect.getmembers(object,inspect.ismodule)\n \n classes,cdict=[],{}\n for key,value in inspect.getmembers(object,inspect.isclass):\n \n if (all is not None or\n (inspect.getmodule(value)or object)is object):\n if visiblename(key,all,object):\n classes.append((key,value))\n cdict[key]=cdict[value]='#'+key\n for key,value in classes:\n for base in value.__bases__:\n key,modname=base.__name__,base.__module__\n module=sys.modules.get(modname)\n if modname !=name and module and hasattr(module,key):\n if getattr(module,key)is base:\n if not key in cdict:\n cdict[key]=cdict[base]=modname+'.html#'+key\n funcs,fdict=[],{}\n for key,value in inspect.getmembers(object,inspect.isroutine):\n \n if (all is not None or\n inspect.isbuiltin(value)or inspect.getmodule(value)is object):\n if visiblename(key,all,object):\n funcs.append((key,value))\n fdict[key]='#-'+key\n if inspect.isfunction(value):fdict[value]=fdict[key]\n data=[]\n for key,value in inspect.getmembers(object,isdata):\n if visiblename(key,all,object):\n data.append((key,value))\n \n doc=self.markup(getdoc(object),self.preformat,fdict,cdict)\n doc=doc and '%s'%doc\n result=result+'

%s

\\n'%doc\n \n if hasattr(object,'__path__'):\n modpkgs=[]\n for importer,modname,ispkg in pkgutil.iter_modules(object.__path__):\n modpkgs.append((modname,name,ispkg,0))\n modpkgs.sort()\n contents=self.multicolumn(modpkgs,self.modpkglink)\n result=result+self.bigsection(\n 'Package Contents','#ffffff','#aa55cc',contents)\n elif modules:\n contents=self.multicolumn(\n modules,lambda t:self.modulelink(t[1]))\n result=result+self.bigsection(\n 'Modules','#ffffff','#aa55cc',contents)\n \n if classes:\n classlist=[value for (key,value)in classes]\n contents=[\n self.formattree(inspect.getclasstree(classlist,1),name)]\n for key,value in classes:\n contents.append(self.document(value,key,name,fdict,cdict))\n result=result+self.bigsection(\n 'Classes','#ffffff','#ee77aa',' '.join(contents))\n if funcs:\n contents=[]\n for key,value in funcs:\n contents.append(self.document(value,key,name,fdict,cdict))\n result=result+self.bigsection(\n 'Functions','#ffffff','#eeaa77',' '.join(contents))\n if data:\n contents=[]\n for key,value in data:\n contents.append(self.document(value,key))\n result=result+self.bigsection(\n 'Data','#ffffff','#55aa55','
\\n'.join(contents))\n if hasattr(object,'__author__'):\n contents=self.markup(str(object.__author__),self.preformat)\n result=result+self.bigsection(\n 'Author','#ffffff','#7799ee',contents)\n if hasattr(object,'__credits__'):\n contents=self.markup(str(object.__credits__),self.preformat)\n result=result+self.bigsection(\n 'Credits','#ffffff','#7799ee',contents)\n \n return result\n \n def docclass(self,object,name=None ,mod=None ,funcs={},classes={},\n *ignored):\n ''\n realname=object.__name__\n name=name or realname\n bases=object.__bases__\n \n contents=[]\n push=contents.append\n \n \n class HorizontalRule:\n def __init__(self):\n self.needone=0\n def maybe(self):\n if self.needone:\n push('
\\n')\n self.needone=1\n hr=HorizontalRule()\n \n \n mro=deque(inspect.getmro(object))\n if len(mro)>2:\n hr.maybe()\n push('
Method resolution order:
\\n')\n for base in mro:\n push('
%s
\\n'%self.classlink(base,\n object.__module__))\n push('
\\n')\n \n def spill(msg,attrs,predicate):\n ok,attrs=_split_list(attrs,predicate)\n if ok:\n hr.maybe()\n push(msg)\n for name,kind,homecls,value in ok:\n try :\n value=getattr(object,name)\n except Exception:\n \n \n push(self.docdata(value,name,mod))\n else :\n push(self.document(value,name,mod,\n funcs,classes,mdict,object))\n push('\\n')\n return attrs\n \n def spilldescriptors(msg,attrs,predicate):\n ok,attrs=_split_list(attrs,predicate)\n if ok:\n hr.maybe()\n push(msg)\n for name,kind,homecls,value in ok:\n push(self.docdata(value,name,mod))\n return attrs\n \n def spilldata(msg,attrs,predicate):\n ok,attrs=_split_list(attrs,predicate)\n if ok:\n hr.maybe()\n push(msg)\n for name,kind,homecls,value in ok:\n base=self.docother(getattr(object,name),name,mod)\n doc=getdoc(value)\n if not doc:\n push('
%s
\\n'%base)\n else :\n doc=self.markup(getdoc(value),self.preformat,\n funcs,classes,mdict)\n doc='
%s'%doc\n push('
%s%s
\\n'%(base,doc))\n push('\\n')\n return attrs\n \n attrs=[(name,kind,cls,value)\n for name,kind,cls,value in classify_class_attrs(object)\n if visiblename(name,obj=object)]\n \n mdict={}\n for key,kind,homecls,value in attrs:\n mdict[key]=anchor='#'+name+'-'+key\n try :\n value=getattr(object,name)\n except Exception:\n \n \n pass\n try :\n \n \n mdict[value]=anchor\n except TypeError:\n pass\n \n while attrs:\n if mro:\n thisclass=mro.popleft()\n else :\n thisclass=attrs[0][2]\n attrs,inherited=_split_list(attrs,lambda t:t[2]is thisclass)\n \n if object is not builtins.object and thisclass is builtins.object:\n attrs=inherited\n continue\n elif thisclass is object:\n tag='defined here'\n else :\n tag='inherited from %s'%self.classlink(thisclass,\n object.__module__)\n tag +=':
\\n'\n \n sort_attributes(attrs,object)\n \n \n attrs=spill('Methods %s'%tag,attrs,\n lambda t:t[1]=='method')\n attrs=spill('Class methods %s'%tag,attrs,\n lambda t:t[1]=='class method')\n attrs=spill('Static methods %s'%tag,attrs,\n lambda t:t[1]=='static method')\n attrs=spilldescriptors(\"Readonly properties %s\"%tag,attrs,\n lambda t:t[1]=='readonly property')\n attrs=spilldescriptors('Data descriptors %s'%tag,attrs,\n lambda t:t[1]=='data descriptor')\n attrs=spilldata('Data and other attributes %s'%tag,attrs,\n lambda t:t[1]=='data')\n assert attrs ==[]\n attrs=inherited\n \n contents=''.join(contents)\n \n if name ==realname:\n title='class %s'%(\n name,realname)\n else :\n title='%s = class %s'%(\n name,name,realname)\n if bases:\n parents=[]\n for base in bases:\n parents.append(self.classlink(base,object.__module__))\n title=title+'(%s)'%', '.join(parents)\n \n decl=''\n try :\n signature=inspect.signature(object)\n except (ValueError,TypeError):\n signature=None\n if signature:\n argspec=str(signature)\n if argspec and argspec !='()':\n decl=name+self.escape(argspec)+'\\n\\n'\n \n doc=getdoc(object)\n if decl:\n doc=decl+(doc or '')\n doc=self.markup(doc,self.preformat,funcs,classes,mdict)\n doc=doc and '%s
 
'%doc\n \n return self.section(title,'#000000','#ffc8d8',contents,3,doc)\n \n def formatvalue(self,object):\n ''\n return self.grey('='+self.repr(object))\n \n def docroutine(self,object,name=None ,mod=None ,\n funcs={},classes={},methods={},cl=None ):\n ''\n realname=object.__name__\n name=name or realname\n anchor=(cl and cl.__name__ or '')+'-'+name\n note=''\n skipdocs=0\n if _is_bound_method(object):\n imclass=object.__self__.__class__\n if cl:\n if imclass is not cl:\n note=' from '+self.classlink(imclass,mod)\n else :\n if object.__self__ is not None :\n note=' method of %s instance'%self.classlink(\n object.__self__.__class__,mod)\n else :\n note=' unbound %s method'%self.classlink(imclass,mod)\n \n if (inspect.iscoroutinefunction(object)or\n inspect.isasyncgenfunction(object)):\n asyncqualifier='async '\n else :\n asyncqualifier=''\n \n if name ==realname:\n title='%s'%(anchor,realname)\n else :\n if cl and inspect.getattr_static(cl,realname,[])is object:\n reallink='%s'%(\n cl.__name__+'-'+realname,realname)\n skipdocs=1\n else :\n reallink=realname\n title='%s = %s'%(\n anchor,name,reallink)\n argspec=None\n if inspect.isroutine(object):\n try :\n signature=inspect.signature(object)\n except (ValueError,TypeError):\n signature=None\n if signature:\n argspec=str(signature)\n if realname =='':\n title='%s lambda '%name\n \n \n \n argspec=argspec[1:-1]\n if not argspec:\n argspec='(...)'\n \n decl=asyncqualifier+title+self.escape(argspec)+(note and\n self.grey('%s'%note))\n \n if skipdocs:\n return '
%s
\\n'%decl\n else :\n doc=self.markup(\n getdoc(object),self.preformat,funcs,classes,methods)\n doc=doc and '
%s
'%doc\n return '
%s
%s
\\n'%(decl,doc)\n \n def docdata(self,object,name=None ,mod=None ,cl=None ):\n ''\n results=[]\n push=results.append\n \n if name:\n push('
%s
\\n'%name)\n doc=self.markup(getdoc(object),self.preformat)\n if doc:\n push('
%s
\\n'%doc)\n push('
\\n')\n \n return ''.join(results)\n \n docproperty=docdata\n \n def docother(self,object,name=None ,mod=None ,*ignored):\n ''\n lhs=name and '%s = '%name or ''\n return lhs+self.repr(object)\n \n def index(self,dir,shadowed=None ):\n ''\n modpkgs=[]\n if shadowed is None :shadowed={}\n for importer,name,ispkg in pkgutil.iter_modules([dir]):\n if any((0xD800 <=ord(ch)<=0xDFFF)for ch in name):\n \n continue\n modpkgs.append((name,'',ispkg,name in shadowed))\n shadowed[name]=1\n \n modpkgs.sort()\n contents=self.multicolumn(modpkgs,self.modpkglink)\n return self.bigsection(dir,'#ffffff','#ee77aa',contents)\n \n \n \nclass TextRepr(Repr):\n ''\n def __init__(self):\n Repr.__init__(self)\n self.maxlist=self.maxtuple=20\n self.maxdict=10\n self.maxstring=self.maxother=100\n \n def repr1(self,x,level):\n if hasattr(type(x),'__name__'):\n methodname='repr_'+'_'.join(type(x).__name__.split())\n if hasattr(self,methodname):\n return getattr(self,methodname)(x,level)\n return cram(stripid(repr(x)),self.maxother)\n \n def repr_string(self,x,level):\n test=cram(x,self.maxstring)\n testrepr=repr(test)\n if '\\\\'in test and '\\\\'not in replace(testrepr,r'\\\\',''):\n \n \n return 'r'+testrepr[0]+test+testrepr[0]\n return testrepr\n \n repr_str=repr_string\n \n def repr_instance(self,x,level):\n try :\n return cram(stripid(repr(x)),self.maxstring)\n except :\n return '<%s instance>'%x.__class__.__name__\n \nclass TextDoc(Doc):\n ''\n \n \n \n _repr_instance=TextRepr()\n repr=_repr_instance.repr\n \n def bold(self,text):\n ''\n return ''.join(ch+'\\b'+ch for ch in text)\n \n def indent(self,text,prefix=' '):\n ''\n if not text:return ''\n lines=[prefix+line for line in text.split('\\n')]\n if lines:lines[-1]=lines[-1].rstrip()\n return '\\n'.join(lines)\n \n def section(self,title,contents):\n ''\n clean_contents=self.indent(contents).rstrip()\n return self.bold(title)+'\\n'+clean_contents+'\\n\\n'\n \n \n \n def formattree(self,tree,modname,parent=None ,prefix=''):\n ''\n result=''\n for entry in tree:\n if type(entry)is type(()):\n c,bases=entry\n result=result+prefix+classname(c,modname)\n if bases and bases !=(parent,):\n parents=(classname(c,modname)for c in bases)\n result=result+'(%s)'%', '.join(parents)\n result=result+'\\n'\n elif type(entry)is type([]):\n result=result+self.formattree(\n entry,modname,c,prefix+' ')\n return result\n \n def docmodule(self,object,name=None ,mod=None ):\n ''\n name=object.__name__\n synop,desc=splitdoc(getdoc(object))\n result=self.section('NAME',name+(synop and ' - '+synop))\n all=getattr(object,'__all__',None )\n docloc=self.getdocloc(object)\n if docloc is not None :\n result=result+self.section('MODULE REFERENCE',docloc+\"\"\"\n\nThe following documentation is automatically generated from the Python\nsource files. It may be incomplete, incorrect or include features that\nare considered implementation detail and may vary between Python\nimplementations. 
When in doubt, consult the module reference at the\nlocation listed above.\n\"\"\")\n \n if desc:\n result=result+self.section('DESCRIPTION',desc)\n \n classes=[]\n for key,value in inspect.getmembers(object,inspect.isclass):\n \n if (all is not None\n or (inspect.getmodule(value)or object)is object):\n if visiblename(key,all,object):\n classes.append((key,value))\n funcs=[]\n for key,value in inspect.getmembers(object,inspect.isroutine):\n \n if (all is not None or\n inspect.isbuiltin(value)or inspect.getmodule(value)is object):\n if visiblename(key,all,object):\n funcs.append((key,value))\n data=[]\n for key,value in inspect.getmembers(object,isdata):\n if visiblename(key,all,object):\n data.append((key,value))\n \n modpkgs=[]\n modpkgs_names=set()\n if hasattr(object,'__path__'):\n for importer,modname,ispkg in pkgutil.iter_modules(object.__path__):\n modpkgs_names.add(modname)\n if ispkg:\n modpkgs.append(modname+' (package)')\n else :\n modpkgs.append(modname)\n \n modpkgs.sort()\n result=result+self.section(\n 'PACKAGE CONTENTS','\\n'.join(modpkgs))\n \n \n submodules=[]\n for key,value in inspect.getmembers(object,inspect.ismodule):\n if value.__name__.startswith(name+'.')and key not in modpkgs_names:\n submodules.append(key)\n if submodules:\n submodules.sort()\n result=result+self.section(\n 'SUBMODULES','\\n'.join(submodules))\n \n if classes:\n classlist=[value for key,value in classes]\n contents=[self.formattree(\n inspect.getclasstree(classlist,1),name)]\n for key,value in classes:\n contents.append(self.document(value,key,name))\n result=result+self.section('CLASSES','\\n'.join(contents))\n \n if funcs:\n contents=[]\n for key,value in funcs:\n contents.append(self.document(value,key,name))\n result=result+self.section('FUNCTIONS','\\n'.join(contents))\n \n if data:\n contents=[]\n for key,value in data:\n contents.append(self.docother(value,key,name,maxlen=70))\n result=result+self.section('DATA','\\n'.join(contents))\n \n if hasattr(object,'__version__'):\n version=str(object.__version__)\n if version[:11]=='$'+'Revision: 'and version[-1:]=='$':\n version=version[11:-1].strip()\n result=result+self.section('VERSION',version)\n if hasattr(object,'__date__'):\n result=result+self.section('DATE',str(object.__date__))\n if hasattr(object,'__author__'):\n result=result+self.section('AUTHOR',str(object.__author__))\n if hasattr(object,'__credits__'):\n result=result+self.section('CREDITS',str(object.__credits__))\n try :\n file=inspect.getabsfile(object)\n except TypeError:\n file='(built-in)'\n result=result+self.section('FILE',file)\n return result\n \n def docclass(self,object,name=None ,mod=None ,*ignored):\n ''\n realname=object.__name__\n name=name or realname\n bases=object.__bases__\n \n def makename(c,m=object.__module__):\n return classname(c,m)\n \n if name ==realname:\n title='class '+self.bold(realname)\n else :\n title=self.bold(name)+' = class '+realname\n if bases:\n parents=map(makename,bases)\n title=title+'(%s)'%', '.join(parents)\n \n contents=[]\n push=contents.append\n \n try :\n signature=inspect.signature(object)\n except (ValueError,TypeError):\n signature=None\n if signature:\n argspec=str(signature)\n if argspec and argspec !='()':\n push(name+argspec+'\\n')\n \n doc=getdoc(object)\n if doc:\n push(doc+'\\n')\n \n \n mro=deque(inspect.getmro(object))\n if len(mro)>2:\n push(\"Method resolution order:\")\n for base in mro:\n push(' '+makename(base))\n push('')\n \n \n subclasses=sorted(\n (str(cls.__name__)for cls in type.__subclasses__(object)\n if not 
cls.__name__.startswith(\"_\")and cls.__module__ ==\"builtins\"),\n key=str.lower\n )\n no_of_subclasses=len(subclasses)\n MAX_SUBCLASSES_TO_DISPLAY=4\n if subclasses:\n push(\"Built-in subclasses:\")\n for subclassname in subclasses[:MAX_SUBCLASSES_TO_DISPLAY]:\n push(' '+subclassname)\n if no_of_subclasses >MAX_SUBCLASSES_TO_DISPLAY:\n push(' ... and '+\n str(no_of_subclasses -MAX_SUBCLASSES_TO_DISPLAY)+\n ' other subclasses')\n push('')\n \n \n class HorizontalRule:\n def __init__(self):\n self.needone=0\n def maybe(self):\n if self.needone:\n push('-'*70)\n self.needone=1\n hr=HorizontalRule()\n \n def spill(msg,attrs,predicate):\n ok,attrs=_split_list(attrs,predicate)\n if ok:\n hr.maybe()\n push(msg)\n for name,kind,homecls,value in ok:\n try :\n value=getattr(object,name)\n except Exception:\n \n \n push(self.docdata(value,name,mod))\n else :\n push(self.document(value,\n name,mod,object))\n return attrs\n \n def spilldescriptors(msg,attrs,predicate):\n ok,attrs=_split_list(attrs,predicate)\n if ok:\n hr.maybe()\n push(msg)\n for name,kind,homecls,value in ok:\n push(self.docdata(value,name,mod))\n return attrs\n \n def spilldata(msg,attrs,predicate):\n ok,attrs=_split_list(attrs,predicate)\n if ok:\n hr.maybe()\n push(msg)\n for name,kind,homecls,value in ok:\n doc=getdoc(value)\n try :\n obj=getattr(object,name)\n except AttributeError:\n obj=homecls.__dict__[name]\n push(self.docother(obj,name,mod,maxlen=70,doc=doc)+\n '\\n')\n return attrs\n \n attrs=[(name,kind,cls,value)\n for name,kind,cls,value in classify_class_attrs(object)\n if visiblename(name,obj=object)]\n \n while attrs:\n if mro:\n thisclass=mro.popleft()\n else :\n thisclass=attrs[0][2]\n attrs,inherited=_split_list(attrs,lambda t:t[2]is thisclass)\n \n if object is not builtins.object and thisclass is builtins.object:\n attrs=inherited\n continue\n elif thisclass is object:\n tag=\"defined here\"\n else :\n tag=\"inherited from %s\"%classname(thisclass,\n object.__module__)\n \n sort_attributes(attrs,object)\n \n \n attrs=spill(\"Methods %s:\\n\"%tag,attrs,\n lambda t:t[1]=='method')\n attrs=spill(\"Class methods %s:\\n\"%tag,attrs,\n lambda t:t[1]=='class method')\n attrs=spill(\"Static methods %s:\\n\"%tag,attrs,\n lambda t:t[1]=='static method')\n attrs=spilldescriptors(\"Readonly properties %s:\\n\"%tag,attrs,\n lambda t:t[1]=='readonly property')\n attrs=spilldescriptors(\"Data descriptors %s:\\n\"%tag,attrs,\n lambda t:t[1]=='data descriptor')\n attrs=spilldata(\"Data and other attributes %s:\\n\"%tag,attrs,\n lambda t:t[1]=='data')\n \n assert attrs ==[]\n attrs=inherited\n \n contents='\\n'.join(contents)\n if not contents:\n return title+'\\n'\n return title+'\\n'+self.indent(contents.rstrip(),' | ')+'\\n'\n \n def formatvalue(self,object):\n ''\n return '='+self.repr(object)\n \n def docroutine(self,object,name=None ,mod=None ,cl=None ):\n ''\n realname=object.__name__\n name=name or realname\n note=''\n skipdocs=0\n if _is_bound_method(object):\n imclass=object.__self__.__class__\n if cl:\n if imclass is not cl:\n note=' from '+classname(imclass,mod)\n else :\n if object.__self__ is not None :\n note=' method of %s instance'%classname(\n object.__self__.__class__,mod)\n else :\n note=' unbound %s method'%classname(imclass,mod)\n \n if (inspect.iscoroutinefunction(object)or\n inspect.isasyncgenfunction(object)):\n asyncqualifier='async '\n else :\n asyncqualifier=''\n \n if name ==realname:\n title=self.bold(realname)\n else :\n if cl and inspect.getattr_static(cl,realname,[])is object:\n skipdocs=1\n 
title=self.bold(name)+' = '+realname\n argspec=None\n \n if inspect.isroutine(object):\n try :\n signature=inspect.signature(object)\n except (ValueError,TypeError):\n signature=None\n if signature:\n argspec=str(signature)\n if realname =='':\n title=self.bold(name)+' lambda '\n \n \n \n argspec=argspec[1:-1]\n if not argspec:\n argspec='(...)'\n decl=asyncqualifier+title+argspec+note\n \n if skipdocs:\n return decl+'\\n'\n else :\n doc=getdoc(object)or ''\n return decl+'\\n'+(doc and self.indent(doc).rstrip()+'\\n')\n \n def docdata(self,object,name=None ,mod=None ,cl=None ):\n ''\n results=[]\n push=results.append\n \n if name:\n push(self.bold(name))\n push('\\n')\n doc=getdoc(object)or ''\n if doc:\n push(self.indent(doc))\n push('\\n')\n return ''.join(results)\n \n docproperty=docdata\n \n def docother(self,object,name=None ,mod=None ,parent=None ,maxlen=None ,doc=None ):\n ''\n repr=self.repr(object)\n if maxlen:\n line=(name and name+' = 'or '')+repr\n chop=maxlen -len(line)\n if chop <0:repr=repr[:chop]+'...'\n line=(name and self.bold(name)+' = 'or '')+repr\n if not doc:\n doc=getdoc(object)\n if doc:\n line +='\\n'+self.indent(str(doc))+'\\n'\n return line\n \nclass _PlainTextDoc(TextDoc):\n ''\n def bold(self,text):\n return text\n \n \n \ndef pager(text):\n ''\n global pager\n pager=getpager()\n pager(text)\n \ndef getpager():\n ''\n if not hasattr(sys.stdin,\"isatty\"):\n return plainpager\n if not hasattr(sys.stdout,\"isatty\"):\n return plainpager\n if not sys.stdin.isatty()or not sys.stdout.isatty():\n return plainpager\n use_pager=os.environ.get('MANPAGER')or os.environ.get('PAGER')\n if use_pager:\n if sys.platform =='win32':\n return lambda text:tempfilepager(plain(text),use_pager)\n elif os.environ.get('TERM')in ('dumb','emacs'):\n return lambda text:pipepager(plain(text),use_pager)\n else :\n return lambda text:pipepager(text,use_pager)\n if os.environ.get('TERM')in ('dumb','emacs'):\n return plainpager\n if sys.platform =='win32':\n return lambda text:tempfilepager(plain(text),'more <')\n if hasattr(os,'system')and os.system('(less) 2>/dev/null')==0:\n return lambda text:pipepager(text,'less')\n \n import tempfile\n (fd,filename)=tempfile.mkstemp()\n os.close(fd)\n try :\n if hasattr(os,'system')and os.system('more \"%s\"'%filename)==0:\n return lambda text:pipepager(text,'more')\n else :\n return ttypager\n finally :\n os.unlink(filename)\n \ndef plain(text):\n ''\n return re.sub('.\\b','',text)\n \ndef pipepager(text,cmd):\n ''\n import subprocess\n proc=subprocess.Popen(cmd,shell=True ,stdin=subprocess.PIPE)\n try :\n with io.TextIOWrapper(proc.stdin,errors='backslashreplace')as pipe:\n try :\n pipe.write(text)\n except KeyboardInterrupt:\n \n \n pass\n except OSError:\n pass\n while True :\n try :\n proc.wait()\n break\n except KeyboardInterrupt:\n \n \n pass\n \ndef tempfilepager(text,cmd):\n ''\n import tempfile\n filename=tempfile.mktemp()\n with open(filename,'w',errors='backslashreplace')as file:\n file.write(text)\n try :\n os.system(cmd+' \"'+filename+'\"')\n finally :\n os.unlink(filename)\n \ndef _escape_stdout(text):\n\n encoding=getattr(sys.stdout,'encoding',None )or 'utf-8'\n return text.encode(encoding,'backslashreplace').decode(encoding)\n \ndef ttypager(text):\n ''\n lines=plain(_escape_stdout(text)).split('\\n')\n try :\n import tty\n fd=sys.stdin.fileno()\n old=tty.tcgetattr(fd)\n tty.setcbreak(fd)\n getchar=lambda :sys.stdin.read(1)\n except (ImportError,AttributeError,io.UnsupportedOperation):\n tty=None\n getchar=lambda 
:sys.stdin.readline()[:-1][:1]\n \n try :\n try :\n h=int(os.environ.get('LINES',0))\n except ValueError:\n h=0\n if h <=1:\n h=25\n r=inc=h -1\n sys.stdout.write('\\n'.join(lines[:inc])+'\\n')\n while lines[r:]:\n sys.stdout.write('-- more --')\n sys.stdout.flush()\n c=getchar()\n \n if c in ('q','Q'):\n sys.stdout.write('\\r \\r')\n break\n elif c in ('\\r','\\n'):\n sys.stdout.write('\\r \\r'+lines[r]+'\\n')\n r=r+1\n continue\n if c in ('b','B','\\x1b'):\n r=r -inc -inc\n if r <0:r=0\n sys.stdout.write('\\n'+'\\n'.join(lines[r:r+inc])+'\\n')\n r=r+inc\n \n finally :\n if tty:\n tty.tcsetattr(fd,tty.TCSAFLUSH,old)\n \ndef plainpager(text):\n ''\n sys.stdout.write(plain(_escape_stdout(text)))\n \ndef describe(thing):\n ''\n if inspect.ismodule(thing):\n if thing.__name__ in sys.builtin_module_names:\n return 'built-in module '+thing.__name__\n if hasattr(thing,'__path__'):\n return 'package '+thing.__name__\n else :\n return 'module '+thing.__name__\n if inspect.isbuiltin(thing):\n return 'built-in function '+thing.__name__\n if inspect.isgetsetdescriptor(thing):\n return 'getset descriptor %s.%s.%s'%(\n thing.__objclass__.__module__,thing.__objclass__.__name__,\n thing.__name__)\n if inspect.ismemberdescriptor(thing):\n return 'member descriptor %s.%s.%s'%(\n thing.__objclass__.__module__,thing.__objclass__.__name__,\n thing.__name__)\n if inspect.isclass(thing):\n return 'class '+thing.__name__\n if inspect.isfunction(thing):\n return 'function '+thing.__name__\n if inspect.ismethod(thing):\n return 'method '+thing.__name__\n return type(thing).__name__\n \ndef locate(path,forceload=0):\n ''\n parts=[part for part in path.split('.')if part]\n module,n=None ,0\n while n >','&',\n '|','^','~','<','>','<=','>=','==','!=','<>'),\n 'COMPARISON':('<','>','<=','>=','==','!=','<>'),\n 'UNARY':('-','~'),\n 'AUGMENTEDASSIGNMENT':('+=','-=','*=','/=','%=','&=','|=',\n '^=','<<=','>>=','**=','//='),\n 'BITWISE':('<<','>>','&','|','^','~'),\n 'COMPLEX':('j','J')\n }\n symbols={\n '%':'OPERATORS FORMATTING',\n '**':'POWER',\n ',':'TUPLES LISTS FUNCTIONS',\n '.':'ATTRIBUTES FLOAT MODULES OBJECTS',\n '...':'ELLIPSIS',\n ':':'SLICINGS DICTIONARYLITERALS',\n '@':'def class',\n '\\\\':'STRINGS',\n '_':'PRIVATENAMES',\n '__':'PRIVATENAMES SPECIALMETHODS',\n '`':'BACKQUOTES',\n '(':'TUPLES FUNCTIONS CALLS',\n ')':'TUPLES FUNCTIONS CALLS',\n '[':'LISTS SUBSCRIPTS SLICINGS',\n ']':'LISTS SUBSCRIPTS SLICINGS'\n }\n for topic,symbols_ in _symbols_inverse.items():\n for symbol in symbols_:\n topics=symbols.get(symbol,topic)\n if topic not in topics:\n topics=topics+' '+topic\n symbols[symbol]=topics\n \n topics={\n 'TYPES':('types','STRINGS UNICODE NUMBERS SEQUENCES MAPPINGS '\n 'FUNCTIONS CLASSES MODULES FILES inspect'),\n 'STRINGS':('strings','str UNICODE SEQUENCES STRINGMETHODS '\n 'FORMATTING TYPES'),\n 'STRINGMETHODS':('string-methods','STRINGS FORMATTING'),\n 'FORMATTING':('formatstrings','OPERATORS'),\n 'UNICODE':('strings','encodings unicode SEQUENCES STRINGMETHODS '\n 'FORMATTING TYPES'),\n 'NUMBERS':('numbers','INTEGER FLOAT COMPLEX TYPES'),\n 'INTEGER':('integers','int range'),\n 'FLOAT':('floating','float math'),\n 'COMPLEX':('imaginary','complex cmath'),\n 'SEQUENCES':('typesseq','STRINGMETHODS FORMATTING range LISTS'),\n 'MAPPINGS':'DICTIONARIES',\n 'FUNCTIONS':('typesfunctions','def TYPES'),\n 'METHODS':('typesmethods','class def CLASSES TYPES'),\n 'CODEOBJECTS':('bltin-code-objects','compile FUNCTIONS TYPES'),\n 'TYPEOBJECTS':('bltin-type-objects','types TYPES'),\n 'FRAMEOBJECTS':'TYPES',\n 
'TRACEBACKS':'TYPES',\n 'NONE':('bltin-null-object',''),\n 'ELLIPSIS':('bltin-ellipsis-object','SLICINGS'),\n 'SPECIALATTRIBUTES':('specialattrs',''),\n 'CLASSES':('types','class SPECIALMETHODS PRIVATENAMES'),\n 'MODULES':('typesmodules','import'),\n 'PACKAGES':'import',\n 'EXPRESSIONS':('operator-summary','lambda or and not in is BOOLEAN '\n 'COMPARISON BITWISE SHIFTING BINARY FORMATTING POWER '\n 'UNARY ATTRIBUTES SUBSCRIPTS SLICINGS CALLS TUPLES '\n 'LISTS DICTIONARIES'),\n 'OPERATORS':'EXPRESSIONS',\n 'PRECEDENCE':'EXPRESSIONS',\n 'OBJECTS':('objects','TYPES'),\n 'SPECIALMETHODS':('specialnames','BASICMETHODS ATTRIBUTEMETHODS '\n 'CALLABLEMETHODS SEQUENCEMETHODS MAPPINGMETHODS '\n 'NUMBERMETHODS CLASSES'),\n 'BASICMETHODS':('customization','hash repr str SPECIALMETHODS'),\n 'ATTRIBUTEMETHODS':('attribute-access','ATTRIBUTES SPECIALMETHODS'),\n 'CALLABLEMETHODS':('callable-types','CALLS SPECIALMETHODS'),\n 'SEQUENCEMETHODS':('sequence-types','SEQUENCES SEQUENCEMETHODS '\n 'SPECIALMETHODS'),\n 'MAPPINGMETHODS':('sequence-types','MAPPINGS SPECIALMETHODS'),\n 'NUMBERMETHODS':('numeric-types','NUMBERS AUGMENTEDASSIGNMENT '\n 'SPECIALMETHODS'),\n 'EXECUTION':('execmodel','NAMESPACES DYNAMICFEATURES EXCEPTIONS'),\n 'NAMESPACES':('naming','global nonlocal ASSIGNMENT DELETION DYNAMICFEATURES'),\n 'DYNAMICFEATURES':('dynamic-features',''),\n 'SCOPING':'NAMESPACES',\n 'FRAMES':'NAMESPACES',\n 'EXCEPTIONS':('exceptions','try except finally raise'),\n 'CONVERSIONS':('conversions',''),\n 'IDENTIFIERS':('identifiers','keywords SPECIALIDENTIFIERS'),\n 'SPECIALIDENTIFIERS':('id-classes',''),\n 'PRIVATENAMES':('atom-identifiers',''),\n 'LITERALS':('atom-literals','STRINGS NUMBERS TUPLELITERALS '\n 'LISTLITERALS DICTIONARYLITERALS'),\n 'TUPLES':'SEQUENCES',\n 'TUPLELITERALS':('exprlists','TUPLES LITERALS'),\n 'LISTS':('typesseq-mutable','LISTLITERALS'),\n 'LISTLITERALS':('lists','LISTS LITERALS'),\n 'DICTIONARIES':('typesmapping','DICTIONARYLITERALS'),\n 'DICTIONARYLITERALS':('dict','DICTIONARIES LITERALS'),\n 'ATTRIBUTES':('attribute-references','getattr hasattr setattr ATTRIBUTEMETHODS'),\n 'SUBSCRIPTS':('subscriptions','SEQUENCEMETHODS'),\n 'SLICINGS':('slicings','SEQUENCEMETHODS'),\n 'CALLS':('calls','EXPRESSIONS'),\n 'POWER':('power','EXPRESSIONS'),\n 'UNARY':('unary','EXPRESSIONS'),\n 'BINARY':('binary','EXPRESSIONS'),\n 'SHIFTING':('shifting','EXPRESSIONS'),\n 'BITWISE':('bitwise','EXPRESSIONS'),\n 'COMPARISON':('comparisons','EXPRESSIONS BASICMETHODS'),\n 'BOOLEAN':('booleans','EXPRESSIONS TRUTHVALUE'),\n 'ASSERTION':'assert',\n 'ASSIGNMENT':('assignment','AUGMENTEDASSIGNMENT'),\n 'AUGMENTEDASSIGNMENT':('augassign','NUMBERMETHODS'),\n 'DELETION':'del',\n 'RETURNING':'return',\n 'IMPORTING':'import',\n 'CONDITIONAL':'if',\n 'LOOPING':('compound','for while break continue'),\n 'TRUTHVALUE':('truth','if while and or not BASICMETHODS'),\n 'DEBUGGING':('debugger','pdb'),\n 'CONTEXTMANAGERS':('context-managers','with'),\n }\n \n def __init__(self,input=None ,output=None ):\n self._input=input\n self._output=output\n \n @property\n def input(self):\n return self._input or sys.stdin\n \n @property\n def output(self):\n return self._output or sys.stdout\n \n def __repr__(self):\n if inspect.stack()[1][3]=='?':\n self()\n return ''\n return '<%s.%s instance>'%(self.__class__.__module__,\n self.__class__.__qualname__)\n \n _GoInteractive=object()\n def __call__(self,request=_GoInteractive):\n if request is not self._GoInteractive:\n self.help(request)\n else :\n self.intro()\n self.interact()\n 
self.output.write('''\nYou are now leaving help and returning to the Python interpreter.\nIf you want to ask for help on a particular object directly from the\ninterpreter, you can type \"help(object)\". Executing \"help('string')\"\nhas the same effect as typing a particular string at the help> prompt.\n''')\n \n def interact(self):\n self.output.write('\\n')\n while True :\n try :\n request=self.getline('help> ')\n if not request:break\n except (KeyboardInterrupt,EOFError):\n break\n request=request.strip()\n \n \n \n if (len(request)>2 and request[0]==request[-1]in (\"'\",'\"')\n and request[0]not in request[1:-1]):\n request=request[1:-1]\n if request.lower()in ('q','quit'):break\n if request =='help':\n self.intro()\n else :\n self.help(request)\n \n def getline(self,prompt):\n ''\n if self.input is sys.stdin:\n return input(prompt)\n else :\n self.output.write(prompt)\n self.output.flush()\n return self.input.readline()\n \n def help(self,request):\n if type(request)is type(''):\n request=request.strip()\n if request =='keywords':self.listkeywords()\n elif request =='symbols':self.listsymbols()\n elif request =='topics':self.listtopics()\n elif request =='modules':self.listmodules()\n elif request[:8]=='modules ':\n self.listmodules(request.split()[1])\n elif request in self.symbols:self.showsymbol(request)\n elif request in ['True','False','None']:\n \n doc(eval(request),'Help on %s:')\n elif request in self.keywords:self.showtopic(request)\n elif request in self.topics:self.showtopic(request)\n elif request:doc(request,'Help on %s:',output=self._output)\n else :doc(str,'Help on %s:',output=self._output)\n elif isinstance(request,Helper):self()\n else :doc(request,'Help on %s:',output=self._output)\n self.output.write('\\n')\n \n def intro(self):\n self.output.write('''\nWelcome to Python {0}'s help utility!\n\nIf this is your first time using Python, you should definitely check out\nthe tutorial on the Internet at https://docs.python.org/{0}/tutorial/.\n\nEnter the name of any module, keyword, or topic to get help on writing\nPython programs and using Python modules. To quit this help utility and\nreturn to the interpreter, just type \"quit\".\n\nTo get a list of available modules, keywords, symbols, or topics, type\n\"modules\", \"keywords\", \"symbols\", or \"topics\". 
Each module also comes\nwith a one-line summary of what it does; to list the modules whose name\nor summary contain a given string such as \"spam\", type \"modules spam\".\n'''.format('%d.%d'%sys.version_info[:2]))\n \n def list(self,items,columns=4,width=80):\n items=list(sorted(items))\n colw=width //columns\n rows=(len(items)+columns -1)//columns\n for row in range(rows):\n for col in range(columns):\n i=col *rows+row\n if i =0:\n callback(None ,modname,desc)\n \n for importer,modname,ispkg in pkgutil.walk_packages(onerror=onerror):\n if self.quit:\n break\n \n if key is None :\n callback(None ,modname,'')\n else :\n try :\n spec=pkgutil._get_spec(importer,modname)\n except SyntaxError:\n \n continue\n loader=spec.loader\n if hasattr(loader,'get_source'):\n try :\n source=loader.get_source(modname)\n except Exception:\n if onerror:\n onerror(modname)\n continue\n desc=source_synopsis(io.StringIO(source))or ''\n if hasattr(loader,'get_filename'):\n path=loader.get_filename(modname)\n else :\n path=None\n else :\n try :\n module=importlib._bootstrap._load(spec)\n except ImportError:\n if onerror:\n onerror(modname)\n continue\n desc=module.__doc__.splitlines()[0]if module.__doc__ else ''\n path=getattr(module,'__file__',None )\n name=modname+' - '+desc\n if name.lower().find(key)>=0:\n callback(path,modname,desc)\n \n if completer:\n completer()\n \ndef apropos(key):\n ''\n def callback(path,modname,desc):\n if modname[-9:]=='.__init__':\n modname=modname[:-9]+' (package)'\n print(modname,desc and '- '+desc)\n def onerror(modname):\n pass\n with warnings.catch_warnings():\n warnings.filterwarnings('ignore')\n ModuleScanner().run(callback,key,onerror=onerror)\n \n \n \ndef _start_server(urlhandler,hostname,port):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n import http.server\n import email.message\n import select\n import threading\n \n class DocHandler(http.server.BaseHTTPRequestHandler):\n \n def do_GET(self):\n ''\n\n\n\n \n if self.path.endswith('.css'):\n content_type='text/css'\n else :\n content_type='text/html'\n self.send_response(200)\n self.send_header('Content-Type','%s; charset=UTF-8'%content_type)\n self.end_headers()\n self.wfile.write(self.urlhandler(\n self.path,content_type).encode('utf-8'))\n \n def log_message(self,*args):\n \n pass\n \n class DocServer(http.server.HTTPServer):\n \n def __init__(self,host,port,callback):\n self.host=host\n self.address=(self.host,port)\n self.callback=callback\n self.base.__init__(self,self.address,self.handler)\n self.quit=False\n \n def serve_until_quit(self):\n while not self.quit:\n rd,wr,ex=select.select([self.socket.fileno()],[],[],1)\n if rd:\n self.handle_request()\n self.server_close()\n \n def server_activate(self):\n self.base.server_activate(self)\n if self.callback:\n self.callback(self)\n \n class ServerThread(threading.Thread):\n \n def __init__(self,urlhandler,host,port):\n self.urlhandler=urlhandler\n self.host=host\n self.port=int(port)\n threading.Thread.__init__(self)\n self.serving=False\n self.error=None\n \n def run(self):\n ''\n try :\n DocServer.base=http.server.HTTPServer\n DocServer.handler=DocHandler\n DocHandler.MessageClass=email.message.Message\n DocHandler.urlhandler=staticmethod(self.urlhandler)\n docsvr=DocServer(self.host,self.port,self.ready)\n self.docserver=docsvr\n docsvr.serve_until_quit()\n except Exception as e:\n self.error=e\n \n def ready(self,server):\n self.serving=True\n self.host=server.host\n 
self.port=server.server_port\n self.url='http://%s:%d/'%(self.host,self.port)\n \n def stop(self):\n ''\n self.docserver.quit=True\n self.join()\n \n \n self.docserver=None\n self.serving=False\n self.url=None\n \n thread=ServerThread(urlhandler,hostname,port)\n thread.start()\n \n \n while not thread.error and not thread.serving:\n time.sleep(.01)\n return thread\n \n \ndef _url_handler(url,content_type=\"text/html\"):\n ''\n\n\n\n\n\n\n \n class _HTMLDoc(HTMLDoc):\n \n def page(self,title,contents):\n ''\n css_path=\"pydoc_data/_pydoc.css\"\n css_link=(\n ''%\n css_path)\n return '''\\\n\nPydoc: %s\n\n%s%s
%s
\n'''%(title,css_link,html_navbar(),contents)\n \n def filelink(self,url,path):\n return '%s'%(url,path)\n \n \n html=_HTMLDoc()\n \n def html_navbar():\n version=html.escape(\"%s [%s, %s]\"%(platform.python_version(),\n platform.python_build()[0],\n platform.python_compiler()))\n return \"\"\"\n
\n Python %s
%s\n
\n
\n \n
\n
\n \n \n
 \n
\n \n \n
\n
\n
\n \"\"\"%(version,html.escape(platform.platform(terse=True )))\n \n def html_index():\n ''\n \n def bltinlink(name):\n return '%s'%(name,name)\n \n heading=html.heading(\n 'Index of Modules',\n '#ffffff','#7799ee')\n names=[name for name in sys.builtin_module_names\n if name !='__main__']\n contents=html.multicolumn(names,bltinlink)\n contents=[heading,'

'+html.bigsection(\n 'Built-in Modules','#ffffff','#ee77aa',contents)]\n \n seen={}\n for dir in sys.path:\n contents.append(html.index(dir,seen))\n \n contents.append(\n '

pydoc by Ka-Ping Yee'\n '<ping@lfw.org>')\n return 'Index of Modules',''.join(contents)\n \n def html_search(key):\n ''\n \n search_result=[]\n \n def callback(path,modname,desc):\n if modname[-9:]=='.__init__':\n modname=modname[:-9]+' (package)'\n search_result.append((modname,desc and '- '+desc))\n \n with warnings.catch_warnings():\n warnings.filterwarnings('ignore')\n def onerror(modname):\n pass\n ModuleScanner().run(callback,key,onerror=onerror)\n \n \n def bltinlink(name):\n return '%s'%(name,name)\n \n results=[]\n heading=html.heading(\n 'Search Results',\n '#ffffff','#7799ee')\n for name,desc in search_result:\n results.append(bltinlink(name)+desc)\n contents=heading+html.bigsection(\n 'key = %s'%key,'#ffffff','#ee77aa','
'.join(results))\n return 'Search Results',contents\n \n def html_getfile(path):\n ''\n path=urllib.parse.unquote(path)\n with tokenize.open(path)as fp:\n lines=html.escape(fp.read())\n body='

%s
'%lines\n heading=html.heading(\n 'File Listing',\n '#ffffff','#7799ee')\n contents=heading+html.bigsection(\n 'File: %s'%path,'#ffffff','#ee77aa',body)\n return 'getfile %s'%path,contents\n \n def html_topics():\n ''\n \n def bltinlink(name):\n return '%s'%(name,name)\n \n heading=html.heading(\n 'INDEX',\n '#ffffff','#7799ee')\n names=sorted(Helper.topics.keys())\n \n contents=html.multicolumn(names,bltinlink)\n contents=heading+html.bigsection(\n 'Topics','#ffffff','#ee77aa',contents)\n return 'Topics',contents\n \n def html_keywords():\n ''\n heading=html.heading(\n 'INDEX',\n '#ffffff','#7799ee')\n names=sorted(Helper.keywords.keys())\n \n def bltinlink(name):\n return '%s'%(name,name)\n \n contents=html.multicolumn(names,bltinlink)\n contents=heading+html.bigsection(\n 'Keywords','#ffffff','#ee77aa',contents)\n return 'Keywords',contents\n \n def html_topicpage(topic):\n ''\n buf=io.StringIO()\n htmlhelp=Helper(buf,buf)\n contents,xrefs=htmlhelp._gettopic(topic)\n if topic in htmlhelp.keywords:\n title='KEYWORD'\n else :\n title='TOPIC'\n heading=html.heading(\n '%s'%title,\n '#ffffff','#7799ee')\n contents='
%s
'%html.markup(contents)\n contents=html.bigsection(topic,'#ffffff','#ee77aa',contents)\n if xrefs:\n xrefs=sorted(xrefs.split())\n \n def bltinlink(name):\n return '%s'%(name,name)\n \n xrefs=html.multicolumn(xrefs,bltinlink)\n xrefs=html.section('Related help topics: ',\n '#ffffff','#ee77aa',xrefs)\n return ('%s %s'%(title,topic),\n ''.join((heading,contents,xrefs)))\n \n def html_getobj(url):\n obj=locate(url,forceload=1)\n if obj is None and url !='None':\n raise ValueError('could not find object')\n title=describe(obj)\n content=html.document(obj,url)\n return title,content\n \n def html_error(url,exc):\n heading=html.heading(\n 'Error',\n '#ffffff','#7799ee')\n contents='
'.join(html.escape(line)for line in\n format_exception_only(type(exc),exc))\n contents=heading+html.bigsection(url,'#ffffff','#bb0000',\n contents)\n return \"Error - %s\"%url,contents\n \n def get_html_page(url):\n ''\n complete_url=url\n if url.endswith('.html'):\n url=url[:-5]\n try :\n if url in (\"\",\"index\"):\n title,content=html_index()\n elif url ==\"topics\":\n title,content=html_topics()\n elif url ==\"keywords\":\n title,content=html_keywords()\n elif '='in url:\n op,_,url=url.partition('=')\n if op ==\"search?key\":\n title,content=html_search(url)\n elif op ==\"getfile?key\":\n title,content=html_getfile(url)\n elif op ==\"topic?key\":\n \n try :\n title,content=html_topicpage(url)\n except ValueError:\n title,content=html_getobj(url)\n elif op ==\"get?key\":\n \n if url in (\"\",\"index\"):\n title,content=html_index()\n else :\n try :\n title,content=html_getobj(url)\n except ValueError:\n title,content=html_topicpage(url)\n else :\n raise ValueError('bad pydoc url')\n else :\n title,content=html_getobj(url)\n except Exception as exc:\n \n title,content=html_error(complete_url,exc)\n return html.page(title,content)\n \n if url.startswith('/'):\n url=url[1:]\n if content_type =='text/css':\n path_here=os.path.dirname(os.path.realpath(__file__))\n css_path=os.path.join(path_here,url)\n with open(css_path)as fp:\n return ''.join(fp.readlines())\n elif content_type =='text/html':\n return get_html_page(url)\n \n raise TypeError('unknown content type %r for url %s'%(content_type,url))\n \n \ndef browse(port=0,*,open_browser=True ,hostname='localhost'):\n ''\n\n\n\n \n import webbrowser\n serverthread=_start_server(_url_handler,hostname,port)\n if serverthread.error:\n print(serverthread.error)\n return\n if serverthread.serving:\n server_help_msg='Server commands: [b]rowser, [q]uit'\n if open_browser:\n webbrowser.open(serverthread.url)\n try :\n print('Server ready at',serverthread.url)\n print(server_help_msg)\n while serverthread.serving:\n cmd=input('server> ')\n cmd=cmd.lower()\n if cmd =='q':\n break\n elif cmd =='b':\n webbrowser.open(serverthread.url)\n else :\n print(server_help_msg)\n except (KeyboardInterrupt,EOFError):\n print()\n finally :\n if serverthread.serving:\n serverthread.stop()\n print('Server stopped')\n \n \n \n \ndef ispath(x):\n return isinstance(x,str)and x.find(os.sep)>=0\n \ndef _get_revised_path(given_path,argv0):\n ''\n\n\n\n\n \n \n \n \n \n \n \n if ''in given_path or os.curdir in given_path or os.getcwd()in given_path:\n return None\n \n \n \n stdlib_dir=os.path.dirname(__file__)\n script_dir=os.path.dirname(argv0)\n revised_path=given_path.copy()\n if script_dir in given_path and not os.path.samefile(script_dir,stdlib_dir):\n revised_path.remove(script_dir)\n revised_path.insert(0,os.getcwd())\n return revised_path\n \n \n \ndef _adjust_cli_sys_path():\n ''\n\n\n \n revised_path=_get_revised_path(sys.path,sys.argv[0])\n if revised_path is not None :\n sys.path[:]=revised_path\n \n \ndef cli():\n ''\n import getopt\n class BadUsage(Exception):pass\n \n _adjust_cli_sys_path()\n \n try :\n opts,args=getopt.getopt(sys.argv[1:],'bk:n:p:w')\n writing=False\n start_server=False\n open_browser=False\n port=0\n hostname='localhost'\n for opt,val in opts:\n if opt =='-b':\n start_server=True\n open_browser=True\n if opt =='-k':\n apropos(val)\n return\n if opt =='-p':\n start_server=True\n port=val\n if opt =='-w':\n writing=True\n if opt =='-n':\n start_server=True\n hostname=val\n \n if start_server:\n 
browse(port,hostname=hostname,open_browser=open_browser)\n return\n \n if not args:raise BadUsage\n for arg in args:\n if ispath(arg)and not os.path.exists(arg):\n print('file %r does not exist'%arg)\n break\n try :\n if ispath(arg)and os.path.isfile(arg):\n arg=importfile(arg)\n if writing:\n if ispath(arg)and os.path.isdir(arg):\n writedocs(arg)\n else :\n writedoc(arg)\n else :\n help.help(arg)\n except ErrorDuringImport as value:\n print(value)\n \n except (getopt.error,BadUsage):\n cmd=os.path.splitext(os.path.basename(sys.argv[0]))[0]\n print(\"\"\"pydoc - the Python documentation tool\n\n{cmd} ...\n Show text documentation on something. may be the name of a\n Python keyword, topic, function, module, or package, or a dotted\n reference to a class or function within a module or module in a\n package. If contains a '{sep}', it is used as the path to a\n Python source file to document. If name is 'keywords', 'topics',\n or 'modules', a listing of these things is displayed.\n\n{cmd} -k \n Search for a keyword in the synopsis lines of all available modules.\n\n{cmd} -n \n Start an HTTP server with the given hostname (default: localhost).\n\n{cmd} -p \n Start an HTTP server on the given port on the local machine. Port\n number 0 can be used to get an arbitrary unused port.\n\n{cmd} -b\n Start an HTTP server on an arbitrary unused port and open a Web browser\n to interactively browse documentation. This option can be used in\n combination with -n and/or -p.\n\n{cmd} -w ...\n Write out the HTML documentation for a module to a file in the current\n directory. If contains a '{sep}', it is treated as a filename; if\n it names a directory, documentation is written for all the contents.\n\"\"\".format(cmd=cmd,sep=os.sep))\n \nif __name__ =='__main__':\n cli()\n", ["builtins", "collections", "email.message", "getopt", "http.server", "importlib._bootstrap", "importlib._bootstrap_external", "importlib.machinery", "importlib.util", "inspect", "io", "os", "pkgutil", "platform", "pydoc_data.topics", "re", "reprlib", "select", "subprocess", "sys", "sysconfig", "tempfile", "textwrap", "threading", "time", "tokenize", "traceback", "tty", "urllib.parse", "warnings", "webbrowser"]], "py_compile": [".py", "''\n\n\n\n\nimport enum\nimport importlib._bootstrap_external\nimport importlib.machinery\nimport importlib.util\nimport os\nimport os.path\nimport sys\nimport traceback\n\n__all__=[\"compile\",\"main\",\"PyCompileError\",\"PycInvalidationMode\"]\n\n\nclass PyCompileError(Exception):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def __init__(self,exc_type,exc_value,file,msg=''):\n exc_type_name=exc_type.__name__\n if exc_type is SyntaxError:\n tbtext=''.join(traceback.format_exception_only(\n exc_type,exc_value))\n errmsg=tbtext.replace('File \"\"','File \"%s\"'%file)\n else :\n errmsg=\"Sorry: %s: %s\"%(exc_type_name,exc_value)\n \n Exception.__init__(self,msg or errmsg,exc_type_name,exc_value,file)\n \n self.exc_type_name=exc_type_name\n self.exc_value=exc_value\n self.file=file\n self.msg=msg or errmsg\n \n def __str__(self):\n return self.msg\n \n \nclass PycInvalidationMode(enum.Enum):\n TIMESTAMP=1\n CHECKED_HASH=2\n UNCHECKED_HASH=3\n \n \ndef _get_default_invalidation_mode():\n if os.environ.get('SOURCE_DATE_EPOCH'):\n return PycInvalidationMode.CHECKED_HASH\n else :\n return PycInvalidationMode.TIMESTAMP\n \n \ndef compile(file,cfile=None ,dfile=None ,doraise=False ,optimize=-1,\ninvalidation_mode=None ,quiet=0):\n 
''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if invalidation_mode is None :\n invalidation_mode=_get_default_invalidation_mode()\n if cfile is None :\n if optimize >=0:\n optimization=optimize if optimize >=1 else ''\n cfile=importlib.util.cache_from_source(file,\n optimization=optimization)\n else :\n cfile=importlib.util.cache_from_source(file)\n if os.path.islink(cfile):\n msg=('{} is a symlink and will be changed into a regular file if '\n 'import writes a byte-compiled file to it')\n raise FileExistsError(msg.format(cfile))\n elif os.path.exists(cfile)and not os.path.isfile(cfile):\n msg=('{} is a non-regular file and will be changed into a regular '\n 'one if import writes a byte-compiled file to it')\n raise FileExistsError(msg.format(cfile))\n loader=importlib.machinery.SourceFileLoader('',file)\n source_bytes=loader.get_data(file)\n try :\n code=loader.source_to_code(source_bytes,dfile or file,\n _optimize=optimize)\n except Exception as err:\n py_exc=PyCompileError(err.__class__,err,dfile or file)\n if quiet <2:\n if doraise:\n raise py_exc\n else :\n sys.stderr.write(py_exc.msg+'\\n')\n return\n try :\n dirname=os.path.dirname(cfile)\n if dirname:\n os.makedirs(dirname)\n except FileExistsError:\n pass\n if invalidation_mode ==PycInvalidationMode.TIMESTAMP:\n source_stats=loader.path_stats(file)\n bytecode=importlib._bootstrap_external._code_to_timestamp_pyc(\n code,source_stats['mtime'],source_stats['size'])\n else :\n source_hash=importlib.util.source_hash(source_bytes)\n bytecode=importlib._bootstrap_external._code_to_hash_pyc(\n code,\n source_hash,\n (invalidation_mode ==PycInvalidationMode.CHECKED_HASH),\n )\n mode=importlib._bootstrap_external._calc_mode(file)\n importlib._bootstrap_external._write_atomic(cfile,bytecode,mode)\n return cfile\n \n \ndef main(args=None ):\n ''\n\n\n\n\n\n\n\n\n \n if args is None :\n args=sys.argv[1:]\n rv=0\n if args ==['-']:\n while True :\n filename=sys.stdin.readline()\n if not filename:\n break\n filename=filename.rstrip('\\n')\n try :\n compile(filename,doraise=True )\n except PyCompileError as error:\n rv=1\n sys.stderr.write(\"%s\\n\"%error.msg)\n except OSError as error:\n rv=1\n sys.stderr.write(\"%s\\n\"%error)\n else :\n for filename in args:\n try :\n compile(filename,doraise=True )\n except PyCompileError as error:\n \n rv=1\n sys.stderr.write(\"%s\\n\"%error.msg)\n return rv\n \nif __name__ ==\"__main__\":\n sys.exit(main())\n", ["enum", "importlib._bootstrap_external", "importlib.machinery", "importlib.util", "os", "os.path", "sys", "traceback"]], "queue": [".py", "''\n\nimport threading\nimport types\nfrom collections import deque\nfrom heapq import heappush,heappop\nfrom time import monotonic as time\ntry :\n from _queue import SimpleQueue\nexcept ImportError:\n SimpleQueue=None\n \n__all__=['Empty','Full','Queue','PriorityQueue','LifoQueue','SimpleQueue']\n\n\ntry :\n from _queue import Empty\nexcept ImportError:\n class Empty(Exception):\n ''\n pass\n \nclass Full(Exception):\n ''\n pass\n \n \nclass Queue:\n ''\n\n\n \n \n def __init__(self,maxsize=0):\n self.maxsize=maxsize\n self._init(maxsize)\n \n \n \n \n \n self.mutex=threading.Lock()\n \n \n \n self.not_empty=threading.Condition(self.mutex)\n \n \n \n self.not_full=threading.Condition(self.mutex)\n \n \n \n self.all_tasks_done=threading.Condition(self.mutex)\n self.unfinished_tasks=0\n \n def task_done(self):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n with self.all_tasks_done:\n unfinished=self.unfinished_tasks -1\n if unfinished 
<=0:\n if unfinished <0:\n raise ValueError('task_done() called too many times')\n self.all_tasks_done.notify_all()\n self.unfinished_tasks=unfinished\n \n def join(self):\n ''\n\n\n\n\n\n\n \n with self.all_tasks_done:\n while self.unfinished_tasks:\n self.all_tasks_done.wait()\n \n def qsize(self):\n ''\n with self.mutex:\n return self._qsize()\n \n def empty(self):\n ''\n\n\n\n\n\n\n\n\n \n with self.mutex:\n return not self._qsize()\n \n def full(self):\n ''\n\n\n\n\n\n \n with self.mutex:\n return 0 0:\n if not block:\n if self._qsize()>=self.maxsize:\n raise Full\n elif timeout is None :\n while self._qsize()>=self.maxsize:\n self.not_full.wait()\n elif timeout <0:\n raise ValueError(\"'timeout' must be a non-negative number\")\n else :\n endtime=time()+timeout\n while self._qsize()>=self.maxsize:\n remaining=endtime -time()\n if remaining <=0.0:\n raise Full\n self.not_full.wait(remaining)\n self._put(item)\n self.unfinished_tasks +=1\n self.not_empty.notify()\n \n def get(self,block=True ,timeout=None ):\n ''\n\n\n\n\n\n\n\n\n \n with self.not_empty:\n if not block:\n if not self._qsize():\n raise Empty\n elif timeout is None :\n while not self._qsize():\n self.not_empty.wait()\n elif timeout <0:\n raise ValueError(\"'timeout' must be a non-negative number\")\n else :\n endtime=time()+timeout\n while not self._qsize():\n remaining=endtime -time()\n if remaining <=0.0:\n raise Empty\n self.not_empty.wait(remaining)\n item=self._get()\n self.not_full.notify()\n return item\n \n def put_nowait(self,item):\n ''\n\n\n\n \n return self.put(item,block=False )\n \n def get_nowait(self):\n ''\n\n\n\n \n return self.get(block=False )\n \n \n \n \n \n \n def _init(self,maxsize):\n self.queue=deque()\n \n def _qsize(self):\n return len(self.queue)\n \n \n def _put(self,item):\n self.queue.append(item)\n \n \n def _get(self):\n return self.queue.popleft()\n \n __class_getitem__=classmethod(types.GenericAlias)\n \n \nclass PriorityQueue(Queue):\n ''\n\n\n \n \n def _init(self,maxsize):\n self.queue=[]\n \n def _qsize(self):\n return len(self.queue)\n \n def _put(self,item):\n heappush(self.queue,item)\n \n def _get(self):\n return heappop(self.queue)\n \n \nclass LifoQueue(Queue):\n ''\n \n def _init(self,maxsize):\n self.queue=[]\n \n def _qsize(self):\n return len(self.queue)\n \n def _put(self,item):\n self.queue.append(item)\n \n def _get(self):\n return self.queue.pop()\n \n \nclass _PySimpleQueue:\n ''\n\n\n \n \n \n \n \n \n def __init__(self):\n self._queue=deque()\n self._count=threading.Semaphore(0)\n \n def put(self,item,block=True ,timeout=None ):\n ''\n\n\n\n \n self._queue.append(item)\n self._count.release()\n \n def get(self,block=True ,timeout=None ):\n ''\n\n\n\n\n\n\n\n\n \n if timeout is not None and timeout <0:\n raise ValueError(\"'timeout' must be a non-negative number\")\n if not self._count.acquire(block,timeout):\n raise Empty\n return self._queue.popleft()\n \n def put_nowait(self,item):\n ''\n\n\n\n \n return self.put(item,block=False )\n \n def get_nowait(self):\n ''\n\n\n\n \n return self.get(block=False )\n \n def empty(self):\n ''\n return len(self._queue)==0\n \n def qsize(self):\n ''\n return len(self._queue)\n \n __class_getitem__=classmethod(types.GenericAlias)\n \n \nif SimpleQueue is None :\n SimpleQueue=_PySimpleQueue\n", ["_queue", "collections", "heapq", "threading", "time", "types"]], "quopri": [".py", "#! 
/usr/bin/env python3\n\n\"\"\"Conversions to/from quoted-printable transport encoding as per RFC 1521.\"\"\"\n\n\n\n__all__=[\"encode\",\"decode\",\"encodestring\",\"decodestring\"]\n\nESCAPE=b'='\nMAXLINESIZE=76\nHEX=b'0123456789ABCDEF'\nEMPTYSTRING=b''\n\ntry :\n from binascii import a2b_qp,b2a_qp\nexcept ImportError:\n a2b_qp=None\n b2a_qp=None\n \n \ndef needsquoting(c,quotetabs,header):\n ''\n\n\n\n\n \n assert isinstance(c,bytes)\n if c in b' \\t':\n return quotetabs\n \n if c ==b'_':\n return header\n return c ==ESCAPE or not (b' '<=c <=b'~')\n \ndef quote(c):\n ''\n assert isinstance(c,bytes)and len(c)==1\n c=ord(c)\n return ESCAPE+bytes((HEX[c //16],HEX[c %16]))\n \n \n \ndef encode(input,output,quotetabs,header=False ):\n ''\n\n\n\n\n\n \n \n if b2a_qp is not None :\n data=input.read()\n odata=b2a_qp(data,quotetabs=quotetabs,header=header)\n output.write(odata)\n return\n \n def write(s,output=output,lineEnd=b'\\n'):\n \n \n if s and s[-1:]in b' \\t':\n output.write(s[:-1]+quote(s[-1:])+lineEnd)\n elif s ==b'.':\n output.write(quote(s)+lineEnd)\n else :\n output.write(s+lineEnd)\n \n prevline=None\n while 1:\n line=input.readline()\n if not line:\n break\n outline=[]\n \n stripped=b''\n if line[-1:]==b'\\n':\n line=line[:-1]\n stripped=b'\\n'\n \n for c in line:\n c=bytes((c,))\n if needsquoting(c,quotetabs,header):\n c=quote(c)\n if header and c ==b' ':\n outline.append(b'_')\n else :\n outline.append(c)\n \n if prevline is not None :\n write(prevline)\n \n \n thisline=EMPTYSTRING.join(outline)\n while len(thisline)>MAXLINESIZE:\n \n \n write(thisline[:MAXLINESIZE -1],lineEnd=b'=\\n')\n thisline=thisline[MAXLINESIZE -1:]\n \n prevline=thisline\n \n if prevline is not None :\n write(prevline,lineEnd=stripped)\n \ndef encodestring(s,quotetabs=False ,header=False ):\n if b2a_qp is not None :\n return b2a_qp(s,quotetabs=quotetabs,header=header)\n from io import BytesIO\n infp=BytesIO(s)\n outfp=BytesIO()\n encode(infp,outfp,quotetabs,header)\n return outfp.getvalue()\n \n \n \ndef decode(input,output,header=False ):\n ''\n\n \n \n if a2b_qp is not None :\n data=input.read()\n odata=a2b_qp(data,header=header)\n output.write(odata)\n return\n \n new=b''\n while 1:\n line=input.readline()\n if not line:break\n i,n=0,len(line)\n if n >0 and line[n -1:n]==b'\\n':\n partial=0 ;n=n -1\n \n while n >0 and line[n -1:n]in b\" \\t\\r\":\n n=n -1\n else :\n partial=1\n while i ...) The substring matched by the group is accessible by name.\n (?P=name) Matches the text matched earlier by the group named name.\n (?#...) A comment; ignored.\n (?=...) Matches if ... matches next, but doesn't consume the string.\n (?!...) Matches if ... doesn't match next.\n (?<=...) Matches if preceded by ... 
(must be fixed length).\n (?1:\n res=f'~({res})'\n else :\n res=f'~{res}'\n return res\n __str__=object.__str__\n \nglobals().update(RegexFlag.__members__)\n\n\nerror=sre_compile.error\n\n\n\n\ndef match(pattern,string,flags=0):\n ''\n \n return _compile(pattern,flags).match(string)\n \ndef fullmatch(pattern,string,flags=0):\n ''\n \n return _compile(pattern,flags).fullmatch(string)\n \ndef search(pattern,string,flags=0):\n ''\n \n return _compile(pattern,flags).search(string)\n \ndef sub(pattern,repl,string,count=0,flags=0):\n ''\n\n\n\n\n \n return _compile(pattern,flags).sub(repl,string,count)\n \ndef subn(pattern,repl,string,count=0,flags=0):\n ''\n\n\n\n\n\n\n \n return _compile(pattern,flags).subn(repl,string,count)\n \ndef split(pattern,string,maxsplit=0,flags=0):\n ''\n\n\n\n\n\n \n return _compile(pattern,flags).split(string,maxsplit)\n \ndef findall(pattern,string,flags=0):\n ''\n\n\n\n\n\n \n return _compile(pattern,flags).findall(string)\n \ndef finditer(pattern,string,flags=0):\n ''\n\n\n \n return _compile(pattern,flags).finditer(string)\n \ndef compile(pattern,flags=0):\n ''\n return _compile(pattern,flags)\n \ndef purge():\n ''\n _cache.clear()\n _compile_repl.cache_clear()\n \ndef template(pattern,flags=0):\n ''\n return _compile(pattern,flags |T)\n \n \n \n \n \n \n_special_chars_map={i:'\\\\'+chr(i)for i in b'()[]{}?*+-|^$\\\\.&~# \\t\\n\\r\\v\\f'}\n\ndef escape(pattern):\n ''\n\n \n if isinstance(pattern,str):\n return pattern.translate(_special_chars_map)\n else :\n pattern=str(pattern,'latin1')\n return pattern.translate(_special_chars_map).encode('latin1')\n \nPattern=type(sre_compile.compile('',0))\nMatch=type(sre_compile.compile('',0).match(''))\n\n\n\n\n_cache={}\n\n_MAXCACHE=512\ndef _compile(pattern,flags):\n\n if isinstance(flags,RegexFlag):\n flags=flags.value\n try :\n return _cache[type(pattern),pattern,flags]\n except KeyError:\n pass\n if isinstance(pattern,Pattern):\n if flags:\n raise ValueError(\n \"cannot process flags argument with a compiled pattern\")\n return pattern\n if not sre_compile.isstring(pattern):\n raise TypeError(\"first argument must be string or compiled pattern\")\n p=sre_compile.compile(pattern,flags)\n if not (flags&DEBUG):\n if len(_cache)>=_MAXCACHE:\n \n try :\n del _cache[next(iter(_cache))]\n except (StopIteration,RuntimeError,KeyError):\n pass\n _cache[type(pattern),pattern,flags]=p\n return p\n \n@functools.lru_cache(_MAXCACHE)\ndef _compile_repl(repl,pattern):\n\n return sre_parse.parse_template(repl,pattern)\n \ndef _expand(pattern,match,template):\n\n template=sre_parse.parse_template(template,pattern)\n return sre_parse.expand_template(template,match)\n \ndef _subx(pattern,template):\n\n template=_compile_repl(template,pattern)\n if not template[0]and len(template[1])==1:\n \n return template[1][0]\n def filter(match,template=template):\n return sre_parse.expand_template(template,match)\n return filter\n \n \n \nimport copyreg\n\ndef _pickle(p):\n return _compile,(p.pattern,p.flags)\n \ncopyreg.pickle(Pattern,_pickle,_compile)\n\n\n\n\nclass Scanner:\n def __init__(self,lexicon,flags=0):\n from sre_constants import BRANCH,SUBPATTERN\n if isinstance(flags,RegexFlag):\n flags=flags.value\n self.lexicon=lexicon\n \n p=[]\n s=sre_parse.State()\n s.flags=flags\n for phrase,action in lexicon:\n gid=s.opengroup()\n p.append(sre_parse.SubPattern(s,[\n (SUBPATTERN,(gid,0,0,sre_parse.parse(phrase,flags))),\n ]))\n s.closegroup(gid,p[-1])\n p=sre_parse.SubPattern(s,[(BRANCH,(None ,p))])\n self.scanner=sre_compile.compile(p)\n def 
scan(self,string):\n result=[]\n append=result.append\n match=self.scanner.scanner(string).match\n i=0\n while True :\n m=match()\n if not m:\n break\n j=m.end()\n if i ==j:\n break\n action=self.lexicon[m.lastindex -1][1]\n if callable(action):\n self.match=m\n action=action(self,m.group())\n if action is not None :\n append(action)\n i=j\n return result,string[i:]\n", ["_locale", "copyreg", "enum", "functools", "sre_compile", "sre_constants", "sre_parse"]], "reprlib": [".py", "''\n\n__all__=[\"Repr\",\"repr\",\"recursive_repr\"]\n\nimport builtins\nfrom itertools import islice\nfrom _thread import get_ident\n\ndef recursive_repr(fillvalue='...'):\n ''\n \n def decorating_function(user_function):\n repr_running=set()\n \n def wrapper(self):\n key=id(self),get_ident()\n if key in repr_running:\n return fillvalue\n repr_running.add(key)\n try :\n result=user_function(self)\n finally :\n repr_running.discard(key)\n return result\n \n \n wrapper.__module__=getattr(user_function,'__module__')\n wrapper.__doc__=getattr(user_function,'__doc__')\n wrapper.__name__=getattr(user_function,'__name__')\n wrapper.__qualname__=getattr(user_function,'__qualname__')\n wrapper.__annotations__=getattr(user_function,'__annotations__',{})\n return wrapper\n \n return decorating_function\n \nclass Repr:\n\n def __init__(self):\n self.maxlevel=6\n self.maxtuple=6\n self.maxlist=6\n self.maxarray=5\n self.maxdict=4\n self.maxset=6\n self.maxfrozenset=6\n self.maxdeque=6\n self.maxstring=30\n self.maxlong=40\n self.maxother=30\n \n def repr(self,x):\n return self.repr1(x,self.maxlevel)\n \n def repr1(self,x,level):\n typename=type(x).__name__\n if ' 'in typename:\n parts=typename.split()\n typename='_'.join(parts)\n if hasattr(self,'repr_'+typename):\n return getattr(self,'repr_'+typename)(x,level)\n else :\n return self.repr_instance(x,level)\n \n def _repr_iterable(self,x,level,left,right,maxiter,trail=''):\n n=len(x)\n if level <=0 and n:\n s='...'\n else :\n newlevel=level -1\n repr1=self.repr1\n pieces=[repr1(elem,newlevel)for elem in islice(x,maxiter)]\n if n >maxiter:pieces.append('...')\n s=', '.join(pieces)\n if n ==1 and trail:right=trail+right\n return '%s%s%s'%(left,s,right)\n \n def repr_tuple(self,x,level):\n return self._repr_iterable(x,level,'(',')',self.maxtuple,',')\n \n def repr_list(self,x,level):\n return self._repr_iterable(x,level,'[',']',self.maxlist)\n \n def repr_array(self,x,level):\n if not x:\n return \"array('%s')\"%x.typecode\n header=\"array('%s', [\"%x.typecode\n return self._repr_iterable(x,level,header,'])',self.maxarray)\n \n def repr_set(self,x,level):\n if not x:\n return 'set()'\n x=_possibly_sorted(x)\n return self._repr_iterable(x,level,'{','}',self.maxset)\n \n def repr_frozenset(self,x,level):\n if not x:\n return 'frozenset()'\n x=_possibly_sorted(x)\n return self._repr_iterable(x,level,'frozenset({','})',\n self.maxfrozenset)\n \n def repr_deque(self,x,level):\n return self._repr_iterable(x,level,'deque([','])',self.maxdeque)\n \n def repr_dict(self,x,level):\n n=len(x)\n if n ==0:return '{}'\n if level <=0:return '{...}'\n newlevel=level -1\n repr1=self.repr1\n pieces=[]\n for key in islice(_possibly_sorted(x),self.maxdict):\n keyrepr=repr1(key,newlevel)\n valrepr=repr1(x[key],newlevel)\n pieces.append('%s: %s'%(keyrepr,valrepr))\n if n >self.maxdict:pieces.append('...')\n s=', '.join(pieces)\n return '{%s}'%(s,)\n \n def repr_str(self,x,level):\n s=builtins.repr(x[:self.maxstring])\n if len(s)>self.maxstring:\n i=max(0,(self.maxstring -3)//2)\n j=max(0,self.maxstring 
-3 -i)\n s=builtins.repr(x[:i]+x[len(x)-j:])\n s=s[:i]+'...'+s[len(s)-j:]\n return s\n \n def repr_int(self,x,level):\n s=builtins.repr(x)\n if len(s)>self.maxlong:\n i=max(0,(self.maxlong -3)//2)\n j=max(0,self.maxlong -3 -i)\n s=s[:i]+'...'+s[len(s)-j:]\n return s\n \n def repr_instance(self,x,level):\n try :\n s=builtins.repr(x)\n \n \n except Exception:\n return '<%s instance at %#x>'%(x.__class__.__name__,id(x))\n if len(s)>self.maxother:\n i=max(0,(self.maxother -3)//2)\n j=max(0,self.maxother -3 -i)\n s=s[:i]+'...'+s[len(s)-j:]\n return s\n \n \ndef _possibly_sorted(x):\n\n\n\n try :\n return sorted(x)\n except Exception:\n return list(x)\n \naRepr=Repr()\nrepr=aRepr.repr\n", ["_thread", "builtins", "itertools"]], "select": [".py", "''\n\n\n\n\nimport errno\nimport os\n\nclass error(Exception):\n pass\n \nALL=None\n\n_exception_map={}\n\ndef _map_exception(exc,circumstance=ALL):\n try :\n mapped_exception=_exception_map[(exc.__class__,circumstance)]\n mapped_exception.java_exception=exc\n return mapped_exception\n except KeyError:\n return error(-1,'Unmapped java exception: <%s:%s>'%(exc.toString(),circumstance))\n \nPOLLIN=1\nPOLLOUT=2\n\n\n\n\n\nPOLLPRI=4\nPOLLERR=8\nPOLLHUP=16\nPOLLNVAL=32\n\ndef _getselectable(selectable_object):\n try :\n channel=selectable_object.getchannel()\n except :\n try :\n channel=selectable_object.fileno().getChannel()\n except :\n raise TypeError(\"Object '%s' is not watchable\"%selectable_object,\n errno.ENOTSOCK)\n \n if channel and not isinstance(channel,java.nio.channels.SelectableChannel):\n raise TypeError(\"Object '%s' is not watchable\"%selectable_object,\n errno.ENOTSOCK)\n return channel\n \n \nclass Selector:\n\n def close(self):\n pass\n \n def keys(self):\n return []\n \n def select(self,timeout=None ):\n return []\n \n def selectedKeys(self):\n class SelectedKeys:\n def iterator(self):\n return []\n return SelectedKeys()\n \n def selectNow(self,timeout=None ):\n return []\n \nclass poll:\n\n def __init__(self):\n self.selector=Selector()\n self.chanmap={}\n self.unconnected_sockets=[]\n \n def _register_channel(self,socket_object,channel,mask):\n jmask=0\n if mask&POLLIN:\n \n if channel.validOps()&OP_ACCEPT:\n jmask=OP_ACCEPT\n else :\n jmask=OP_READ\n if mask&POLLOUT:\n if channel.validOps()&OP_WRITE:\n jmask |=OP_WRITE\n if channel.validOps()&OP_CONNECT:\n jmask |=OP_CONNECT\n selectionkey=channel.register(self.selector,jmask)\n self.chanmap[channel]=(socket_object,selectionkey)\n \n def _check_unconnected_sockets(self):\n temp_list=[]\n for socket_object,mask in self.unconnected_sockets:\n channel=_getselectable(socket_object)\n if channel is not None :\n self._register_channel(socket_object,channel,mask)\n else :\n temp_list.append((socket_object,mask))\n self.unconnected_sockets=temp_list\n \n def register(self,socket_object,mask=POLLIN |POLLOUT |POLLPRI):\n try :\n channel=_getselectable(socket_object)\n if channel is None :\n \n \n self.unconnected_sockets.append((socket_object,mask))\n return\n self._register_channel(socket_object,channel,mask)\n except BaseException as exc:\n raise _map_exception(exc)\n \n def unregister(self,socket_object):\n try :\n channel=_getselectable(socket_object)\n self.chanmap[channel][1].cancel()\n del self.chanmap[channel]\n except BaseException as exc:\n raise _map_exception(exc)\n \n def _dopoll(self,timeout):\n if timeout is None or timeout <0:\n self.selector.select()\n else :\n try :\n timeout=int(timeout)\n if not timeout:\n self.selector.selectNow()\n else :\n \n 
self.selector.select(timeout)\n except ValueError as vx:\n raise error(\"poll timeout must be a number of milliseconds or None\",errno.EINVAL)\n \n return self.selector.selectedKeys()\n \n def poll(self,timeout=None ):\n return []\n \n def _deregister_all(self):\n try :\n for k in self.selector.keys():\n k.cancel()\n \n self.selector.selectNow()\n except BaseException as exc:\n raise _map_exception(exc)\n \n def close(self):\n try :\n self._deregister_all()\n self.selector.close()\n except BaseException as exc:\n raise _map_exception(exc)\n \ndef _calcselecttimeoutvalue(value):\n if value is None :\n return None\n try :\n floatvalue=float(value)\n except Exception as x:\n raise TypeError(\"Select timeout value must be a number or None\")\n if value <0:\n raise error(\"Select timeout value cannot be negative\",errno.EINVAL)\n if floatvalue <0.000001:\n return 0\n return int(floatvalue *1000)\n \n \n \n \nclass poll_object_cache:\n\n def __init__(self):\n self.is_windows=os.name =='nt'\n if self.is_windows:\n self.poll_object_queue=Queue.Queue()\n import atexit\n atexit.register(self.finalize)\n \n def get_poll_object(self):\n if not self.is_windows:\n return poll()\n try :\n return self.poll_object_queue.get(False )\n except Queue.Empty:\n return poll()\n \n def release_poll_object(self,pobj):\n if self.is_windows:\n pobj._deregister_all()\n self.poll_object_queue.put(pobj)\n else :\n pobj.close()\n \n def finalize(self):\n if self.is_windows:\n while True :\n try :\n p=self.poll_object_queue.get(False )\n p.close()\n except Queue.Empty:\n return\n \n_poll_object_cache=poll_object_cache()\n\ndef native_select(read_fd_list,write_fd_list,outofband_fd_list,timeout=None ):\n timeout=_calcselecttimeoutvalue(timeout)\n \n pobj=_poll_object_cache.get_poll_object()\n try :\n registered_for_read={}\n \n for fd in read_fd_list:\n pobj.register(fd,POLLIN)\n registered_for_read[fd]=1\n \n for fd in write_fd_list:\n if fd in registered_for_read:\n \n pobj.register(fd,POLLIN |POLLOUT)\n else :\n pobj.register(fd,POLLOUT)\n results=pobj.poll(timeout)\n \n read_ready_list,write_ready_list,oob_ready_list=[],[],[]\n for fd,mask in results:\n if mask&POLLIN:\n read_ready_list.append(fd)\n if mask&POLLOUT:\n write_ready_list.append(fd)\n return read_ready_list,write_ready_list,oob_ready_list\n finally :\n _poll_object_cache.release_poll_object(pobj)\n \nselect=native_select\n\ndef cpython_compatible_select(read_fd_list,write_fd_list,outofband_fd_list,timeout=None ):\n\n\n modified_channels=[]\n try :\n for socket_list in [read_fd_list,write_fd_list,outofband_fd_list]:\n for s in socket_list:\n channel=_getselectable(s)\n if channel.isBlocking():\n modified_channels.append(channel)\n channel.configureBlocking(0)\n return native_select(read_fd_list,write_fd_list,outofband_fd_list,timeout)\n finally :\n for channel in modified_channels:\n channel.configureBlocking(1)\n", ["atexit", "errno", "os"]], "selectors": [".py", "''\n\n\n\n\n\n\nfrom abc import ABCMeta,abstractmethod\nfrom collections import namedtuple\nfrom collections.abc import Mapping\nimport math\nimport select\nimport sys\n\n\n\nEVENT_READ=(1 <<0)\nEVENT_WRITE=(1 <<1)\n\n\ndef _fileobj_to_fd(fileobj):\n ''\n\n\n\n\n\n\n\n\n\n \n if isinstance(fileobj,int):\n fd=fileobj\n else :\n try :\n fd=int(fileobj.fileno())\n except (AttributeError,TypeError,ValueError):\n raise ValueError(\"Invalid file object: \"\n \"{!r}\".format(fileobj))from None\n if fd <0:\n raise ValueError(\"Invalid file descriptor: {}\".format(fd))\n return fd\n \n 
\nSelectorKey=namedtuple('SelectorKey',['fileobj','fd','events','data'])\n\nSelectorKey.__doc__=\"\"\"SelectorKey(fileobj, fd, events, data)\n\n Object used to associate a file object to its backing\n file descriptor, selected event mask, and attached data.\n\"\"\"\nif sys.version_info >=(3,5):\n SelectorKey.fileobj.__doc__='File object registered.'\n SelectorKey.fd.__doc__='Underlying file descriptor.'\n SelectorKey.events.__doc__='Events that must be waited for on this file object.'\n SelectorKey.data.__doc__=('''Optional opaque data associated to this file object.\n For example, this could be used to store a per-client session ID.''')\n \n \nclass _SelectorMapping(Mapping):\n ''\n \n def __init__(self,selector):\n self._selector=selector\n \n def __len__(self):\n return len(self._selector._fd_to_key)\n \n def __getitem__(self,fileobj):\n try :\n fd=self._selector._fileobj_lookup(fileobj)\n return self._selector._fd_to_key[fd]\n except KeyError:\n raise KeyError(\"{!r} is not registered\".format(fileobj))from None\n \n def __iter__(self):\n return iter(self._selector._fd_to_key)\n \n \nclass BaseSelector(metaclass=ABCMeta):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n \n @abstractmethod\n def register(self,fileobj,events,data=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n raise NotImplementedError\n \n @abstractmethod\n def unregister(self,fileobj):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n raise NotImplementedError\n \n def modify(self,fileobj,events,data=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n self.unregister(fileobj)\n return self.register(fileobj,events,data)\n \n @abstractmethod\n def select(self,timeout=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n raise NotImplementedError\n \n def close(self):\n ''\n\n\n \n pass\n \n def get_key(self,fileobj):\n ''\n\n\n\n \n mapping=self.get_map()\n if mapping is None :\n raise RuntimeError('Selector is closed')\n try :\n return mapping[fileobj]\n except KeyError:\n raise KeyError(\"{!r} is not registered\".format(fileobj))from None\n \n @abstractmethod\n def get_map(self):\n ''\n raise NotImplementedError\n \n def __enter__(self):\n return self\n \n def __exit__(self,*args):\n self.close()\n \n \nclass _BaseSelectorImpl(BaseSelector):\n ''\n \n def __init__(self):\n \n self._fd_to_key={}\n \n self._map=_SelectorMapping(self)\n \n def _fileobj_lookup(self,fileobj):\n ''\n\n\n\n\n\n\n \n try :\n return _fileobj_to_fd(fileobj)\n except ValueError:\n \n for key in self._fd_to_key.values():\n if key.fileobj is fileobj:\n return key.fd\n \n raise\n \n def register(self,fileobj,events,data=None ):\n if (not events)or (events&~(EVENT_READ |EVENT_WRITE)):\n raise ValueError(\"Invalid events: {!r}\".format(events))\n \n key=SelectorKey(fileobj,self._fileobj_lookup(fileobj),events,data)\n \n if key.fd in self._fd_to_key:\n raise KeyError(\"{!r} (FD {}) is already registered\"\n .format(fileobj,key.fd))\n \n self._fd_to_key[key.fd]=key\n return key\n \n def unregister(self,fileobj):\n try :\n key=self._fd_to_key.pop(self._fileobj_lookup(fileobj))\n except KeyError:\n raise KeyError(\"{!r} is not registered\".format(fileobj))from None\n return key\n \n def modify(self,fileobj,events,data=None ):\n try :\n key=self._fd_to_key[self._fileobj_lookup(fileobj)]\n except KeyError:\n raise KeyError(\"{!r} is not registered\".format(fileobj))from None\n if events !=key.events:\n self.unregister(fileobj)\n key=self.register(fileobj,events,data)\n elif data !=key.data:\n \n key=key._replace(data=data)\n self._fd_to_key[key.fd]=key\n return key\n \n def close(self):\n 
self._fd_to_key.clear()\n self._map=None\n \n def get_map(self):\n return self._map\n \n def _key_from_fd(self,fd):\n ''\n\n\n\n\n\n\n \n try :\n return self._fd_to_key[fd]\n except KeyError:\n return None\n \n \nclass SelectSelector(_BaseSelectorImpl):\n ''\n \n def __init__(self):\n super().__init__()\n self._readers=set()\n self._writers=set()\n \n def register(self,fileobj,events,data=None ):\n key=super().register(fileobj,events,data)\n if events&EVENT_READ:\n self._readers.add(key.fd)\n if events&EVENT_WRITE:\n self._writers.add(key.fd)\n return key\n \n def unregister(self,fileobj):\n key=super().unregister(fileobj)\n self._readers.discard(key.fd)\n self._writers.discard(key.fd)\n return key\n \n if sys.platform =='win32':\n def _select(self,r,w,_,timeout=None ):\n r,w,x=select.select(r,w,w,timeout)\n return r,w+x,[]\n else :\n _select=select.select\n \n def select(self,timeout=None ):\n timeout=None if timeout is None else max(timeout,0)\n ready=[]\n try :\n r,w,_=self._select(self._readers,self._writers,[],timeout)\n except InterruptedError:\n return ready\n r=set(r)\n w=set(w)\n for fd in r |w:\n events=0\n if fd in r:\n events |=EVENT_READ\n if fd in w:\n events |=EVENT_WRITE\n \n key=self._key_from_fd(fd)\n if key:\n ready.append((key,events&key.events))\n return ready\n \n \nclass _PollLikeSelector(_BaseSelectorImpl):\n ''\n _selector_cls=None\n _EVENT_READ=None\n _EVENT_WRITE=None\n \n def __init__(self):\n super().__init__()\n self._selector=self._selector_cls()\n \n def register(self,fileobj,events,data=None ):\n key=super().register(fileobj,events,data)\n poller_events=0\n if events&EVENT_READ:\n poller_events |=self._EVENT_READ\n if events&EVENT_WRITE:\n poller_events |=self._EVENT_WRITE\n try :\n self._selector.register(key.fd,poller_events)\n except :\n super().unregister(fileobj)\n raise\n return key\n \n def unregister(self,fileobj):\n key=super().unregister(fileobj)\n try :\n self._selector.unregister(key.fd)\n except OSError:\n \n \n pass\n return key\n \n def modify(self,fileobj,events,data=None ):\n try :\n key=self._fd_to_key[self._fileobj_lookup(fileobj)]\n except KeyError:\n raise KeyError(f\"{fileobj!r} is not registered\")from None\n \n changed=False\n if events !=key.events:\n selector_events=0\n if events&EVENT_READ:\n selector_events |=self._EVENT_READ\n if events&EVENT_WRITE:\n selector_events |=self._EVENT_WRITE\n try :\n self._selector.modify(key.fd,selector_events)\n except :\n super().unregister(fileobj)\n raise\n changed=True\n if data !=key.data:\n changed=True\n \n if changed:\n key=key._replace(events=events,data=data)\n self._fd_to_key[key.fd]=key\n return key\n \n def select(self,timeout=None ):\n \n \n if timeout is None :\n timeout=None\n elif timeout <=0:\n timeout=0\n else :\n \n \n timeout=math.ceil(timeout *1e3)\n ready=[]\n try :\n fd_event_list=self._selector.poll(timeout)\n except InterruptedError:\n return ready\n for fd,event in fd_event_list:\n events=0\n if event&~self._EVENT_READ:\n events |=EVENT_WRITE\n if event&~self._EVENT_WRITE:\n events |=EVENT_READ\n \n key=self._key_from_fd(fd)\n if key:\n ready.append((key,events&key.events))\n return ready\n \n \nif hasattr(select,'poll'):\n\n class PollSelector(_PollLikeSelector):\n ''\n _selector_cls=select.poll\n _EVENT_READ=select.POLLIN\n _EVENT_WRITE=select.POLLOUT\n \n \nif hasattr(select,'epoll'):\n\n class EpollSelector(_PollLikeSelector):\n ''\n _selector_cls=select.epoll\n _EVENT_READ=select.EPOLLIN\n _EVENT_WRITE=select.EPOLLOUT\n \n def fileno(self):\n return 
self._selector.fileno()\n \n def select(self,timeout=None ):\n if timeout is None :\n timeout=-1\n elif timeout <=0:\n timeout=0\n else :\n \n \n timeout=math.ceil(timeout *1e3)*1e-3\n \n \n \n \n max_ev=max(len(self._fd_to_key),1)\n \n ready=[]\n try :\n fd_event_list=self._selector.poll(timeout,max_ev)\n except InterruptedError:\n return ready\n for fd,event in fd_event_list:\n events=0\n if event&~select.EPOLLIN:\n events |=EVENT_WRITE\n if event&~select.EPOLLOUT:\n events |=EVENT_READ\n \n key=self._key_from_fd(fd)\n if key:\n ready.append((key,events&key.events))\n return ready\n \n def close(self):\n self._selector.close()\n super().close()\n \n \nif hasattr(select,'devpoll'):\n\n class DevpollSelector(_PollLikeSelector):\n ''\n _selector_cls=select.devpoll\n _EVENT_READ=select.POLLIN\n _EVENT_WRITE=select.POLLOUT\n \n def fileno(self):\n return self._selector.fileno()\n \n def close(self):\n self._selector.close()\n super().close()\n \n \nif hasattr(select,'kqueue'):\n\n class KqueueSelector(_BaseSelectorImpl):\n ''\n \n def __init__(self):\n super().__init__()\n self._selector=select.kqueue()\n \n def fileno(self):\n return self._selector.fileno()\n \n def register(self,fileobj,events,data=None ):\n key=super().register(fileobj,events,data)\n try :\n if events&EVENT_READ:\n kev=select.kevent(key.fd,select.KQ_FILTER_READ,\n select.KQ_EV_ADD)\n self._selector.control([kev],0,0)\n if events&EVENT_WRITE:\n kev=select.kevent(key.fd,select.KQ_FILTER_WRITE,\n select.KQ_EV_ADD)\n self._selector.control([kev],0,0)\n except :\n super().unregister(fileobj)\n raise\n return key\n \n def unregister(self,fileobj):\n key=super().unregister(fileobj)\n if key.events&EVENT_READ:\n kev=select.kevent(key.fd,select.KQ_FILTER_READ,\n select.KQ_EV_DELETE)\n try :\n self._selector.control([kev],0,0)\n except OSError:\n \n \n pass\n if key.events&EVENT_WRITE:\n kev=select.kevent(key.fd,select.KQ_FILTER_WRITE,\n select.KQ_EV_DELETE)\n try :\n self._selector.control([kev],0,0)\n except OSError:\n \n pass\n return key\n \n def select(self,timeout=None ):\n timeout=None if timeout is None else max(timeout,0)\n \n \n \n max_ev=max(len(self._fd_to_key),1)\n ready=[]\n try :\n kev_list=self._selector.control(None ,max_ev,timeout)\n except InterruptedError:\n return ready\n for kev in kev_list:\n fd=kev.ident\n flag=kev.filter\n events=0\n if flag ==select.KQ_FILTER_READ:\n events |=EVENT_READ\n if flag ==select.KQ_FILTER_WRITE:\n events |=EVENT_WRITE\n \n key=self._key_from_fd(fd)\n if key:\n ready.append((key,events&key.events))\n return ready\n \n def close(self):\n self._selector.close()\n super().close()\n \n \ndef _can_use(method):\n ''\n \n \n selector=getattr(select,method,None )\n if selector is None :\n \n return False\n \n \n try :\n selector_obj=selector()\n if method =='poll':\n \n selector_obj.poll(0)\n else :\n \n selector_obj.close()\n return True\n except OSError:\n return False\n \n \n \n \n \nif _can_use('kqueue'):\n DefaultSelector=KqueueSelector\nelif _can_use('epoll'):\n DefaultSelector=EpollSelector\nelif _can_use('devpoll'):\n DefaultSelector=DevpollSelector\nelif _can_use('poll'):\n DefaultSelector=PollSelector\nelse :\n DefaultSelector=SelectSelector\n", ["abc", "collections", "collections.abc", "math", "select", "sys"]], "shlex": [".py", "''\n\n\n\n\n\n\n\n\nimport os\nimport re\nimport sys\nfrom collections import deque\n\nfrom io import StringIO\n\n__all__=[\"shlex\",\"split\",\"quote\",\"join\"]\n\nclass shlex:\n ''\n def __init__(self,instream=None ,infile=None ,posix=False ,\n 
punctuation_chars=False ):\n if isinstance(instream,str):\n instream=StringIO(instream)\n if instream is not None :\n self.instream=instream\n self.infile=infile\n else :\n self.instream=sys.stdin\n self.infile=None\n self.posix=posix\n if posix:\n self.eof=None\n else :\n self.eof=''\n self.commenters='#'\n self.wordchars=('abcdfeghijklmnopqrstuvwxyz'\n 'ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_')\n if self.posix:\n self.wordchars +=('\u00df\u00e0\u00e1\u00e2\u00e3\u00e4\u00e5\u00e6\u00e7\u00e8\u00e9\u00ea\u00eb\u00ec\u00ed\u00ee\u00ef\u00f0\u00f1\u00f2\u00f3\u00f4\u00f5\u00f6\u00f8\u00f9\u00fa\u00fb\u00fc\u00fd\u00fe\u00ff'\n '\u00c0\u00c1\u00c2\u00c3\u00c4\u00c5\u00c6\u00c7\u00c8\u00c9\u00ca\u00cb\u00cc\u00cd\u00ce\u00cf\u00d0\u00d1\u00d2\u00d3\u00d4\u00d5\u00d6\u00d8\u00d9\u00da\u00db\u00dc\u00dd\u00de')\n self.whitespace=' \\t\\r\\n'\n self.whitespace_split=False\n self.quotes='\\'\"'\n self.escape='\\\\'\n self.escapedquotes='\"'\n self.state=' '\n self.pushback=deque()\n self.lineno=1\n self.debug=0\n self.token=''\n self.filestack=deque()\n self.source=None\n if not punctuation_chars:\n punctuation_chars=''\n elif punctuation_chars is True :\n punctuation_chars='();<>|&'\n self._punctuation_chars=punctuation_chars\n if punctuation_chars:\n \n self._pushback_chars=deque()\n \n self.wordchars +='~-./*?='\n \n t=self.wordchars.maketrans(dict.fromkeys(punctuation_chars))\n self.wordchars=self.wordchars.translate(t)\n \n @property\n def punctuation_chars(self):\n return self._punctuation_chars\n \n def push_token(self,tok):\n ''\n if self.debug >=1:\n print(\"shlex: pushing token \"+repr(tok))\n self.pushback.appendleft(tok)\n \n def push_source(self,newstream,newfile=None ):\n ''\n if isinstance(newstream,str):\n newstream=StringIO(newstream)\n self.filestack.appendleft((self.infile,self.instream,self.lineno))\n self.infile=newfile\n self.instream=newstream\n self.lineno=1\n if self.debug:\n if newfile is not None :\n print('shlex: pushing to file %s'%(self.infile,))\n else :\n print('shlex: pushing to stream %s'%(self.instream,))\n \n def pop_source(self):\n ''\n self.instream.close()\n (self.infile,self.instream,self.lineno)=self.filestack.popleft()\n if self.debug:\n print('shlex: popping to %s, line %d'\\\n %(self.instream,self.lineno))\n self.state=' '\n \n def get_token(self):\n ''\n if self.pushback:\n tok=self.pushback.popleft()\n if self.debug >=1:\n print(\"shlex: popping token \"+repr(tok))\n return tok\n \n raw=self.read_token()\n \n if self.source is not None :\n while raw ==self.source:\n spec=self.sourcehook(self.read_token())\n if spec:\n (newfile,newstream)=spec\n self.push_source(newstream,newfile)\n raw=self.get_token()\n \n while raw ==self.eof:\n if not self.filestack:\n return self.eof\n else :\n self.pop_source()\n raw=self.get_token()\n \n if self.debug >=1:\n if raw !=self.eof:\n print(\"shlex: token=\"+repr(raw))\n else :\n print(\"shlex: token=EOF\")\n return raw\n \n def read_token(self):\n quoted=False\n escapedstate=' '\n while True :\n if self.punctuation_chars and self._pushback_chars:\n nextchar=self._pushback_chars.pop()\n else :\n nextchar=self.instream.read(1)\n if nextchar =='\\n':\n self.lineno +=1\n if self.debug >=3:\n print(\"shlex: in state %r I see character: %r\"%(self.state,\n nextchar))\n if self.state is None :\n self.token=''\n break\n elif self.state ==' ':\n if not nextchar:\n self.state=None\n break\n elif nextchar in self.whitespace:\n if self.debug >=2:\n print(\"shlex: I see whitespace in whitespace state\")\n if self.token or 
(self.posix and quoted):\n break\n else :\n continue\n elif nextchar in self.commenters:\n self.instream.readline()\n self.lineno +=1\n elif self.posix and nextchar in self.escape:\n escapedstate='a'\n self.state=nextchar\n elif nextchar in self.wordchars:\n self.token=nextchar\n self.state='a'\n elif nextchar in self.punctuation_chars:\n self.token=nextchar\n self.state='c'\n elif nextchar in self.quotes:\n if not self.posix:\n self.token=nextchar\n self.state=nextchar\n elif self.whitespace_split:\n self.token=nextchar\n self.state='a'\n else :\n self.token=nextchar\n if self.token or (self.posix and quoted):\n break\n else :\n continue\n elif self.state in self.quotes:\n quoted=True\n if not nextchar:\n if self.debug >=2:\n print(\"shlex: I see EOF in quotes state\")\n \n raise ValueError(\"No closing quotation\")\n if nextchar ==self.state:\n if not self.posix:\n self.token +=nextchar\n self.state=' '\n break\n else :\n self.state='a'\n elif (self.posix and nextchar in self.escape and self.state\n in self.escapedquotes):\n escapedstate=self.state\n self.state=nextchar\n else :\n self.token +=nextchar\n elif self.state in self.escape:\n if not nextchar:\n if self.debug >=2:\n print(\"shlex: I see EOF in escape state\")\n \n raise ValueError(\"No escaped character\")\n \n \n if (escapedstate in self.quotes and\n nextchar !=self.state and nextchar !=escapedstate):\n self.token +=self.state\n self.token +=nextchar\n self.state=escapedstate\n elif self.state in ('a','c'):\n if not nextchar:\n self.state=None\n break\n elif nextchar in self.whitespace:\n if self.debug >=2:\n print(\"shlex: I see whitespace in word state\")\n self.state=' '\n if self.token or (self.posix and quoted):\n break\n else :\n continue\n elif nextchar in self.commenters:\n self.instream.readline()\n self.lineno +=1\n if self.posix:\n self.state=' '\n if self.token or (self.posix and quoted):\n break\n else :\n continue\n elif self.state =='c':\n if nextchar in self.punctuation_chars:\n self.token +=nextchar\n else :\n if nextchar not in self.whitespace:\n self._pushback_chars.append(nextchar)\n self.state=' '\n break\n elif self.posix and nextchar in self.quotes:\n self.state=nextchar\n elif self.posix and nextchar in self.escape:\n escapedstate='a'\n self.state=nextchar\n elif (nextchar in self.wordchars or nextchar in self.quotes\n or (self.whitespace_split and\n nextchar not in self.punctuation_chars)):\n self.token +=nextchar\n else :\n if self.punctuation_chars:\n self._pushback_chars.append(nextchar)\n else :\n self.pushback.appendleft(nextchar)\n if self.debug >=2:\n print(\"shlex: I see punctuation in word state\")\n self.state=' '\n if self.token or (self.posix and quoted):\n break\n else :\n continue\n result=self.token\n self.token=''\n if self.posix and not quoted and result =='':\n result=None\n if self.debug >1:\n if result:\n print(\"shlex: raw token=\"+repr(result))\n else :\n print(\"shlex: raw token=EOF\")\n return result\n \n def sourcehook(self,newfile):\n ''\n if newfile[0]=='\"':\n newfile=newfile[1:-1]\n \n if isinstance(self.infile,str)and not os.path.isabs(newfile):\n newfile=os.path.join(os.path.dirname(self.infile),newfile)\n return (newfile,open(newfile,\"r\"))\n \n def error_leader(self,infile=None ,lineno=None ):\n ''\n if infile is None :\n infile=self.infile\n if lineno is None :\n lineno=self.lineno\n return \"\\\"%s\\\", line %d: \"%(infile,lineno)\n \n def __iter__(self):\n return self\n \n def __next__(self):\n token=self.get_token()\n if token ==self.eof:\n raise StopIteration\n 
return token\n \ndef split(s,comments=False ,posix=True ):\n ''\n if s is None :\n import warnings\n warnings.warn(\"Passing None for 's' to shlex.split() is deprecated.\",\n DeprecationWarning,stacklevel=2)\n lex=shlex(s,posix=posix)\n lex.whitespace_split=True\n if not comments:\n lex.commenters=''\n return list(lex)\n \n \ndef join(split_command):\n ''\n return ' '.join(quote(arg)for arg in split_command)\n \n \n_find_unsafe=re.compile(r'[^\\w@%+=:,./-]',re.ASCII).search\n\ndef quote(s):\n ''\n if not s:\n return \"''\"\n if _find_unsafe(s)is None :\n return s\n \n \n \n return \"'\"+s.replace(\"'\",\"'\\\"'\\\"'\")+\"'\"\n \n \ndef _print_tokens(lexer):\n while 1:\n tt=lexer.get_token()\n if not tt:\n break\n print(\"Token: \"+repr(tt))\n \nif __name__ =='__main__':\n if len(sys.argv)==1:\n _print_tokens(shlex())\n else :\n fn=sys.argv[1]\n with open(fn)as f:\n _print_tokens(shlex(f,fn))\n", ["collections", "io", "os", "re", "sys", "warnings"]], "shutil": [".py", "''\n\n\n\n\n\nimport os\nimport sys\nimport stat\nimport fnmatch\nimport collections\nimport errno\n\ntry :\n import zlib\n del zlib\n _ZLIB_SUPPORTED=True\nexcept ImportError:\n _ZLIB_SUPPORTED=False\n \ntry :\n import bz2\n del bz2\n _BZ2_SUPPORTED=True\nexcept ImportError:\n _BZ2_SUPPORTED=False\n \ntry :\n import lzma\n del lzma\n _LZMA_SUPPORTED=True\nexcept ImportError:\n _LZMA_SUPPORTED=False\n \ntry :\n from pwd import getpwnam\nexcept ImportError:\n getpwnam=None\n \ntry :\n from grp import getgrnam\nexcept ImportError:\n getgrnam=None\n \n_WINDOWS=os.name =='nt'\nposix=nt=None\nif os.name =='posix':\n import posix\nelif _WINDOWS:\n import nt\n \nCOPY_BUFSIZE=1024 *1024 if _WINDOWS else 64 *1024\n_USE_CP_SENDFILE=hasattr(os,\"sendfile\")and sys.platform.startswith(\"linux\")\n_HAS_FCOPYFILE=posix and hasattr(posix,\"_fcopyfile\")\n\n__all__=[\"copyfileobj\",\"copyfile\",\"copymode\",\"copystat\",\"copy\",\"copy2\",\n\"copytree\",\"move\",\"rmtree\",\"Error\",\"SpecialFileError\",\n\"ExecError\",\"make_archive\",\"get_archive_formats\",\n\"register_archive_format\",\"unregister_archive_format\",\n\"get_unpack_formats\",\"register_unpack_format\",\n\"unregister_unpack_format\",\"unpack_archive\",\n\"ignore_patterns\",\"chown\",\"which\",\"get_terminal_size\",\n\"SameFileError\"]\n\n\nclass Error(OSError):\n pass\n \nclass SameFileError(Error):\n ''\n \nclass SpecialFileError(OSError):\n ''\n \n \nclass ExecError(OSError):\n ''\n \nclass ReadError(OSError):\n ''\n \nclass RegistryError(Exception):\n ''\n \n \nclass _GiveupOnFastCopy(Exception):\n ''\n\n \n \ndef _fastcopy_fcopyfile(fsrc,fdst,flags):\n ''\n\n \n try :\n infd=fsrc.fileno()\n outfd=fdst.fileno()\n except Exception as err:\n raise _GiveupOnFastCopy(err)\n \n try :\n posix._fcopyfile(infd,outfd,flags)\n except OSError as err:\n err.filename=fsrc.name\n err.filename2=fdst.name\n if err.errno in {errno.EINVAL,errno.ENOTSUP}:\n raise _GiveupOnFastCopy(err)\n else :\n raise err from None\n \ndef _fastcopy_sendfile(fsrc,fdst):\n ''\n\n\n \n \n \n \n \n \n \n \n \n \n global _USE_CP_SENDFILE\n try :\n infd=fsrc.fileno()\n outfd=fdst.fileno()\n except Exception as err:\n raise _GiveupOnFastCopy(err)\n \n \n \n \n \n \n try :\n blocksize=max(os.fstat(infd).st_size,2 **23)\n except OSError:\n blocksize=2 **27\n \n \n if sys.maxsize <2 **32:\n blocksize=min(blocksize,2 **30)\n \n offset=0\n while True :\n try :\n sent=os.sendfile(outfd,infd,offset,blocksize)\n except OSError as err:\n \n err.filename=fsrc.name\n err.filename2=fdst.name\n \n if err.errno 
==errno.ENOTSOCK:\n \n \n \n _USE_CP_SENDFILE=False\n raise _GiveupOnFastCopy(err)\n \n if err.errno ==errno.ENOSPC:\n raise err from None\n \n \n if offset ==0 and os.lseek(outfd,0,os.SEEK_CUR)==0:\n raise _GiveupOnFastCopy(err)\n \n raise err\n else :\n if sent ==0:\n break\n offset +=sent\n \ndef _copyfileobj_readinto(fsrc,fdst,length=COPY_BUFSIZE):\n ''\n\n\n \n \n fsrc_readinto=fsrc.readinto\n fdst_write=fdst.write\n with memoryview(bytearray(length))as mv:\n while True :\n n=fsrc_readinto(mv)\n if not n:\n break\n elif n 0:\n _copyfileobj_readinto(fsrc,fdst,min(file_size,COPY_BUFSIZE))\n return dst\n \n copyfileobj(fsrc,fdst)\n \n return dst\n \ndef copymode(src,dst,*,follow_symlinks=True ):\n ''\n\n\n\n\n\n \n sys.audit(\"shutil.copymode\",src,dst)\n \n if not follow_symlinks and _islink(src)and os.path.islink(dst):\n if hasattr(os,'lchmod'):\n stat_func,chmod_func=os.lstat,os.lchmod\n else :\n return\n else :\n stat_func,chmod_func=_stat,os.chmod\n \n st=stat_func(src)\n chmod_func(dst,stat.S_IMODE(st.st_mode))\n \nif hasattr(os,'listxattr'):\n def _copyxattr(src,dst,*,follow_symlinks=True ):\n ''\n\n\n\n\n\n \n \n try :\n names=os.listxattr(src,follow_symlinks=follow_symlinks)\n except OSError as e:\n if e.errno not in (errno.ENOTSUP,errno.ENODATA,errno.EINVAL):\n raise\n return\n for name in names:\n try :\n value=os.getxattr(src,name,follow_symlinks=follow_symlinks)\n os.setxattr(dst,name,value,follow_symlinks=follow_symlinks)\n except OSError as e:\n if e.errno not in (errno.EPERM,errno.ENOTSUP,errno.ENODATA,\n errno.EINVAL):\n raise\nelse :\n def _copyxattr(*args,**kwargs):\n pass\n \ndef copystat(src,dst,*,follow_symlinks=True ):\n ''\n\n\n\n\n\n\n\n\n\n \n sys.audit(\"shutil.copystat\",src,dst)\n \n def _nop(*args,ns=None ,follow_symlinks=None ):\n pass\n \n \n follow=follow_symlinks or not (_islink(src)and os.path.islink(dst))\n if follow:\n \n def lookup(name):\n return getattr(os,name,_nop)\n else :\n \n \n def lookup(name):\n fn=getattr(os,name,_nop)\n if fn in os.supports_follow_symlinks:\n return fn\n return _nop\n \n if isinstance(src,os.DirEntry):\n st=src.stat(follow_symlinks=follow)\n else :\n st=lookup(\"stat\")(src,follow_symlinks=follow)\n mode=stat.S_IMODE(st.st_mode)\n lookup(\"utime\")(dst,ns=(st.st_atime_ns,st.st_mtime_ns),\n follow_symlinks=follow)\n \n \n _copyxattr(src,dst,follow_symlinks=follow)\n try :\n lookup(\"chmod\")(dst,mode,follow_symlinks=follow)\n except NotImplementedError:\n \n \n \n \n \n \n \n \n \n \n pass\n if hasattr(st,'st_flags'):\n try :\n lookup(\"chflags\")(dst,st.st_flags,follow_symlinks=follow)\n except OSError as why:\n for err in 'EOPNOTSUPP','ENOTSUP':\n if hasattr(errno,err)and why.errno ==getattr(errno,err):\n break\n else :\n raise\n \ndef copy(src,dst,*,follow_symlinks=True ):\n ''\n\n\n\n\n\n\n\n\n\n \n if os.path.isdir(dst):\n dst=os.path.join(dst,os.path.basename(src))\n copyfile(src,dst,follow_symlinks=follow_symlinks)\n copymode(src,dst,follow_symlinks=follow_symlinks)\n return dst\n \ndef copy2(src,dst,*,follow_symlinks=True ):\n ''\n\n\n\n\n\n\n\n\n \n if os.path.isdir(dst):\n dst=os.path.join(dst,os.path.basename(src))\n copyfile(src,dst,follow_symlinks=follow_symlinks)\n copystat(src,dst,follow_symlinks=follow_symlinks)\n return dst\n \ndef ignore_patterns(*patterns):\n ''\n\n\n \n def _ignore_patterns(path,names):\n ignored_names=[]\n for pattern in patterns:\n ignored_names.extend(fnmatch.filter(names,pattern))\n return set(ignored_names)\n return _ignore_patterns\n \ndef 
_copytree(entries,src,dst,symlinks,ignore,copy_function,\nignore_dangling_symlinks,dirs_exist_ok=False ):\n if ignore is not None :\n ignored_names=ignore(os.fspath(src),[x.name for x in entries])\n else :\n ignored_names=set()\n \n os.makedirs(dst,exist_ok=dirs_exist_ok)\n errors=[]\n use_srcentry=copy_function is copy2 or copy_function is copy\n \n for srcentry in entries:\n if srcentry.name in ignored_names:\n continue\n srcname=os.path.join(src,srcentry.name)\n dstname=os.path.join(dst,srcentry.name)\n srcobj=srcentry if use_srcentry else srcname\n try :\n is_symlink=srcentry.is_symlink()\n if is_symlink and os.name =='nt':\n \n \n lstat=srcentry.stat(follow_symlinks=False )\n if lstat.st_reparse_tag ==stat.IO_REPARSE_TAG_MOUNT_POINT:\n is_symlink=False\n if is_symlink:\n linkto=os.readlink(srcname)\n if symlinks:\n \n \n \n os.symlink(linkto,dstname)\n copystat(srcobj,dstname,follow_symlinks=not symlinks)\n else :\n \n if not os.path.exists(linkto)and ignore_dangling_symlinks:\n continue\n \n if srcentry.is_dir():\n copytree(srcobj,dstname,symlinks,ignore,\n copy_function,dirs_exist_ok=dirs_exist_ok)\n else :\n copy_function(srcobj,dstname)\n elif srcentry.is_dir():\n copytree(srcobj,dstname,symlinks,ignore,copy_function,\n dirs_exist_ok=dirs_exist_ok)\n else :\n \n copy_function(srcobj,dstname)\n \n \n except Error as err:\n errors.extend(err.args[0])\n except OSError as why:\n errors.append((srcname,dstname,str(why)))\n try :\n copystat(src,dst)\n except OSError as why:\n \n if getattr(why,'winerror',None )is None :\n errors.append((src,dst,str(why)))\n if errors:\n raise Error(errors)\n return dst\n \ndef copytree(src,dst,symlinks=False ,ignore=None ,copy_function=copy2,\nignore_dangling_symlinks=False ,dirs_exist_ok=False ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n sys.audit(\"shutil.copytree\",src,dst)\n with os.scandir(src)as itr:\n entries=list(itr)\n return _copytree(entries=entries,src=src,dst=dst,symlinks=symlinks,\n ignore=ignore,copy_function=copy_function,\n ignore_dangling_symlinks=ignore_dangling_symlinks,\n dirs_exist_ok=dirs_exist_ok)\n \nif hasattr(os.stat_result,'st_file_attributes'):\n\n\n\n def _rmtree_isdir(entry):\n try :\n st=entry.stat(follow_symlinks=False )\n return (stat.S_ISDIR(st.st_mode)and not\n (st.st_file_attributes&stat.FILE_ATTRIBUTE_REPARSE_POINT\n and st.st_reparse_tag ==stat.IO_REPARSE_TAG_MOUNT_POINT))\n except OSError:\n return False\n \n def _rmtree_islink(path):\n try :\n st=os.lstat(path)\n return (stat.S_ISLNK(st.st_mode)or\n (st.st_file_attributes&stat.FILE_ATTRIBUTE_REPARSE_POINT\n and st.st_reparse_tag ==stat.IO_REPARSE_TAG_MOUNT_POINT))\n except OSError:\n return False\nelse :\n def _rmtree_isdir(entry):\n try :\n return entry.is_dir(follow_symlinks=False )\n except OSError:\n return False\n \n def _rmtree_islink(path):\n return os.path.islink(path)\n \n \ndef _rmtree_unsafe(path,onerror):\n try :\n with os.scandir(path)as scandir_it:\n entries=list(scandir_it)\n except OSError:\n onerror(os.scandir,path,sys.exc_info())\n entries=[]\n for entry in entries:\n fullname=entry.path\n if _rmtree_isdir(entry):\n try :\n if entry.is_symlink():\n \n \n \n raise OSError(\"Cannot call rmtree on a symbolic link\")\n except OSError:\n onerror(os.path.islink,fullname,sys.exc_info())\n continue\n _rmtree_unsafe(fullname,onerror)\n else :\n try :\n os.unlink(fullname)\n except OSError:\n onerror(os.unlink,fullname,sys.exc_info())\n try :\n os.rmdir(path)\n except OSError:\n onerror(os.rmdir,path,sys.exc_info())\n 
\n \ndef _rmtree_safe_fd(topfd,path,onerror):\n try :\n with os.scandir(topfd)as scandir_it:\n entries=list(scandir_it)\n except OSError as err:\n err.filename=path\n onerror(os.scandir,path,sys.exc_info())\n return\n for entry in entries:\n fullname=os.path.join(path,entry.name)\n try :\n is_dir=entry.is_dir(follow_symlinks=False )\n except OSError:\n is_dir=False\n else :\n if is_dir:\n try :\n orig_st=entry.stat(follow_symlinks=False )\n is_dir=stat.S_ISDIR(orig_st.st_mode)\n except OSError:\n onerror(os.lstat,fullname,sys.exc_info())\n continue\n if is_dir:\n try :\n dirfd=os.open(entry.name,os.O_RDONLY,dir_fd=topfd)\n except OSError:\n onerror(os.open,fullname,sys.exc_info())\n else :\n try :\n if os.path.samestat(orig_st,os.fstat(dirfd)):\n _rmtree_safe_fd(dirfd,fullname,onerror)\n try :\n os.rmdir(entry.name,dir_fd=topfd)\n except OSError:\n onerror(os.rmdir,fullname,sys.exc_info())\n else :\n try :\n \n \n \n raise OSError(\"Cannot call rmtree on a symbolic \"\n \"link\")\n except OSError:\n onerror(os.path.islink,fullname,sys.exc_info())\n finally :\n os.close(dirfd)\n else :\n try :\n os.unlink(entry.name,dir_fd=topfd)\n except OSError:\n onerror(os.unlink,fullname,sys.exc_info())\n \n_use_fd_functions=({os.open,os.stat,os.unlink,os.rmdir}<=\nos.supports_dir_fd and\nos.scandir in os.supports_fd and\nos.stat in os.supports_follow_symlinks)\n\ndef rmtree(path,ignore_errors=False ,onerror=None ):\n ''\n\n\n\n\n\n\n\n\n \n sys.audit(\"shutil.rmtree\",path)\n if ignore_errors:\n def onerror(*args):\n pass\n elif onerror is None :\n def onerror(*args):\n raise\n if _use_fd_functions:\n \n if isinstance(path,bytes):\n path=os.fsdecode(path)\n \n \n try :\n orig_st=os.lstat(path)\n except Exception:\n onerror(os.lstat,path,sys.exc_info())\n return\n try :\n fd=os.open(path,os.O_RDONLY)\n except Exception:\n onerror(os.lstat,path,sys.exc_info())\n return\n try :\n if os.path.samestat(orig_st,os.fstat(fd)):\n _rmtree_safe_fd(fd,path,onerror)\n try :\n os.rmdir(path)\n except OSError:\n onerror(os.rmdir,path,sys.exc_info())\n else :\n try :\n \n raise OSError(\"Cannot call rmtree on a symbolic link\")\n except OSError:\n onerror(os.path.islink,path,sys.exc_info())\n finally :\n os.close(fd)\n else :\n try :\n if _rmtree_islink(path):\n \n raise OSError(\"Cannot call rmtree on a symbolic link\")\n except OSError:\n onerror(os.path.islink,path,sys.exc_info())\n \n return\n return _rmtree_unsafe(path,onerror)\n \n \n \nrmtree.avoids_symlink_attacks=_use_fd_functions\n\ndef _basename(path):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n path=os.fspath(path)\n sep=os.path.sep+(os.path.altsep or '')\n return os.path.basename(path.rstrip(sep))\n \ndef move(src,dst,copy_function=copy2):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n sys.audit(\"shutil.move\",src,dst)\n real_dst=dst\n if os.path.isdir(dst):\n if _samefile(src,dst):\n \n \n os.rename(src,dst)\n return\n \n \n \n real_dst=os.path.join(dst,_basename(src))\n \n if os.path.exists(real_dst):\n raise Error(\"Destination path '%s' already exists\"%real_dst)\n try :\n os.rename(src,real_dst)\n except OSError:\n if os.path.islink(src):\n linkto=os.readlink(src)\n os.symlink(linkto,real_dst)\n os.unlink(src)\n elif os.path.isdir(src):\n if _destinsrc(src,dst):\n raise Error(\"Cannot move a directory '%s' into itself\"\n \" '%s'.\"%(src,dst))\n copytree(src,real_dst,copy_function=copy_function,\n symlinks=True )\n rmtree(src)\n else :\n copy_function(src,real_dst)\n os.unlink(src)\n return real_dst\n \ndef _destinsrc(src,dst):\n 
src=os.path.abspath(src)\n dst=os.path.abspath(dst)\n if not src.endswith(os.path.sep):\n src +=os.path.sep\n if not dst.endswith(os.path.sep):\n dst +=os.path.sep\n return dst.startswith(src)\n \ndef _get_gid(name):\n ''\n if getgrnam is None or name is None :\n return None\n try :\n result=getgrnam(name)\n except KeyError:\n result=None\n if result is not None :\n return result[2]\n return None\n \ndef _get_uid(name):\n ''\n if getpwnam is None or name is None :\n return None\n try :\n result=getpwnam(name)\n except KeyError:\n result=None\n if result is not None :\n return result[2]\n return None\n \ndef _make_tarball(base_name,base_dir,compress=\"gzip\",verbose=0,dry_run=0,\nowner=None ,group=None ,logger=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n if compress is None :\n tar_compression=''\n elif _ZLIB_SUPPORTED and compress =='gzip':\n tar_compression='gz'\n elif _BZ2_SUPPORTED and compress =='bzip2':\n tar_compression='bz2'\n elif _LZMA_SUPPORTED and compress =='xz':\n tar_compression='xz'\n else :\n raise ValueError(\"bad value for 'compress', or compression format not \"\n \"supported : {0}\".format(compress))\n \n import tarfile\n \n compress_ext='.'+tar_compression if compress else ''\n archive_name=base_name+'.tar'+compress_ext\n archive_dir=os.path.dirname(archive_name)\n \n if archive_dir and not os.path.exists(archive_dir):\n if logger is not None :\n logger.info(\"creating %s\",archive_dir)\n if not dry_run:\n os.makedirs(archive_dir)\n \n \n if logger is not None :\n logger.info('Creating tar archive')\n \n uid=_get_uid(owner)\n gid=_get_gid(group)\n \n def _set_uid_gid(tarinfo):\n if gid is not None :\n tarinfo.gid=gid\n tarinfo.gname=group\n if uid is not None :\n tarinfo.uid=uid\n tarinfo.uname=owner\n return tarinfo\n \n if not dry_run:\n tar=tarfile.open(archive_name,'w|%s'%tar_compression)\n try :\n tar.add(base_dir,filter=_set_uid_gid)\n finally :\n tar.close()\n \n return archive_name\n \ndef _make_zipfile(base_name,base_dir,verbose=0,dry_run=0,logger=None ):\n ''\n\n\n\n \n import zipfile\n \n zip_filename=base_name+\".zip\"\n archive_dir=os.path.dirname(base_name)\n \n if archive_dir and not os.path.exists(archive_dir):\n if logger is not None :\n logger.info(\"creating %s\",archive_dir)\n if not dry_run:\n os.makedirs(archive_dir)\n \n if logger is not None :\n logger.info(\"creating '%s' and adding '%s' to it\",\n zip_filename,base_dir)\n \n if not dry_run:\n with zipfile.ZipFile(zip_filename,\"w\",\n compression=zipfile.ZIP_DEFLATED)as zf:\n path=os.path.normpath(base_dir)\n if path !=os.curdir:\n zf.write(path,path)\n if logger is not None :\n logger.info(\"adding '%s'\",path)\n for dirpath,dirnames,filenames in os.walk(base_dir):\n for name in sorted(dirnames):\n path=os.path.normpath(os.path.join(dirpath,name))\n zf.write(path,path)\n if logger is not None :\n logger.info(\"adding '%s'\",path)\n for name in filenames:\n path=os.path.normpath(os.path.join(dirpath,name))\n if os.path.isfile(path):\n zf.write(path,path)\n if logger is not None :\n logger.info(\"adding '%s'\",path)\n \n return zip_filename\n \n_ARCHIVE_FORMATS={\n'tar':(_make_tarball,[('compress',None )],\"uncompressed tar file\"),\n}\n\nif _ZLIB_SUPPORTED:\n _ARCHIVE_FORMATS['gztar']=(_make_tarball,[('compress','gzip')],\n \"gzip'ed tar-file\")\n _ARCHIVE_FORMATS['zip']=(_make_zipfile,[],\"ZIP file\")\n \nif _BZ2_SUPPORTED:\n _ARCHIVE_FORMATS['bztar']=(_make_tarball,[('compress','bzip2')],\n \"bzip2'ed tar-file\")\n \nif _LZMA_SUPPORTED:\n 
_ARCHIVE_FORMATS['xztar']=(_make_tarball,[('compress','xz')],\n \"xz'ed tar-file\")\n \ndef get_archive_formats():\n ''\n\n\n \n formats=[(name,registry[2])for name,registry in\n _ARCHIVE_FORMATS.items()]\n formats.sort()\n return formats\n \ndef register_archive_format(name,function,extra_args=None ,description=''):\n ''\n\n\n\n\n\n\n \n if extra_args is None :\n extra_args=[]\n if not callable(function):\n raise TypeError('The %s object is not callable'%function)\n if not isinstance(extra_args,(tuple,list)):\n raise TypeError('extra_args needs to be a sequence')\n for element in extra_args:\n if not isinstance(element,(tuple,list))or len(element)!=2:\n raise TypeError('extra_args elements are : (arg_name, value)')\n \n _ARCHIVE_FORMATS[name]=(function,extra_args,description)\n \ndef unregister_archive_format(name):\n del _ARCHIVE_FORMATS[name]\n \ndef make_archive(base_name,format,root_dir=None ,base_dir=None ,verbose=0,\ndry_run=0,owner=None ,group=None ,logger=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n sys.audit(\"shutil.make_archive\",base_name,format,root_dir,base_dir)\n save_cwd=os.getcwd()\n if root_dir is not None :\n if logger is not None :\n logger.debug(\"changing into '%s'\",root_dir)\n base_name=os.path.abspath(base_name)\n if not dry_run:\n os.chdir(root_dir)\n \n if base_dir is None :\n base_dir=os.curdir\n \n kwargs={'dry_run':dry_run,'logger':logger}\n \n try :\n format_info=_ARCHIVE_FORMATS[format]\n except KeyError:\n raise ValueError(\"unknown archive format '%s'\"%format)from None\n \n func=format_info[0]\n for arg,val in format_info[1]:\n kwargs[arg]=val\n \n if format !='zip':\n kwargs['owner']=owner\n kwargs['group']=group\n \n try :\n filename=func(base_name,base_dir,**kwargs)\n finally :\n if root_dir is not None :\n if logger is not None :\n logger.debug(\"changing back to '%s'\",save_cwd)\n os.chdir(save_cwd)\n \n return filename\n \n \ndef get_unpack_formats():\n ''\n\n\n\n \n formats=[(name,info[0],info[3])for name,info in\n _UNPACK_FORMATS.items()]\n formats.sort()\n return formats\n \ndef _check_unpack_options(extensions,function,extra_args):\n ''\n \n existing_extensions={}\n for name,info in _UNPACK_FORMATS.items():\n for ext in info[0]:\n existing_extensions[ext]=name\n \n for extension in extensions:\n if extension in existing_extensions:\n msg='%s is already registered for \"%s\"'\n raise RegistryError(msg %(extension,\n existing_extensions[extension]))\n \n if not callable(function):\n raise TypeError('The registered function must be a callable')\n \n \ndef register_unpack_format(name,extensions,function,extra_args=None ,\ndescription=''):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if extra_args is None :\n extra_args=[]\n _check_unpack_options(extensions,function,extra_args)\n _UNPACK_FORMATS[name]=extensions,function,extra_args,description\n \ndef unregister_unpack_format(name):\n ''\n del _UNPACK_FORMATS[name]\n \ndef _ensure_directory(path):\n ''\n dirname=os.path.dirname(path)\n if not os.path.isdir(dirname):\n os.makedirs(dirname)\n \ndef _unpack_zipfile(filename,extract_dir):\n ''\n \n import zipfile\n \n if not zipfile.is_zipfile(filename):\n raise ReadError(\"%s is not a zip file\"%filename)\n \n zip=zipfile.ZipFile(filename)\n try :\n for info in zip.infolist():\n name=info.filename\n \n \n if name.startswith('/')or '..'in name:\n continue\n \n target=os.path.join(extract_dir,*name.split('/'))\n if not target:\n continue\n \n _ensure_directory(target)\n if not name.endswith('/'):\n \n data=zip.read(info.filename)\n f=open(target,'wb')\n try :\n 
f.write(data)\n finally :\n f.close()\n del data\n finally :\n zip.close()\n \ndef _unpack_tarfile(filename,extract_dir):\n ''\n \n import tarfile\n try :\n tarobj=tarfile.open(filename)\n except tarfile.TarError:\n raise ReadError(\n \"%s is not a compressed or uncompressed tar file\"%filename)\n try :\n tarobj.extractall(extract_dir)\n finally :\n tarobj.close()\n \n_UNPACK_FORMATS={\n'tar':(['.tar'],_unpack_tarfile,[],\"uncompressed tar file\"),\n'zip':(['.zip'],_unpack_zipfile,[],\"ZIP file\"),\n}\n\nif _ZLIB_SUPPORTED:\n _UNPACK_FORMATS['gztar']=(['.tar.gz','.tgz'],_unpack_tarfile,[],\n \"gzip'ed tar-file\")\n \nif _BZ2_SUPPORTED:\n _UNPACK_FORMATS['bztar']=(['.tar.bz2','.tbz2'],_unpack_tarfile,[],\n \"bzip2'ed tar-file\")\n \nif _LZMA_SUPPORTED:\n _UNPACK_FORMATS['xztar']=(['.tar.xz','.txz'],_unpack_tarfile,[],\n \"xz'ed tar-file\")\n \ndef _find_unpack_format(filename):\n for name,info in _UNPACK_FORMATS.items():\n for extension in info[0]:\n if filename.endswith(extension):\n return name\n return None\n \ndef unpack_archive(filename,extract_dir=None ,format=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n sys.audit(\"shutil.unpack_archive\",filename,extract_dir,format)\n \n if extract_dir is None :\n extract_dir=os.getcwd()\n \n extract_dir=os.fspath(extract_dir)\n filename=os.fspath(filename)\n \n if format is not None :\n try :\n format_info=_UNPACK_FORMATS[format]\n except KeyError:\n raise ValueError(\"Unknown unpack format '{0}'\".format(format))from None\n \n func=format_info[1]\n func(filename,extract_dir,**dict(format_info[2]))\n else :\n \n format=_find_unpack_format(filename)\n if format is None :\n raise ReadError(\"Unknown archive format '{0}'\".format(filename))\n \n func=_UNPACK_FORMATS[format][1]\n kwargs=dict(_UNPACK_FORMATS[format][2])\n func(filename,extract_dir,**kwargs)\n \n \nif hasattr(os,'statvfs'):\n\n __all__.append('disk_usage')\n _ntuple_diskusage=collections.namedtuple('usage','total used free')\n _ntuple_diskusage.total.__doc__='Total space in bytes'\n _ntuple_diskusage.used.__doc__='Used space in bytes'\n _ntuple_diskusage.free.__doc__='Free space in bytes'\n \n def disk_usage(path):\n ''\n\n\n\n \n st=os.statvfs(path)\n free=st.f_bavail *st.f_frsize\n total=st.f_blocks *st.f_frsize\n used=(st.f_blocks -st.f_bfree)*st.f_frsize\n return _ntuple_diskusage(total,used,free)\n \nelif _WINDOWS:\n\n __all__.append('disk_usage')\n _ntuple_diskusage=collections.namedtuple('usage','total used free')\n \n def disk_usage(path):\n ''\n\n\n\n \n total,free=nt._getdiskusage(path)\n used=total -free\n return _ntuple_diskusage(total,used,free)\n \n \ndef chown(path,user=None ,group=None ):\n ''\n\n\n\n \n sys.audit('shutil.chown',path,user,group)\n \n if user is None and group is None :\n raise ValueError(\"user and/or group must be set\")\n \n _user=user\n _group=group\n \n \n if user is None :\n _user=-1\n \n elif isinstance(user,str):\n _user=_get_uid(user)\n if _user is None :\n raise LookupError(\"no such user: {!r}\".format(user))\n \n if group is None :\n _group=-1\n elif not isinstance(group,int):\n _group=_get_gid(group)\n if _group is None :\n raise LookupError(\"no such group: {!r}\".format(group))\n \n os.chown(path,_user,_group)\n \ndef get_terminal_size(fallback=(80,24)):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n try :\n columns=int(os.environ['COLUMNS'])\n except (KeyError,ValueError):\n columns=0\n \n try :\n lines=int(os.environ['LINES'])\n except (KeyError,ValueError):\n lines=0\n \n \n if columns <=0 or lines <=0:\n try :\n 
size=os.get_terminal_size(sys.__stdout__.fileno())\n except (AttributeError,ValueError,OSError):\n \n \n size=os.terminal_size(fallback)\n if columns <=0:\n columns=size.columns\n if lines <=0:\n lines=size.lines\n \n return os.terminal_size((columns,lines))\n \n \n \n \n \ndef _access_check(fn,mode):\n return (os.path.exists(fn)and os.access(fn,mode)\n and not os.path.isdir(fn))\n \n \ndef which(cmd,mode=os.F_OK |os.X_OK,path=None ):\n ''\n\n\n\n\n\n\n\n \n \n \n \n if os.path.dirname(cmd):\n if _access_check(cmd,mode):\n return cmd\n return None\n \n use_bytes=isinstance(cmd,bytes)\n \n if path is None :\n path=os.environ.get(\"PATH\",None )\n if path is None :\n try :\n path=os.confstr(\"CS_PATH\")\n except (AttributeError,ValueError):\n \n path=os.defpath\n \n \n \n \n if not path:\n return None\n \n if use_bytes:\n path=os.fsencode(path)\n path=path.split(os.fsencode(os.pathsep))\n else :\n path=os.fsdecode(path)\n path=path.split(os.pathsep)\n \n if sys.platform ==\"win32\":\n \n curdir=os.curdir\n if use_bytes:\n curdir=os.fsencode(curdir)\n if curdir not in path:\n path.insert(0,curdir)\n \n \n pathext=os.environ.get(\"PATHEXT\",\"\").split(os.pathsep)\n if use_bytes:\n pathext=[os.fsencode(ext)for ext in pathext]\n \n \n \n \n if any(cmd.lower().endswith(ext.lower())for ext in pathext):\n files=[cmd]\n else :\n files=[cmd+ext for ext in pathext]\n else :\n \n \n files=[cmd]\n \n seen=set()\n for dir in path:\n normdir=os.path.normcase(dir)\n if not normdir in seen:\n seen.add(normdir)\n for thefile in files:\n name=os.path.join(dir,thefile)\n if _access_check(name,mode):\n return name\n return None\n", ["bz2", "collections", "errno", "fnmatch", "grp", "lzma", "nt", "os", "posix", "pwd", "stat", "sys", "tarfile", "zipfile", "zlib"]], "signal": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nCTRL_BREAK_EVENT=1\nCTRL_C_EVENT=0\nNSIG=23\nSIGABRT=22\nSIGBREAK=21\nSIGFPE=8\nSIGILL=4\nSIGINT=2\nSIGSEGV=11\nSIGTERM=15\nSIG_DFL=0\nSIG_IGN=1\n\ndef signal(signalnum,handler):\n pass\n", []], "site": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nimport sys\nimport os\nimport builtins\nimport _sitebuiltins\nimport io\n\n\nPREFIXES=[sys.prefix,sys.exec_prefix]\n\n\nENABLE_USER_SITE=None\n\n\n\n\nUSER_SITE=None\nUSER_BASE=None\n\n\ndef makepath(*paths):\n dir=os.path.join(*paths)\n try :\n dir=os.path.abspath(dir)\n except OSError:\n pass\n return dir,os.path.normcase(dir)\n \n \ndef abs_paths():\n ''\n for m in set(sys.modules.values()):\n if (getattr(getattr(m,'__loader__',None ),'__module__',None )not in\n ('_frozen_importlib','_frozen_importlib_external')):\n continue\n try :\n m.__file__=os.path.abspath(m.__file__)\n except (AttributeError,OSError,TypeError):\n pass\n try :\n m.__cached__=os.path.abspath(m.__cached__)\n except (AttributeError,OSError,TypeError):\n pass\n \n \ndef removeduppaths():\n ''\n \n \n \n L=[]\n known_paths=set()\n for dir in sys.path:\n \n \n \n dir,dircase=makepath(dir)\n if dircase not in known_paths:\n L.append(dir)\n known_paths.add(dircase)\n sys.path[:]=L\n return known_paths\n \n \ndef _init_pathinfo():\n ''\n d=set()\n for item in sys.path:\n try :\n if os.path.exists(item):\n _,itemcase=makepath(item)\n d.add(itemcase)\n except TypeError:\n continue\n return d\n \n \ndef addpackage(sitedir,name,known_paths):\n ''\n\n\n \n if known_paths is None :\n known_paths=_init_pathinfo()\n reset=True\n else :\n reset=False\n 
fullname=os.path.join(sitedir,name)\n try :\n f=io.TextIOWrapper(io.open_code(fullname))\n except OSError:\n return\n with f:\n for n,line in enumerate(f):\n if line.startswith(\"#\"):\n continue\n try :\n if line.startswith((\"import \",\"import\\t\")):\n exec(line)\n continue\n line=line.rstrip()\n dir,dircase=makepath(sitedir,line)\n if not dircase in known_paths and os.path.exists(dir):\n sys.path.append(dir)\n known_paths.add(dircase)\n except Exception:\n print(\"Error processing line {:d} of {}:\\n\".format(n+1,fullname),\n file=sys.stderr)\n import traceback\n for record in traceback.format_exception(*sys.exc_info()):\n for line in record.splitlines():\n print(' '+line,file=sys.stderr)\n print(\"\\nRemainder of file ignored\",file=sys.stderr)\n break\n if reset:\n known_paths=None\n return known_paths\n \n \ndef addsitedir(sitedir,known_paths=None ):\n ''\n \n if known_paths is None :\n known_paths=_init_pathinfo()\n reset=True\n else :\n reset=False\n sitedir,sitedircase=makepath(sitedir)\n if not sitedircase in known_paths:\n sys.path.append(sitedir)\n known_paths.add(sitedircase)\n try :\n names=os.listdir(sitedir)\n except OSError:\n return\n names=[name for name in names if name.endswith(\".pth\")]\n for name in sorted(names):\n addpackage(sitedir,name,known_paths)\n if reset:\n known_paths=None\n return known_paths\n \n \ndef check_enableusersite():\n ''\n\n\n\n\n\n\n\n \n if sys.flags.no_user_site:\n return False\n \n if hasattr(os,\"getuid\")and hasattr(os,\"geteuid\"):\n \n if os.geteuid()!=os.getuid():\n return None\n if hasattr(os,\"getgid\")and hasattr(os,\"getegid\"):\n \n if os.getegid()!=os.getgid():\n return None\n \n return True\n \n \n \n \n \n \n \n \n \ndef _getuserbase():\n env_base=os.environ.get(\"PYTHONUSERBASE\",None )\n if env_base:\n return env_base\n \n def joinuser(*args):\n return os.path.expanduser(os.path.join(*args))\n \n if os.name ==\"nt\":\n base=os.environ.get(\"APPDATA\")or \"~\"\n return joinuser(base,\"Python\")\n \n if sys.platform ==\"darwin\"and sys._framework:\n return joinuser(\"~\",\"Library\",sys._framework,\n \"%d.%d\"%sys.version_info[:2])\n \n return joinuser(\"~\",\".local\")\n \n \n \ndef _get_path(userbase):\n version=sys.version_info\n \n if os.name =='nt':\n return f'{userbase}\\\\Python{version[0]}{version[1]}\\\\site-packages'\n \n if sys.platform =='darwin'and sys._framework:\n return f'{userbase}/lib/python/site-packages'\n \n return f'{userbase}/lib/python{version[0]}.{version[1]}/site-packages'\n \n \ndef getuserbase():\n ''\n\n\n\n\n \n global USER_BASE\n if USER_BASE is None :\n USER_BASE=_getuserbase()\n return USER_BASE\n \n \ndef getusersitepackages():\n ''\n\n\n\n \n global USER_SITE\n userbase=getuserbase()\n \n if USER_SITE is None :\n USER_SITE=_get_path(userbase)\n \n return USER_SITE\n \ndef addusersitepackages(known_paths):\n ''\n\n\n\n \n \n \n user_site=getusersitepackages()\n \n if ENABLE_USER_SITE and os.path.isdir(user_site):\n addsitedir(user_site,known_paths)\n return known_paths\n \ndef getsitepackages(prefixes=None ):\n ''\n\n\n\n\n \n sitepackages=[]\n seen=set()\n \n if prefixes is None :\n prefixes=PREFIXES\n \n for prefix in prefixes:\n if not prefix or prefix in seen:\n continue\n seen.add(prefix)\n \n libdirs=[sys.platlibdir]\n if sys.platlibdir !=\"lib\":\n libdirs.append(\"lib\")\n \n if os.sep =='/':\n for libdir in libdirs:\n path=os.path.join(prefix,libdir,\n \"python%d.%d\"%sys.version_info[:2],\n \"site-packages\")\n sitepackages.append(path)\n else :\n sitepackages.append(prefix)\n \n 
for libdir in libdirs:\n path=os.path.join(prefix,libdir,\"site-packages\")\n sitepackages.append(path)\n return sitepackages\n \ndef addsitepackages(known_paths,prefixes=None ):\n ''\n for sitedir in getsitepackages(prefixes):\n if os.path.isdir(sitedir):\n addsitedir(sitedir,known_paths)\n \n return known_paths\n \ndef setquit():\n ''\n\n\n\n\n \n if os.sep =='\\\\':\n eof='Ctrl-Z plus Return'\n else :\n eof='Ctrl-D (i.e. EOF)'\n \n builtins.quit=_sitebuiltins.Quitter('quit',eof)\n builtins.exit=_sitebuiltins.Quitter('exit',eof)\n \n \ndef setcopyright():\n ''\n builtins.copyright=_sitebuiltins._Printer(\"copyright\",sys.copyright)\n if sys.platform[:4]=='java':\n builtins.credits=_sitebuiltins._Printer(\n \"credits\",\n \"Jython is maintained by the Jython developers (www.jython.org).\")\n else :\n builtins.credits=_sitebuiltins._Printer(\"credits\",\"\"\"\\\n Thanks to CWI, CNRI, BeOpen.com, Zope Corporation and a cast of thousands\n for supporting Python development. See www.python.org for more information.\"\"\")\n files,dirs=[],[]\n \n \n if hasattr(os,'__file__'):\n here=os.path.dirname(os.__file__)\n files.extend([\"LICENSE.txt\",\"LICENSE\"])\n dirs.extend([os.path.join(here,os.pardir),here,os.curdir])\n builtins.license=_sitebuiltins._Printer(\n \"license\",\n \"See https://www.python.org/psf/license/\",\n files,dirs)\n \n \ndef sethelper():\n builtins.help=_sitebuiltins._Helper()\n \ndef enablerlcompleter():\n ''\n\n\n\n\n\n\n \n def register_readline():\n import atexit\n try :\n import readline\n import rlcompleter\n except ImportError:\n return\n \n \n \n readline_doc=getattr(readline,'__doc__','')\n if readline_doc is not None and 'libedit'in readline_doc:\n readline.parse_and_bind('bind ^I rl_complete')\n else :\n readline.parse_and_bind('tab: complete')\n \n try :\n readline.read_init_file()\n except OSError:\n \n \n \n \n pass\n \n if readline.get_current_history_length()==0:\n \n \n \n \n \n history=os.path.join(os.path.expanduser('~'),\n '.python_history')\n try :\n readline.read_history_file(history)\n except OSError:\n pass\n \n def write_history():\n try :\n readline.write_history_file(history)\n except OSError:\n \n \n pass\n \n atexit.register(write_history)\n \n sys.__interactivehook__=register_readline\n \ndef venv(known_paths):\n global PREFIXES,ENABLE_USER_SITE\n \n env=os.environ\n if sys.platform =='darwin'and '__PYVENV_LAUNCHER__'in env:\n executable=sys._base_executable=os.environ['__PYVENV_LAUNCHER__']\n else :\n executable=sys.executable\n exe_dir,_=os.path.split(os.path.abspath(executable))\n site_prefix=os.path.dirname(exe_dir)\n sys._home=None\n conf_basename='pyvenv.cfg'\n candidate_confs=[\n conffile for conffile in (\n os.path.join(exe_dir,conf_basename),\n os.path.join(site_prefix,conf_basename)\n )\n if os.path.isfile(conffile)\n ]\n \n if candidate_confs:\n virtual_conf=candidate_confs[0]\n system_site=\"true\"\n \n \n with open(virtual_conf,encoding='utf-8')as f:\n for line in f:\n if '='in line:\n key,_,value=line.partition('=')\n key=key.strip().lower()\n value=value.strip()\n if key =='include-system-site-packages':\n system_site=value.lower()\n elif key =='home':\n sys._home=value\n \n sys.prefix=sys.exec_prefix=site_prefix\n \n \n addsitepackages(known_paths,[sys.prefix])\n \n \n \n if system_site ==\"true\":\n PREFIXES.insert(0,sys.prefix)\n else :\n PREFIXES=[sys.prefix]\n ENABLE_USER_SITE=False\n \n return known_paths\n \n \ndef execsitecustomize():\n ''\n try :\n try :\n import sitecustomize\n except ImportError as exc:\n if exc.name 
=='sitecustomize':\n pass\n else :\n raise\n except Exception as err:\n if sys.flags.verbose:\n sys.excepthook(*sys.exc_info())\n else :\n sys.stderr.write(\n \"Error in sitecustomize; set PYTHONVERBOSE for traceback:\\n\"\n \"%s: %s\\n\"%\n (err.__class__.__name__,err))\n \n \ndef execusercustomize():\n ''\n try :\n try :\n import usercustomize\n except ImportError as exc:\n if exc.name =='usercustomize':\n pass\n else :\n raise\n except Exception as err:\n if sys.flags.verbose:\n sys.excepthook(*sys.exc_info())\n else :\n sys.stderr.write(\n \"Error in usercustomize; set PYTHONVERBOSE for traceback:\\n\"\n \"%s: %s\\n\"%\n (err.__class__.__name__,err))\n \n \ndef main():\n ''\n\n\n\n \n global ENABLE_USER_SITE\n \n orig_path=sys.path[:]\n known_paths=removeduppaths()\n if orig_path !=sys.path:\n \n \n abs_paths()\n \n known_paths=venv(known_paths)\n if ENABLE_USER_SITE is None :\n ENABLE_USER_SITE=check_enableusersite()\n known_paths=addusersitepackages(known_paths)\n known_paths=addsitepackages(known_paths)\n setquit()\n setcopyright()\n sethelper()\n if not sys.flags.isolated:\n enablerlcompleter()\n execsitecustomize()\n if ENABLE_USER_SITE:\n execusercustomize()\n \n \n \nif not sys.flags.no_site:\n main()\n \ndef _script():\n help=\"\"\"\\\n %s [--user-base] [--user-site]\n\n Without arguments print some useful information\n With arguments print the value of USER_BASE and/or USER_SITE separated\n by '%s'.\n\n Exit codes with --user-base or --user-site:\n 0 - user site directory is enabled\n 1 - user site directory is disabled by user\n 2 - user site directory is disabled by super user\n or for security reasons\n >2 - unknown error\n \"\"\"\n args=sys.argv[1:]\n if not args:\n user_base=getuserbase()\n user_site=getusersitepackages()\n print(\"sys.path = [\")\n for dir in sys.path:\n print(\" %r,\"%(dir,))\n print(\"]\")\n print(\"USER_BASE: %r (%s)\"%(user_base,\n \"exists\"if os.path.isdir(user_base)else \"doesn't exist\"))\n print(\"USER_SITE: %r (%s)\"%(user_site,\n \"exists\"if os.path.isdir(user_site)else \"doesn't exist\"))\n print(\"ENABLE_USER_SITE: %r\"%ENABLE_USER_SITE)\n sys.exit(0)\n \n buffer=[]\n if '--user-base'in args:\n buffer.append(USER_BASE)\n if '--user-site'in args:\n buffer.append(USER_SITE)\n \n if buffer:\n print(os.pathsep.join(buffer))\n if ENABLE_USER_SITE:\n sys.exit(0)\n elif ENABLE_USER_SITE is False :\n sys.exit(1)\n elif ENABLE_USER_SITE is None :\n sys.exit(2)\n else :\n sys.exit(3)\n else :\n import textwrap\n print(textwrap.dedent(help %(sys.argv[0],os.pathsep)))\n sys.exit(10)\n \nif __name__ =='__main__':\n _script()\n", ["_sitebuiltins", "atexit", "builtins", "io", "os", "readline", "rlcompleter", "sitecustomize", "sys", "textwrap", "traceback", "usercustomize"]], "socket": [".py", "\n\n\n\"\"\"\\\nThis module provides socket operations and some related functions.\nOn Unix, it supports IP (Internet Protocol) and Unix domain sockets.\nOn other systems, it only supports IP. 
Functions specific for a\nsocket are available as methods of the socket object.\n\nFunctions:\n\nsocket() -- create a new socket object\nsocketpair() -- create a pair of new socket objects [*]\nfromfd() -- create a socket object from an open file descriptor [*]\nsend_fds() -- Send file descriptor to the socket.\nrecv_fds() -- Recieve file descriptors from the socket.\nfromshare() -- create a socket object from data received from socket.share() [*]\ngethostname() -- return the current hostname\ngethostbyname() -- map a hostname to its IP number\ngethostbyaddr() -- map an IP number or hostname to DNS info\ngetservbyname() -- map a service name and a protocol name to a port number\ngetprotobyname() -- map a protocol name (e.g. 'tcp') to a number\nntohs(), ntohl() -- convert 16, 32 bit int from network to host byte order\nhtons(), htonl() -- convert 16, 32 bit int from host to network byte order\ninet_aton() -- convert IP addr string (123.45.67.89) to 32-bit packed format\ninet_ntoa() -- convert 32-bit packed format IP to string (123.45.67.89)\nsocket.getdefaulttimeout() -- get the default timeout value\nsocket.setdefaulttimeout() -- set the default timeout value\ncreate_connection() -- connects to an address, with an optional timeout and\n optional source address.\n\n [*] not available on all platforms!\n\nSpecial objects:\n\nSocketType -- type object for socket objects\nerror -- exception raised for I/O errors\nhas_ipv6 -- boolean value indicating if IPv6 is supported\n\nIntEnum constants:\n\nAF_INET, AF_UNIX -- socket domains (first argument to socket() call)\nSOCK_STREAM, SOCK_DGRAM, SOCK_RAW -- socket types (second argument)\n\nInteger constants:\n\nMany other constants may be defined; these may be used in calls to\nthe setsockopt() and getsockopt() methods.\n\"\"\"\n\nimport _socket\nfrom _socket import *\n\nimport os,sys,io,selectors\nfrom enum import IntEnum,IntFlag\n\ntry :\n import errno\nexcept ImportError:\n errno=None\nEBADF=getattr(errno,'EBADF',9)\nEAGAIN=getattr(errno,'EAGAIN',11)\nEWOULDBLOCK=getattr(errno,'EWOULDBLOCK',11)\n\n__all__=[\"fromfd\",\"getfqdn\",\"create_connection\",\"create_server\",\n\"has_dualstack_ipv6\",\"AddressFamily\",\"SocketKind\"]\n__all__.extend(os._get_exports_list(_socket))\n\n\n\n\n\n\n\nIntEnum._convert_(\n'AddressFamily',\n__name__,\nlambda C:C.isupper()and C.startswith('AF_'))\n\nIntEnum._convert_(\n'SocketKind',\n__name__,\nlambda C:C.isupper()and C.startswith('SOCK_'))\n\nIntFlag._convert_(\n'MsgFlag',\n__name__,\nlambda C:C.isupper()and C.startswith('MSG_'))\n\nIntFlag._convert_(\n'AddressInfo',\n__name__,\nlambda C:C.isupper()and C.startswith('AI_'))\n\n_LOCALHOST='127.0.0.1'\n_LOCALHOST_V6='::1'\n\n\ndef _intenum_converter(value,enum_klass):\n ''\n\n\n \n try :\n return enum_klass(value)\n except ValueError:\n return value\n \n \n \nif sys.platform.lower().startswith(\"win\"):\n errorTab={}\n errorTab[6]=\"Specified event object handle is invalid.\"\n errorTab[8]=\"Insufficient memory available.\"\n errorTab[87]=\"One or more parameters are invalid.\"\n errorTab[995]=\"Overlapped operation aborted.\"\n errorTab[996]=\"Overlapped I/O event object not in signaled state.\"\n errorTab[997]=\"Overlapped operation will complete later.\"\n errorTab[10004]=\"The operation was interrupted.\"\n errorTab[10009]=\"A bad file handle was passed.\"\n errorTab[10013]=\"Permission denied.\"\n errorTab[10014]=\"A fault occurred on the network??\"\n errorTab[10022]=\"An invalid operation was attempted.\"\n errorTab[10024]=\"Too many open files.\"\n 
errorTab[10035]=\"The socket operation would block\"\n errorTab[10036]=\"A blocking operation is already in progress.\"\n errorTab[10037]=\"Operation already in progress.\"\n errorTab[10038]=\"Socket operation on nonsocket.\"\n errorTab[10039]=\"Destination address required.\"\n errorTab[10040]=\"Message too long.\"\n errorTab[10041]=\"Protocol wrong type for socket.\"\n errorTab[10042]=\"Bad protocol option.\"\n errorTab[10043]=\"Protocol not supported.\"\n errorTab[10044]=\"Socket type not supported.\"\n errorTab[10045]=\"Operation not supported.\"\n errorTab[10046]=\"Protocol family not supported.\"\n errorTab[10047]=\"Address family not supported by protocol family.\"\n errorTab[10048]=\"The network address is in use.\"\n errorTab[10049]=\"Cannot assign requested address.\"\n errorTab[10050]=\"Network is down.\"\n errorTab[10051]=\"Network is unreachable.\"\n errorTab[10052]=\"Network dropped connection on reset.\"\n errorTab[10053]=\"Software caused connection abort.\"\n errorTab[10054]=\"The connection has been reset.\"\n errorTab[10055]=\"No buffer space available.\"\n errorTab[10056]=\"Socket is already connected.\"\n errorTab[10057]=\"Socket is not connected.\"\n errorTab[10058]=\"The network has been shut down.\"\n errorTab[10059]=\"Too many references.\"\n errorTab[10060]=\"The operation timed out.\"\n errorTab[10061]=\"Connection refused.\"\n errorTab[10062]=\"Cannot translate name.\"\n errorTab[10063]=\"The name is too long.\"\n errorTab[10064]=\"The host is down.\"\n errorTab[10065]=\"The host is unreachable.\"\n errorTab[10066]=\"Directory not empty.\"\n errorTab[10067]=\"Too many processes.\"\n errorTab[10068]=\"User quota exceeded.\"\n errorTab[10069]=\"Disk quota exceeded.\"\n errorTab[10070]=\"Stale file handle reference.\"\n errorTab[10071]=\"Item is remote.\"\n errorTab[10091]=\"Network subsystem is unavailable.\"\n errorTab[10092]=\"Winsock.dll version out of range.\"\n errorTab[10093]=\"Successful WSAStartup not yet performed.\"\n errorTab[10101]=\"Graceful shutdown in progress.\"\n errorTab[10102]=\"No more results from WSALookupServiceNext.\"\n errorTab[10103]=\"Call has been canceled.\"\n errorTab[10104]=\"Procedure call table is invalid.\"\n errorTab[10105]=\"Service provider is invalid.\"\n errorTab[10106]=\"Service provider failed to initialize.\"\n errorTab[10107]=\"System call failure.\"\n errorTab[10108]=\"Service not found.\"\n errorTab[10109]=\"Class type not found.\"\n errorTab[10110]=\"No more results from WSALookupServiceNext.\"\n errorTab[10111]=\"Call was canceled.\"\n errorTab[10112]=\"Database query was refused.\"\n errorTab[11001]=\"Host not found.\"\n errorTab[11002]=\"Nonauthoritative host not found.\"\n errorTab[11003]=\"This is a nonrecoverable error.\"\n errorTab[11004]=\"Valid name, no data record requested type.\"\n errorTab[11005]=\"QoS receivers.\"\n errorTab[11006]=\"QoS senders.\"\n errorTab[11007]=\"No QoS senders.\"\n errorTab[11008]=\"QoS no receivers.\"\n errorTab[11009]=\"QoS request confirmed.\"\n errorTab[11010]=\"QoS admission error.\"\n errorTab[11011]=\"QoS policy failure.\"\n errorTab[11012]=\"QoS bad style.\"\n errorTab[11013]=\"QoS bad object.\"\n errorTab[11014]=\"QoS traffic control error.\"\n errorTab[11015]=\"QoS generic error.\"\n errorTab[11016]=\"QoS service type error.\"\n errorTab[11017]=\"QoS flowspec error.\"\n errorTab[11018]=\"Invalid QoS provider buffer.\"\n errorTab[11019]=\"Invalid QoS filter style.\"\n errorTab[11020]=\"Invalid QoS filter style.\"\n errorTab[11021]=\"Incorrect QoS filter count.\"\n 
errorTab[11022]=\"Invalid QoS object length.\"\n errorTab[11023]=\"Incorrect QoS flow count.\"\n errorTab[11024]=\"Unrecognized QoS object.\"\n errorTab[11025]=\"Invalid QoS policy object.\"\n errorTab[11026]=\"Invalid QoS flow descriptor.\"\n errorTab[11027]=\"Invalid QoS provider-specific flowspec.\"\n errorTab[11028]=\"Invalid QoS provider-specific filterspec.\"\n errorTab[11029]=\"Invalid QoS shape discard mode object.\"\n errorTab[11030]=\"Invalid QoS shaping rate object.\"\n errorTab[11031]=\"Reserved policy QoS element type.\"\n __all__.append(\"errorTab\")\n \n \nclass _GiveupOnSendfile(Exception):pass\n\n\nclass socket(_socket.socket):\n\n ''\n \n __slots__=[\"__weakref__\",\"_io_refs\",\"_closed\"]\n \n def __init__(self,family=-1,type=-1,proto=-1,fileno=None ):\n \n \n \n \n if fileno is None :\n if family ==-1:\n family=AF_INET\n if type ==-1:\n type=SOCK_STREAM\n if proto ==-1:\n proto=0\n _socket.socket.__init__(self,family,type,proto,fileno)\n self._io_refs=0\n self._closed=False\n \n def __enter__(self):\n return self\n \n def __exit__(self,*args):\n if not self._closed:\n self.close()\n \n def __repr__(self):\n ''\n\n \n closed=getattr(self,'_closed',False )\n s=\"<%s.%s%s fd=%i, family=%s, type=%s, proto=%i\"\\\n %(self.__class__.__module__,\n self.__class__.__qualname__,\n \" [closed]\"if closed else \"\",\n self.fileno(),\n self.family,\n self.type,\n self.proto)\n if not closed:\n try :\n laddr=self.getsockname()\n if laddr:\n s +=\", laddr=%s\"%str(laddr)\n except error:\n pass\n try :\n raddr=self.getpeername()\n if raddr:\n s +=\", raddr=%s\"%str(raddr)\n except error:\n pass\n s +='>'\n return s\n \n def __getstate__(self):\n raise TypeError(f\"cannot pickle {self.__class__.__name__!r} object\")\n \n def dup(self):\n ''\n\n\n\n \n fd=dup(self.fileno())\n sock=self.__class__(self.family,self.type,self.proto,fileno=fd)\n sock.settimeout(self.gettimeout())\n return sock\n \n def accept(self):\n ''\n\n\n\n\n \n fd,addr=self._accept()\n sock=socket(self.family,self.type,self.proto,fileno=fd)\n \n \n \n if getdefaulttimeout()is None and self.gettimeout():\n sock.setblocking(True )\n return sock,addr\n \n def makefile(self,mode=\"r\",buffering=None ,*,\n encoding=None ,errors=None ,newline=None ):\n ''\n\n\n\n \n \n if not set(mode)<={\"r\",\"w\",\"b\"}:\n raise ValueError(\"invalid mode %r (only r, w, b allowed)\"%(mode,))\n writing=\"w\"in mode\n reading=\"r\"in mode or not writing\n assert reading or writing\n binary=\"b\"in mode\n rawmode=\"\"\n if reading:\n rawmode +=\"r\"\n if writing:\n rawmode +=\"w\"\n raw=SocketIO(self,rawmode)\n self._io_refs +=1\n if buffering is None :\n buffering=-1\n if buffering <0:\n buffering=io.DEFAULT_BUFFER_SIZE\n if buffering ==0:\n if not binary:\n raise ValueError(\"unbuffered streams must be binary\")\n return raw\n if reading and writing:\n buffer=io.BufferedRWPair(raw,raw,buffering)\n elif reading:\n buffer=io.BufferedReader(raw,buffering)\n else :\n assert writing\n buffer=io.BufferedWriter(raw,buffering)\n if binary:\n return buffer\n text=io.TextIOWrapper(buffer,encoding,errors,newline)\n text.mode=mode\n return text\n \n if hasattr(os,'sendfile'):\n \n def _sendfile_use_sendfile(self,file,offset=0,count=None ):\n self._check_sendfile_params(file,offset,count)\n sockno=self.fileno()\n try :\n fileno=file.fileno()\n except (AttributeError,io.UnsupportedOperation)as err:\n raise _GiveupOnSendfile(err)\n try :\n fsize=os.fstat(fileno).st_size\n except OSError as err:\n raise _GiveupOnSendfile(err)\n if not fsize:\n return 0\n 
\n blocksize=min(count or fsize,2 **30)\n timeout=self.gettimeout()\n if timeout ==0:\n raise ValueError(\"non-blocking sockets are not supported\")\n \n \n \n if hasattr(selectors,'PollSelector'):\n selector=selectors.PollSelector()\n else :\n selector=selectors.SelectSelector()\n selector.register(sockno,selectors.EVENT_WRITE)\n \n total_sent=0\n \n selector_select=selector.select\n os_sendfile=os.sendfile\n try :\n while True :\n if timeout and not selector_select(timeout):\n raise _socket.timeout('timed out')\n if count:\n blocksize=count -total_sent\n if blocksize <=0:\n break\n try :\n sent=os_sendfile(sockno,fileno,offset,blocksize)\n except BlockingIOError:\n if not timeout:\n \n \n selector_select()\n continue\n except OSError as err:\n if total_sent ==0:\n \n \n \n \n raise _GiveupOnSendfile(err)\n raise err from None\n else :\n if sent ==0:\n break\n offset +=sent\n total_sent +=sent\n return total_sent\n finally :\n if total_sent >0 and hasattr(file,'seek'):\n file.seek(offset)\n else :\n def _sendfile_use_sendfile(self,file,offset=0,count=None ):\n raise _GiveupOnSendfile(\n \"os.sendfile() not available on this platform\")\n \n def _sendfile_use_send(self,file,offset=0,count=None ):\n self._check_sendfile_params(file,offset,count)\n if self.gettimeout()==0:\n raise ValueError(\"non-blocking sockets are not supported\")\n if offset:\n file.seek(offset)\n blocksize=min(count,8192)if count else 8192\n total_sent=0\n \n file_read=file.read\n sock_send=self.send\n try :\n while True :\n if count:\n blocksize=min(count -total_sent,blocksize)\n if blocksize <=0:\n break\n data=memoryview(file_read(blocksize))\n if not data:\n break\n while True :\n try :\n sent=sock_send(data)\n except BlockingIOError:\n continue\n else :\n total_sent +=sent\n if sent 0 and hasattr(file,'seek'):\n file.seek(offset+total_sent)\n \n def _check_sendfile_params(self,file,offset,count):\n if 'b'not in getattr(file,'mode','b'):\n raise ValueError(\"file should be opened in binary mode\")\n if not self.type&SOCK_STREAM:\n raise ValueError(\"only SOCK_STREAM type sockets are supported\")\n if count is not None :\n if not isinstance(count,int):\n raise TypeError(\n \"count must be a positive integer (got {!r})\".format(count))\n if count <=0:\n raise ValueError(\n \"count must be a positive integer (got {!r})\".format(count))\n \n def sendfile(self,file,offset=0,count=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n try :\n return self._sendfile_use_sendfile(file,offset,count)\n except _GiveupOnSendfile:\n return self._sendfile_use_send(file,offset,count)\n \n def _decref_socketios(self):\n if self._io_refs >0:\n self._io_refs -=1\n if self._closed:\n self.close()\n \n def _real_close(self,_ss=_socket.socket):\n \n _ss.close(self)\n \n def close(self):\n \n self._closed=True\n if self._io_refs <=0:\n self._real_close()\n \n def detach(self):\n ''\n\n\n\n\n \n self._closed=True\n return super().detach()\n \n @property\n def family(self):\n ''\n \n return _intenum_converter(super().family,AddressFamily)\n \n @property\n def type(self):\n ''\n \n return _intenum_converter(super().type,SocketKind)\n \n if os.name =='nt':\n def get_inheritable(self):\n return os.get_handle_inheritable(self.fileno())\n def set_inheritable(self,inheritable):\n os.set_handle_inheritable(self.fileno(),inheritable)\n else :\n def get_inheritable(self):\n return os.get_inheritable(self.fileno())\n def set_inheritable(self,inheritable):\n os.set_inheritable(self.fileno(),inheritable)\n get_inheritable.__doc__=\"Get the inheritable flag 
of the socket\"\n set_inheritable.__doc__=\"Set the inheritable flag of the socket\"\n \ndef fromfd(fd,family,type,proto=0):\n ''\n\n\n\n \n nfd=dup(fd)\n return socket(family,type,proto,nfd)\n \nif hasattr(_socket.socket,\"sendmsg\"):\n import array\n \n def send_fds(sock,buffers,fds,flags=0,address=None ):\n ''\n\n\n \n return sock.sendmsg(buffers,[(_socket.SOL_SOCKET,\n _socket.SCM_RIGHTS,array.array(\"i\",fds))])\n __all__.append(\"send_fds\")\n \nif hasattr(_socket.socket,\"recvmsg\"):\n import array\n \n def recv_fds(sock,bufsize,maxfds,flags=0):\n ''\n\n\n\n\n \n \n fds=array.array(\"i\")\n msg,ancdata,flags,addr=sock.recvmsg(bufsize,\n _socket.CMSG_LEN(maxfds *fds.itemsize))\n for cmsg_level,cmsg_type,cmsg_data in ancdata:\n if (cmsg_level ==_socket.SOL_SOCKET and cmsg_type ==_socket.SCM_RIGHTS):\n fds.frombytes(cmsg_data[:\n len(cmsg_data)-(len(cmsg_data)%fds.itemsize)])\n \n return msg,list(fds),flags,addr\n __all__.append(\"recv_fds\")\n \nif hasattr(_socket.socket,\"share\"):\n def fromshare(info):\n ''\n\n\n\n \n return socket(0,0,0,info)\n __all__.append(\"fromshare\")\n \nif hasattr(_socket,\"socketpair\"):\n\n def socketpair(family=None ,type=SOCK_STREAM,proto=0):\n ''\n\n\n\n\n\n \n if family is None :\n try :\n family=AF_UNIX\n except NameError:\n family=AF_INET\n a,b=_socket.socketpair(family,type,proto)\n a=socket(family,type,proto,a.detach())\n b=socket(family,type,proto,b.detach())\n return a,b\n \nelse :\n\n\n def socketpair(family=AF_INET,type=SOCK_STREAM,proto=0):\n if family ==AF_INET:\n host=_LOCALHOST\n elif family ==AF_INET6:\n host=_LOCALHOST_V6\n else :\n raise ValueError(\"Only AF_INET and AF_INET6 socket address families \"\n \"are supported\")\n if type !=SOCK_STREAM:\n raise ValueError(\"Only SOCK_STREAM socket type is supported\")\n if proto !=0:\n raise ValueError(\"Only protocol zero is supported\")\n \n \n \n lsock=socket(family,type,proto)\n try :\n lsock.bind((host,0))\n lsock.listen()\n \n addr,port=lsock.getsockname()[:2]\n csock=socket(family,type,proto)\n try :\n csock.setblocking(False )\n try :\n csock.connect((addr,port))\n except (BlockingIOError,InterruptedError):\n pass\n csock.setblocking(True )\n ssock,_=lsock.accept()\n except :\n csock.close()\n raise\n finally :\n lsock.close()\n return (ssock,csock)\n __all__.append(\"socketpair\")\n \nsocketpair.__doc__=\"\"\"socketpair([family[, type[, proto]]]) -> (socket object, socket object)\nCreate a pair of socket objects from the sockets returned by the platform\nsocketpair() function.\nThe arguments are the same as for socket() except the default family is AF_UNIX\nif defined on the platform; otherwise, the default is AF_INET.\n\"\"\"\n\n_blocking_errnos={EAGAIN,EWOULDBLOCK}\n\nclass SocketIO(io.RawIOBase):\n\n ''\n\n\n\n \n \n \n \n \n \n \n \n \n \n \n def __init__(self,sock,mode):\n if mode not in (\"r\",\"w\",\"rw\",\"rb\",\"wb\",\"rwb\"):\n raise ValueError(\"invalid mode: %r\"%mode)\n io.RawIOBase.__init__(self)\n self._sock=sock\n if \"b\"not in mode:\n mode +=\"b\"\n self._mode=mode\n self._reading=\"r\"in mode\n self._writing=\"w\"in mode\n self._timeout_occurred=False\n \n def readinto(self,b):\n ''\n\n\n\n\n\n \n self._checkClosed()\n self._checkReadable()\n if self._timeout_occurred:\n raise OSError(\"cannot read from timed out object\")\n while True :\n try :\n return self._sock.recv_into(b)\n except timeout:\n self._timeout_occurred=True\n raise\n except error as e:\n if e.args[0]in _blocking_errnos:\n return None\n raise\n \n def write(self,b):\n ''\n\n\n\n \n 
self._checkClosed()\n self._checkWritable()\n try :\n return self._sock.send(b)\n except error as e:\n \n if e.args[0]in _blocking_errnos:\n return None\n raise\n \n def readable(self):\n ''\n \n if self.closed:\n raise ValueError(\"I/O operation on closed socket.\")\n return self._reading\n \n def writable(self):\n ''\n \n if self.closed:\n raise ValueError(\"I/O operation on closed socket.\")\n return self._writing\n \n def seekable(self):\n ''\n \n if self.closed:\n raise ValueError(\"I/O operation on closed socket.\")\n return super().seekable()\n \n def fileno(self):\n ''\n \n self._checkClosed()\n return self._sock.fileno()\n \n @property\n def name(self):\n if not self.closed:\n return self.fileno()\n else :\n return -1\n \n @property\n def mode(self):\n return self._mode\n \n def close(self):\n ''\n\n \n if self.closed:\n return\n io.RawIOBase.close(self)\n self._sock._decref_socketios()\n self._sock=None\n \n \ndef getfqdn(name=''):\n ''\n\n\n\n\n\n\n \n name=name.strip()\n if not name or name =='0.0.0.0':\n name=gethostname()\n try :\n hostname,aliases,ipaddrs=gethostbyaddr(name)\n except error:\n pass\n else :\n aliases.insert(0,hostname)\n for name in aliases:\n if '.'in name:\n break\n else :\n name=hostname\n return name\n \n \n_GLOBAL_DEFAULT_TIMEOUT=object()\n\ndef create_connection(address,timeout=_GLOBAL_DEFAULT_TIMEOUT,\nsource_address=None ):\n ''\n\n\n\n\n\n\n\n\n\n \n \n host,port=address\n err=None\n for res in getaddrinfo(host,port,0,SOCK_STREAM):\n af,socktype,proto,canonname,sa=res\n sock=None\n try :\n sock=socket(af,socktype,proto)\n if timeout is not _GLOBAL_DEFAULT_TIMEOUT:\n sock.settimeout(timeout)\n if source_address:\n sock.bind(source_address)\n sock.connect(sa)\n \n err=None\n return sock\n \n except error as _:\n err=_\n if sock is not None :\n sock.close()\n \n if err is not None :\n try :\n raise err\n finally :\n \n err=None\n else :\n raise error(\"getaddrinfo returns an empty list\")\n \n \ndef has_dualstack_ipv6():\n ''\n\n \n if not has_ipv6\\\n or not hasattr(_socket,'IPPROTO_IPV6')\\\n or not hasattr(_socket,'IPV6_V6ONLY'):\n return False\n try :\n with socket(AF_INET6,SOCK_STREAM)as sock:\n sock.setsockopt(IPPROTO_IPV6,IPV6_V6ONLY,0)\n return True\n except error:\n return False\n \n \ndef create_server(address,*,family=AF_INET,backlog=None ,reuse_port=False ,\ndualstack_ipv6=False ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if reuse_port and not hasattr(_socket,\"SO_REUSEPORT\"):\n raise ValueError(\"SO_REUSEPORT not supported on this platform\")\n if dualstack_ipv6:\n if not has_dualstack_ipv6():\n raise ValueError(\"dualstack_ipv6 not supported on this platform\")\n if family !=AF_INET6:\n raise ValueError(\"dualstack_ipv6 requires AF_INET6 family\")\n sock=socket(family,SOCK_STREAM)\n try :\n \n \n \n \n \n \n \n \n \n if os.name not in ('nt','cygwin')and\\\n hasattr(_socket,'SO_REUSEADDR'):\n try :\n sock.setsockopt(SOL_SOCKET,SO_REUSEADDR,1)\n except error:\n \n \n pass\n if reuse_port:\n sock.setsockopt(SOL_SOCKET,SO_REUSEPORT,1)\n if has_ipv6 and family ==AF_INET6:\n if dualstack_ipv6:\n sock.setsockopt(IPPROTO_IPV6,IPV6_V6ONLY,0)\n elif hasattr(_socket,\"IPV6_V6ONLY\")and\\\n hasattr(_socket,\"IPPROTO_IPV6\"):\n sock.setsockopt(IPPROTO_IPV6,IPV6_V6ONLY,1)\n try :\n sock.bind(address)\n except error as err:\n msg='%s (while attempting to bind on address %r)'%\\\n (err.strerror,address)\n raise error(err.errno,msg)from None\n if backlog is None :\n sock.listen()\n else :\n sock.listen(backlog)\n return sock\n except error:\n 
sock.close()\n raise\n \n \ndef getaddrinfo(host,port,family=0,type=0,proto=0,flags=0):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n \n \n addrlist=[]\n for res in _socket.getaddrinfo(host,port,family,type,proto,flags):\n af,socktype,proto,canonname,sa=res\n addrlist.append((_intenum_converter(af,AddressFamily),\n _intenum_converter(socktype,SocketKind),\n proto,canonname,sa))\n return addrlist\n", ["_socket", "array", "enum", "errno", "io", "os", "selectors", "sys"]], "sre_compile": [".py", "\n\n\n\n\n\n\n\n\n\n\"\"\"Internal support module for sre\"\"\"\n\nimport _sre\nimport sre_parse\nfrom sre_constants import *\n\nassert _sre.MAGIC ==MAGIC,\"SRE module mismatch\"\n\n_LITERAL_CODES={LITERAL,NOT_LITERAL}\n_REPEATING_CODES={REPEAT,MIN_REPEAT,MAX_REPEAT}\n_SUCCESS_CODES={SUCCESS,FAILURE}\n_ASSERT_CODES={ASSERT,ASSERT_NOT}\n_UNIT_CODES=_LITERAL_CODES |{ANY,IN}\n\n\n_equivalences=(\n\n(0x69,0x131),\n\n(0x73,0x17f),\n\n(0xb5,0x3bc),\n\n(0x345,0x3b9,0x1fbe),\n\n(0x390,0x1fd3),\n\n(0x3b0,0x1fe3),\n\n(0x3b2,0x3d0),\n\n(0x3b5,0x3f5),\n\n(0x3b8,0x3d1),\n\n(0x3ba,0x3f0),\n\n(0x3c0,0x3d6),\n\n(0x3c1,0x3f1),\n\n(0x3c2,0x3c3),\n\n(0x3c6,0x3d5),\n\n(0x1e61,0x1e9b),\n\n(0xfb05,0xfb06),\n)\n\n\n_ignorecase_fixes={i:tuple(j for j in t if i !=j)\nfor t in _equivalences for i in t}\n\ndef _combine_flags(flags,add_flags,del_flags,\nTYPE_FLAGS=sre_parse.TYPE_FLAGS):\n if add_flags&TYPE_FLAGS:\n flags &=~TYPE_FLAGS\n return (flags |add_flags)&~del_flags\n \ndef _compile(code,pattern,flags):\n\n emit=code.append\n _len=len\n LITERAL_CODES=_LITERAL_CODES\n REPEATING_CODES=_REPEATING_CODES\n SUCCESS_CODES=_SUCCESS_CODES\n ASSERT_CODES=_ASSERT_CODES\n iscased=None\n tolower=None\n fixes=None\n if flags&SRE_FLAG_IGNORECASE and not flags&SRE_FLAG_LOCALE:\n if flags&SRE_FLAG_UNICODE:\n iscased=_sre.unicode_iscased\n tolower=_sre.unicode_tolower\n fixes=_ignorecase_fixes\n else :\n iscased=_sre.ascii_iscased\n tolower=_sre.ascii_tolower\n for op,av in pattern:\n if op in LITERAL_CODES:\n if not flags&SRE_FLAG_IGNORECASE:\n emit(op)\n emit(av)\n elif flags&SRE_FLAG_LOCALE:\n emit(OP_LOCALE_IGNORE[op])\n emit(av)\n elif not iscased(av):\n emit(op)\n emit(av)\n else :\n lo=tolower(av)\n if not fixes:\n emit(OP_IGNORE[op])\n emit(lo)\n elif lo not in fixes:\n emit(OP_UNICODE_IGNORE[op])\n emit(lo)\n else :\n emit(IN_UNI_IGNORE)\n skip=_len(code);emit(0)\n if op is NOT_LITERAL:\n emit(NEGATE)\n for k in (lo,)+fixes[lo]:\n emit(LITERAL)\n emit(k)\n emit(FAILURE)\n code[skip]=_len(code)-skip\n elif op is IN:\n charset,hascased=_optimize_charset(av,iscased,tolower,fixes)\n if flags&SRE_FLAG_IGNORECASE and flags&SRE_FLAG_LOCALE:\n emit(IN_LOC_IGNORE)\n elif not hascased:\n emit(IN)\n elif not fixes:\n emit(IN_IGNORE)\n else :\n emit(IN_UNI_IGNORE)\n skip=_len(code);emit(0)\n _compile_charset(charset,flags,code)\n code[skip]=_len(code)-skip\n elif op is ANY:\n if flags&SRE_FLAG_DOTALL:\n emit(ANY_ALL)\n else :\n emit(ANY)\n elif op in REPEATING_CODES:\n if flags&SRE_FLAG_TEMPLATE:\n raise error(\"internal: unsupported template operator %r\"%(op,))\n if _simple(av[2]):\n if op is MAX_REPEAT:\n emit(REPEAT_ONE)\n else :\n emit(MIN_REPEAT_ONE)\n skip=_len(code);emit(0)\n emit(av[0])\n emit(av[1])\n _compile(code,av[2],flags)\n emit(SUCCESS)\n code[skip]=_len(code)-skip\n else :\n emit(REPEAT)\n skip=_len(code);emit(0)\n emit(av[0])\n emit(av[1])\n _compile(code,av[2],flags)\n code[skip]=_len(code)-skip\n if op is MAX_REPEAT:\n emit(MAX_UNTIL)\n else :\n emit(MIN_UNTIL)\n elif op is SUBPATTERN:\n group,add_flags,del_flags,p=av\n if group:\n 
emit(MARK)\n emit((group -1)*2)\n \n _compile(code,p,_combine_flags(flags,add_flags,del_flags))\n if group:\n emit(MARK)\n emit((group -1)*2+1)\n elif op in SUCCESS_CODES:\n emit(op)\n elif op in ASSERT_CODES:\n emit(op)\n skip=_len(code);emit(0)\n if av[0]>=0:\n emit(0)\n else :\n lo,hi=av[1].getwidth()\n if lo !=hi:\n raise error(\"look-behind requires fixed-width pattern\")\n emit(lo)\n _compile(code,av[1],flags)\n emit(SUCCESS)\n code[skip]=_len(code)-skip\n elif op is CALL:\n emit(op)\n skip=_len(code);emit(0)\n _compile(code,av,flags)\n emit(SUCCESS)\n code[skip]=_len(code)-skip\n elif op is AT:\n emit(op)\n if flags&SRE_FLAG_MULTILINE:\n av=AT_MULTILINE.get(av,av)\n if flags&SRE_FLAG_LOCALE:\n av=AT_LOCALE.get(av,av)\n elif flags&SRE_FLAG_UNICODE:\n av=AT_UNICODE.get(av,av)\n emit(av)\n elif op is BRANCH:\n emit(op)\n tail=[]\n tailappend=tail.append\n for av in av[1]:\n skip=_len(code);emit(0)\n \n _compile(code,av,flags)\n emit(JUMP)\n tailappend(_len(code));emit(0)\n code[skip]=_len(code)-skip\n emit(FAILURE)\n for tail in tail:\n code[tail]=_len(code)-tail\n elif op is CATEGORY:\n emit(op)\n if flags&SRE_FLAG_LOCALE:\n av=CH_LOCALE[av]\n elif flags&SRE_FLAG_UNICODE:\n av=CH_UNICODE[av]\n emit(av)\n elif op is GROUPREF:\n if not flags&SRE_FLAG_IGNORECASE:\n emit(op)\n elif flags&SRE_FLAG_LOCALE:\n emit(GROUPREF_LOC_IGNORE)\n elif not fixes:\n emit(GROUPREF_IGNORE)\n else :\n emit(GROUPREF_UNI_IGNORE)\n emit(av -1)\n elif op is GROUPREF_EXISTS:\n emit(op)\n emit(av[0]-1)\n skipyes=_len(code);emit(0)\n _compile(code,av[1],flags)\n if av[2]:\n emit(JUMP)\n skipno=_len(code);emit(0)\n code[skipyes]=_len(code)-skipyes+1\n _compile(code,av[2],flags)\n code[skipno]=_len(code)-skipno\n else :\n code[skipyes]=_len(code)-skipyes+1\n else :\n raise error(\"internal: unsupported operand type %r\"%(op,))\n \ndef _compile_charset(charset,flags,code):\n\n emit=code.append\n for op,av in charset:\n emit(op)\n if op is NEGATE:\n pass\n elif op is LITERAL:\n emit(av)\n elif op is RANGE or op is RANGE_UNI_IGNORE:\n emit(av[0])\n emit(av[1])\n elif op is CHARSET:\n code.extend(av)\n elif op is BIGCHARSET:\n code.extend(av)\n elif op is CATEGORY:\n if flags&SRE_FLAG_LOCALE:\n emit(CH_LOCALE[av])\n elif flags&SRE_FLAG_UNICODE:\n emit(CH_UNICODE[av])\n else :\n emit(av)\n else :\n raise error(\"internal: unsupported set operator %r\"%(op,))\n emit(FAILURE)\n \ndef _optimize_charset(charset,iscased=None ,fixup=None ,fixes=None ):\n\n out=[]\n tail=[]\n charmap=bytearray(256)\n hascased=False\n for op,av in charset:\n while True :\n try :\n if op is LITERAL:\n if fixup:\n lo=fixup(av)\n charmap[lo]=1\n if fixes and lo in fixes:\n for k in fixes[lo]:\n charmap[k]=1\n if not hascased and iscased(av):\n hascased=True\n else :\n charmap[av]=1\n elif op is RANGE:\n r=range(av[0],av[1]+1)\n if fixup:\n if fixes:\n for i in map(fixup,r):\n charmap[i]=1\n if i in fixes:\n for k in fixes[i]:\n charmap[k]=1\n else :\n for i in map(fixup,r):\n charmap[i]=1\n if not hascased:\n hascased=any(map(iscased,r))\n else :\n for i in r:\n charmap[i]=1\n elif op is NEGATE:\n out.append((op,av))\n else :\n tail.append((op,av))\n except IndexError:\n if len(charmap)==256:\n \n charmap +=b'\\0'*0xff00\n continue\n \n if fixup:\n hascased=True\n \n \n \n if op is RANGE:\n op=RANGE_UNI_IGNORE\n tail.append((op,av))\n break\n \n \n runs=[]\n q=0\n while True :\n p=charmap.find(1,q)\n if p <0:\n break\n if len(runs)>=2:\n runs=None\n break\n q=charmap.find(0,p)\n if q <0:\n runs.append((p,len(charmap)))\n break\n 
runs.append((p,q))\n if runs is not None :\n \n for p,q in runs:\n if q -p ==1:\n out.append((LITERAL,p))\n else :\n out.append((RANGE,(p,q -1)))\n out +=tail\n \n if hascased or len(out)0xffff:\n return None\n if any(map(iscased,range(av[0],av[1]+1))):\n return None\n return charset\n return None\n \ndef _compile_info(code,pattern,flags):\n\n\n\n lo,hi=pattern.getwidth()\n if hi >MAXCODE:\n hi=MAXCODE\n if lo ==0:\n code.extend([INFO,4,0,lo,hi])\n return\n \n prefix=[]\n prefix_skip=0\n charset=[]\n if not (flags&SRE_FLAG_IGNORECASE and flags&SRE_FLAG_LOCALE):\n \n prefix,prefix_skip,got_all=_get_literal_prefix(pattern,flags)\n \n if not prefix:\n charset=_get_charset_prefix(pattern,flags)\n \n \n \n \n \n emit=code.append\n emit(INFO)\n skip=len(code);emit(0)\n \n mask=0\n if prefix:\n mask=SRE_INFO_PREFIX\n if prefix_skip is None and got_all:\n mask=mask |SRE_INFO_LITERAL\n elif charset:\n mask=mask |SRE_INFO_CHARSET\n emit(mask)\n \n if lo MAXGROUPS:\n raise error(\"too many groups\")\n if name is not None :\n ogid=self.groupdict.get(name,None )\n if ogid is not None :\n raise error(\"redefinition of group name %r as group %d; \"\n \"was group %d\"%(name,gid,ogid))\n self.groupdict[name]=gid\n return gid\n def closegroup(self,gid,p):\n self.groupwidths[gid]=p.getwidth()\n def checkgroup(self,gid):\n return gid =self.lookbehindgroups:\n raise source.error('cannot refer to group defined in the same '\n 'lookbehind subpattern')\n \nclass SubPattern:\n\n def __init__(self,state,data=None ):\n self.state=state\n if data is None :\n data=[]\n self.data=data\n self.width=None\n \n def dump(self,level=0):\n nl=True\n seqtypes=(tuple,list)\n for op,av in self.data:\n print(level *\" \"+str(op),end='')\n if op is IN:\n \n print()\n for op,a in av:\n print((level+1)*\" \"+str(op),a)\n elif op is BRANCH:\n print()\n for i,a in enumerate(av[1]):\n if i:\n print(level *\" \"+\"OR\")\n a.dump(level+1)\n elif op is GROUPREF_EXISTS:\n condgroup,item_yes,item_no=av\n print('',condgroup)\n item_yes.dump(level+1)\n if item_no:\n print(level *\" \"+\"ELSE\")\n item_no.dump(level+1)\n elif isinstance(av,seqtypes):\n nl=False\n for a in av:\n if isinstance(a,SubPattern):\n if not nl:\n print()\n a.dump(level+1)\n nl=True\n else :\n if not nl:\n print(' ',end='')\n print(a,end='')\n nl=False\n if not nl:\n print()\n else :\n print('',av)\n def __repr__(self):\n return repr(self.data)\n def __len__(self):\n return len(self.data)\n def __delitem__(self,index):\n del self.data[index]\n def __getitem__(self,index):\n if isinstance(index,slice):\n return SubPattern(self.state,self.data[index])\n return self.data[index]\n def __setitem__(self,index,code):\n self.data[index]=code\n def insert(self,index,code):\n self.data.insert(index,code)\n def append(self,code):\n self.data.append(code)\n def getwidth(self):\n \n if self.width is not None :\n return self.width\n lo=hi=0\n for op,av in self.data:\n if op is BRANCH:\n i=MAXREPEAT -1\n j=0\n for av in av[1]:\n l,h=av.getwidth()\n i=min(i,l)\n j=max(j,h)\n lo=lo+i\n hi=hi+j\n elif op is CALL:\n i,j=av.getwidth()\n lo=lo+i\n hi=hi+j\n elif op is SUBPATTERN:\n i,j=av[-1].getwidth()\n lo=lo+i\n hi=hi+j\n elif op in _REPEATCODES:\n i,j=av[2].getwidth()\n lo=lo+i *av[0]\n hi=hi+j *av[1]\n elif op in _UNITCODES:\n lo=lo+1\n hi=hi+1\n elif op is GROUPREF:\n i,j=self.state.groupwidths[av]\n lo=lo+i\n hi=hi+j\n elif op is GROUPREF_EXISTS:\n i,j=av[1].getwidth()\n if av[2]is not None :\n l,h=av[2].getwidth()\n i=min(i,l)\n j=max(j,h)\n else :\n i=0\n lo=lo+i\n hi=hi+j\n elif 
op is SUCCESS:\n break\n self.width=min(lo,MAXREPEAT -1),min(hi,MAXREPEAT)\n return self.width\n \nclass Tokenizer:\n def __init__(self,string):\n self.istext=isinstance(string,str)\n self.string=string\n if not self.istext:\n string=str(string,'latin1')\n self.decoded_string=string\n self.index=0\n self.next=None\n self.__next()\n def __next(self):\n index=self.index\n try :\n char=self.decoded_string[index]\n except IndexError:\n self.next=None\n return\n if char ==\"\\\\\":\n index +=1\n try :\n char +=self.decoded_string[index]\n except IndexError:\n raise error(\"bad escape (end of pattern)\",\n self.string,len(self.string)-1)from None\n self.index=index+1\n self.next=char\n def match(self,char):\n if char ==self.next:\n self.__next()\n return True\n return False\n def get(self):\n this=self.next\n self.__next()\n return this\n def getwhile(self,n,charset):\n result=''\n for _ in range(n):\n c=self.next\n if c not in charset:\n break\n result +=c\n self.__next()\n return result\n def getuntil(self,terminator,name):\n result=''\n while True :\n c=self.next\n self.__next()\n if c is None :\n if not result:\n raise self.error(\"missing \"+name)\n raise self.error(\"missing %s, unterminated name\"%terminator,\n len(result))\n if c ==terminator:\n if not result:\n raise self.error(\"missing \"+name,1)\n break\n result +=c\n return result\n @property\n def pos(self):\n return self.index -len(self.next or '')\n def tell(self):\n return self.index -len(self.next or '')\n def seek(self,index):\n self.index=index\n self.__next()\n \n def error(self,msg,offset=0):\n return error(msg,self.string,self.tell()-offset)\n \ndef _class_escape(source,escape):\n\n code=ESCAPES.get(escape)\n if code:\n return code\n code=CATEGORIES.get(escape)\n if code and code[0]is IN:\n return code\n try :\n c=escape[1:2]\n if c ==\"x\":\n \n escape +=source.getwhile(2,HEXDIGITS)\n if len(escape)!=4:\n raise source.error(\"incomplete escape %s\"%escape,len(escape))\n return LITERAL,int(escape[2:],16)\n elif c ==\"u\"and source.istext:\n \n escape +=source.getwhile(4,HEXDIGITS)\n if len(escape)!=6:\n raise source.error(\"incomplete escape %s\"%escape,len(escape))\n return LITERAL,int(escape[2:],16)\n elif c ==\"U\"and source.istext:\n \n escape +=source.getwhile(8,HEXDIGITS)\n if len(escape)!=10:\n raise source.error(\"incomplete escape %s\"%escape,len(escape))\n c=int(escape[2:],16)\n chr(c)\n return LITERAL,c\n elif c ==\"N\"and source.istext:\n import unicodedata\n \n if not source.match('{'):\n raise source.error(\"missing {\")\n charname=source.getuntil('}','character name')\n try :\n c=ord(unicodedata.lookup(charname))\n except KeyError:\n raise source.error(\"undefined character name %r\"%charname,\n len(charname)+len(r'\\N{}'))\n return LITERAL,c\n elif c in OCTDIGITS:\n \n escape +=source.getwhile(2,OCTDIGITS)\n c=int(escape[1:],8)\n if c >0o377:\n raise source.error('octal escape value %s outside of '\n 'range 0-0o377'%escape,len(escape))\n return LITERAL,c\n elif c in DIGITS:\n raise ValueError\n if len(escape)==2:\n if c in ASCIILETTERS:\n raise source.error('bad escape %s'%escape,len(escape))\n return LITERAL,ord(escape[1])\n except ValueError:\n pass\n raise source.error(\"bad escape %s\"%escape,len(escape))\n \ndef _escape(source,escape,state):\n\n code=CATEGORIES.get(escape)\n if code:\n return code\n code=ESCAPES.get(escape)\n if code:\n return code\n try :\n c=escape[1:2]\n if c ==\"x\":\n \n escape +=source.getwhile(2,HEXDIGITS)\n if len(escape)!=4:\n raise source.error(\"incomplete escape 
%s\"%escape,len(escape))\n return LITERAL,int(escape[2:],16)\n elif c ==\"u\"and source.istext:\n \n escape +=source.getwhile(4,HEXDIGITS)\n if len(escape)!=6:\n raise source.error(\"incomplete escape %s\"%escape,len(escape))\n return LITERAL,int(escape[2:],16)\n elif c ==\"U\"and source.istext:\n \n escape +=source.getwhile(8,HEXDIGITS)\n if len(escape)!=10:\n raise source.error(\"incomplete escape %s\"%escape,len(escape))\n c=int(escape[2:],16)\n chr(c)\n return LITERAL,c\n elif c ==\"N\"and source.istext:\n import unicodedata\n \n if not source.match('{'):\n raise source.error(\"missing {\")\n charname=source.getuntil('}','character name')\n try :\n c=ord(unicodedata.lookup(charname))\n except KeyError:\n raise source.error(\"undefined character name %r\"%charname,\n len(charname)+len(r'\\N{}'))\n return LITERAL,c\n elif c ==\"0\":\n \n escape +=source.getwhile(2,OCTDIGITS)\n return LITERAL,int(escape[1:],8)\n elif c in DIGITS:\n \n if source.next in DIGITS:\n escape +=source.get()\n if (escape[1]in OCTDIGITS and escape[2]in OCTDIGITS and\n source.next in OCTDIGITS):\n \n escape +=source.get()\n c=int(escape[1:],8)\n if c >0o377:\n raise source.error('octal escape value %s outside of '\n 'range 0-0o377'%escape,\n len(escape))\n return LITERAL,c\n \n group=int(escape[1:])\n if group =MAXREPEAT:\n raise OverflowError(\"the repetition number is too large\")\n if hi:\n max=int(hi)\n if max >=MAXREPEAT:\n raise OverflowError(\"the repetition number is too large\")\n if max \",\"group name\")\n if not name.isidentifier():\n msg=\"bad character in group name %r\"%name\n raise source.error(msg,len(name)+1)\n elif sourcematch(\"=\"):\n \n name=source.getuntil(\")\",\"group name\")\n if not name.isidentifier():\n msg=\"bad character in group name %r\"%name\n raise source.error(msg,len(name)+1)\n gid=state.groupdict.get(name)\n if gid is None :\n msg=\"unknown group name %r\"%name\n raise source.error(msg,len(name)+1)\n if not state.checkgroup(gid):\n raise source.error(\"cannot refer to an open group\",\n len(name)+1)\n state.checklookbehindgroup(gid,source)\n subpatternappend((GROUPREF,gid))\n continue\n \n else :\n char=sourceget()\n if char is None :\n raise source.error(\"unexpected end of pattern\")\n raise source.error(\"unknown extension ?P\"+char,\n len(char)+2)\n elif char ==\":\":\n \n group=None\n elif char ==\"#\":\n \n while True :\n if source.next is None :\n raise source.error(\"missing ), unterminated comment\",\n source.tell()-start)\n if sourceget()==\")\":\n break\n continue\n \n elif char in \"=!<\":\n \n dir=1\n if char ==\"<\":\n char=sourceget()\n if char is None :\n raise source.error(\"unexpected end of pattern\")\n if char not in \"=!\":\n raise source.error(\"unknown extension ?<\"+char,\n len(char)+2)\n dir=-1\n lookbehindgroups=state.lookbehindgroups\n if lookbehindgroups is None :\n state.lookbehindgroups=state.groups\n p=_parse_sub(source,state,verbose,nested+1)\n if dir <0:\n if lookbehindgroups is None :\n state.lookbehindgroups=None\n if not sourcematch(\")\"):\n raise source.error(\"missing ), unterminated subpattern\",\n source.tell()-start)\n if char ==\"=\":\n subpatternappend((ASSERT,(dir,p)))\n else :\n subpatternappend((ASSERT_NOT,(dir,p)))\n continue\n \n elif char ==\"(\":\n \n condname=source.getuntil(\")\",\"group name\")\n if condname.isidentifier():\n condgroup=state.groupdict.get(condname)\n if condgroup is None :\n msg=\"unknown group name %r\"%condname\n raise source.error(msg,len(condname)+1)\n else :\n try :\n condgroup=int(condname)\n if 
condgroup <0:\n raise ValueError\n except ValueError:\n msg=\"bad character in group name %r\"%condname\n raise source.error(msg,len(condname)+1)from None\n if not condgroup:\n raise source.error(\"bad group number\",\n len(condname)+1)\n if condgroup >=MAXGROUPS:\n msg=\"invalid group reference %d\"%condgroup\n raise source.error(msg,len(condname)+1)\n state.checklookbehindgroup(condgroup,source)\n item_yes=_parse(source,state,verbose,nested+1)\n if source.match(\"|\"):\n item_no=_parse(source,state,verbose,nested+1)\n if source.next ==\"|\":\n raise source.error(\"conditional backref with more than two branches\")\n else :\n item_no=None\n if not source.match(\")\"):\n raise source.error(\"missing ), unterminated subpattern\",\n source.tell()-start)\n subpatternappend((GROUPREF_EXISTS,(condgroup,item_yes,item_no)))\n continue\n \n elif char in FLAGS or char ==\"-\":\n \n flags=_parse_flags(source,state,char)\n if flags is None :\n if not first or subpattern:\n import warnings\n warnings.warn(\n 'Flags not at the start of the expression %r%s'%(\n source.string[:20],\n ' (truncated)'if len(source.string)>20 else '',\n ),\n DeprecationWarning,stacklevel=nested+6\n )\n if (state.flags&SRE_FLAG_VERBOSE)and not verbose:\n raise Verbose\n continue\n \n add_flags,del_flags=flags\n group=None\n else :\n raise source.error(\"unknown extension ?\"+char,\n len(char)+1)\n \n \n if group is not None :\n try :\n group=state.opengroup(name)\n except error as err:\n raise source.error(err.msg,len(name)+1)from None\n sub_verbose=((verbose or (add_flags&SRE_FLAG_VERBOSE))and\n not (del_flags&SRE_FLAG_VERBOSE))\n p=_parse_sub(source,state,sub_verbose,nested+1)\n if not source.match(\")\"):\n raise source.error(\"missing ), unterminated subpattern\",\n source.tell()-start)\n if group is not None :\n state.closegroup(group,p)\n subpatternappend((SUBPATTERN,(group,add_flags,del_flags,p)))\n \n elif this ==\"^\":\n subpatternappend((AT,AT_BEGINNING))\n \n elif this ==\"$\":\n subpatternappend((AT,AT_END))\n \n else :\n raise AssertionError(\"unsupported special character %r\"%(char,))\n \n \n for i in range(len(subpattern))[::-1]:\n op,av=subpattern[i]\n if op is SUBPATTERN:\n group,add_flags,del_flags,p=av\n if group is None and not add_flags and not del_flags:\n subpattern[i:i+1]=p\n \n return subpattern\n \ndef _parse_flags(source,state,char):\n sourceget=source.get\n add_flags=0\n del_flags=0\n if char !=\"-\":\n while True :\n flag=FLAGS[char]\n if source.istext:\n if char =='L':\n msg=\"bad inline flags: cannot use 'L' flag with a str pattern\"\n raise source.error(msg)\n else :\n if char =='u':\n msg=\"bad inline flags: cannot use 'u' flag with a bytes pattern\"\n raise source.error(msg)\n add_flags |=flag\n if (flag&TYPE_FLAGS)and (add_flags&TYPE_FLAGS)!=flag:\n msg=\"bad inline flags: flags 'a', 'u' and 'L' are incompatible\"\n raise source.error(msg)\n char=sourceget()\n if char is None :\n raise source.error(\"missing -, : or )\")\n if char in \")-:\":\n break\n if char not in FLAGS:\n msg=\"unknown flag\"if char.isalpha()else \"missing -, : or )\"\n raise source.error(msg,len(char))\n if char ==\")\":\n state.flags |=add_flags\n return None\n if add_flags&GLOBAL_FLAGS:\n raise source.error(\"bad inline flags: cannot turn on global flag\",1)\n if char ==\"-\":\n char=sourceget()\n if char is None :\n raise source.error(\"missing flag\")\n if char not in FLAGS:\n msg=\"unknown flag\"if char.isalpha()else \"missing flag\"\n raise source.error(msg,len(char))\n while True :\n flag=FLAGS[char]\n if 
flag&TYPE_FLAGS:\n msg=\"bad inline flags: cannot turn off flags 'a', 'u' and 'L'\"\n raise source.error(msg)\n del_flags |=flag\n char=sourceget()\n if char is None :\n raise source.error(\"missing :\")\n if char ==\":\":\n break\n if char not in FLAGS:\n msg=\"unknown flag\"if char.isalpha()else \"missing :\"\n raise source.error(msg,len(char))\n assert char ==\":\"\n if del_flags&GLOBAL_FLAGS:\n raise source.error(\"bad inline flags: cannot turn off global flag\",1)\n if add_flags&del_flags:\n raise source.error(\"bad inline flags: flag turned on and off\",1)\n return add_flags,del_flags\n \ndef fix_flags(src,flags):\n\n if isinstance(src,str):\n if flags&SRE_FLAG_LOCALE:\n raise ValueError(\"cannot use LOCALE flag with a str pattern\")\n if not flags&SRE_FLAG_ASCII:\n flags |=SRE_FLAG_UNICODE\n elif flags&SRE_FLAG_UNICODE:\n raise ValueError(\"ASCII and UNICODE flags are incompatible\")\n else :\n if flags&SRE_FLAG_UNICODE:\n raise ValueError(\"cannot use UNICODE flag with a bytes pattern\")\n if flags&SRE_FLAG_LOCALE and flags&SRE_FLAG_ASCII:\n raise ValueError(\"ASCII and LOCALE flags are incompatible\")\n return flags\n \ndef parse(str,flags=0,state=None ):\n\n\n source=Tokenizer(str)\n \n if state is None :\n state=State()\n state.flags=flags\n state.str=str\n \n try :\n p=_parse_sub(source,state,flags&SRE_FLAG_VERBOSE,0)\n except Verbose:\n \n \n state=State()\n state.flags=flags |SRE_FLAG_VERBOSE\n state.str=str\n source.seek(0)\n p=_parse_sub(source,state,True ,0)\n \n p.state.flags=fix_flags(str,p.state.flags)\n \n if source.next is not None :\n assert source.next ==\")\"\n raise source.error(\"unbalanced parenthesis\")\n \n if flags&SRE_FLAG_DEBUG:\n p.dump()\n \n return p\n \ndef parse_template(source,state):\n\n\n s=Tokenizer(source)\n sget=s.get\n groups=[]\n literals=[]\n literal=[]\n lappend=literal.append\n def addgroup(index,pos):\n if index >state.groups:\n raise s.error(\"invalid group reference %d\"%index,pos)\n if literal:\n literals.append(''.join(literal))\n del literal[:]\n groups.append((len(literals),index))\n literals.append(None )\n groupindex=state.groupindex\n while True :\n this=sget()\n if this is None :\n break\n if this[0]==\"\\\\\":\n \n c=this[1]\n if c ==\"g\":\n name=\"\"\n if not s.match(\"<\"):\n raise s.error(\"missing <\")\n name=s.getuntil(\">\",\"group name\")\n if name.isidentifier():\n try :\n index=groupindex[name]\n except KeyError:\n raise IndexError(\"unknown group name %r\"%name)\n else :\n try :\n index=int(name)\n if index <0:\n raise ValueError\n except ValueError:\n raise s.error(\"bad character in group name %r\"%name,\n len(name)+1)from None\n if index >=MAXGROUPS:\n raise s.error(\"invalid group reference %d\"%index,\n len(name)+1)\n addgroup(index,len(name)+1)\n elif c ==\"0\":\n if s.next in OCTDIGITS:\n this +=sget()\n if s.next in OCTDIGITS:\n this +=sget()\n lappend(chr(int(this[1:],8)&0xff))\n elif c in DIGITS:\n isoctal=False\n if s.next in DIGITS:\n this +=sget()\n if (c in OCTDIGITS and this[2]in OCTDIGITS and\n s.next in OCTDIGITS):\n this +=sget()\n isoctal=True\n c=int(this[1:],8)\n if c >0o377:\n raise s.error('octal escape value %s outside of '\n 'range 0-0o377'%this,len(this))\n lappend(chr(c))\n if not isoctal:\n addgroup(int(this[1:]),len(this)-1)\n else :\n try :\n this=chr(ESCAPES[this][1])\n except KeyError:\n if c in ASCIILETTERS:\n raise s.error('bad escape %s'%this,len(this))\n lappend(this)\n else :\n lappend(this)\n if literal:\n literals.append(''.join(literal))\n if not isinstance(source,str):\n \n \n 
literals=[None if s is None else s.encode('latin-1')for s in literals]\n return groups,literals\n \ndef expand_template(template,match):\n g=match.group\n empty=match.string[:0]\n groups,literals=template\n literals=literals[:]\n try :\n for index,group in groups:\n literals[index]=g(group)or empty\n except IndexError:\n raise error(\"invalid group reference %d\"%index)\n return empty.join(literals)\n", ["sre_constants", "unicodedata", "warnings"]], "stat": [".py", "''\n\n\n\n\n\n\nST_MODE=0\nST_INO=1\nST_DEV=2\nST_NLINK=3\nST_UID=4\nST_GID=5\nST_SIZE=6\nST_ATIME=7\nST_MTIME=8\nST_CTIME=9\n\n\n\ndef S_IMODE(mode):\n ''\n\n \n return mode&0o7777\n \ndef S_IFMT(mode):\n ''\n\n \n return mode&0o170000\n \n \n \n \nS_IFDIR=0o040000\nS_IFCHR=0o020000\nS_IFBLK=0o060000\nS_IFREG=0o100000\nS_IFIFO=0o010000\nS_IFLNK=0o120000\nS_IFSOCK=0o140000\n\nS_IFDOOR=0\nS_IFPORT=0\nS_IFWHT=0\n\n\n\ndef S_ISDIR(mode):\n ''\n return S_IFMT(mode)==S_IFDIR\n \ndef S_ISCHR(mode):\n ''\n return S_IFMT(mode)==S_IFCHR\n \ndef S_ISBLK(mode):\n ''\n return S_IFMT(mode)==S_IFBLK\n \ndef S_ISREG(mode):\n ''\n return S_IFMT(mode)==S_IFREG\n \ndef S_ISFIFO(mode):\n ''\n return S_IFMT(mode)==S_IFIFO\n \ndef S_ISLNK(mode):\n ''\n return S_IFMT(mode)==S_IFLNK\n \ndef S_ISSOCK(mode):\n ''\n return S_IFMT(mode)==S_IFSOCK\n \ndef S_ISDOOR(mode):\n ''\n return False\n \ndef S_ISPORT(mode):\n ''\n return False\n \ndef S_ISWHT(mode):\n ''\n return False\n \n \n \nS_ISUID=0o4000\nS_ISGID=0o2000\nS_ENFMT=S_ISGID\nS_ISVTX=0o1000\nS_IREAD=0o0400\nS_IWRITE=0o0200\nS_IEXEC=0o0100\nS_IRWXU=0o0700\nS_IRUSR=0o0400\nS_IWUSR=0o0200\nS_IXUSR=0o0100\nS_IRWXG=0o0070\nS_IRGRP=0o0040\nS_IWGRP=0o0020\nS_IXGRP=0o0010\nS_IRWXO=0o0007\nS_IROTH=0o0004\nS_IWOTH=0o0002\nS_IXOTH=0o0001\n\n\n\nUF_NODUMP=0x00000001\nUF_IMMUTABLE=0x00000002\nUF_APPEND=0x00000004\nUF_OPAQUE=0x00000008\nUF_NOUNLINK=0x00000010\nUF_COMPRESSED=0x00000020\nUF_HIDDEN=0x00008000\nSF_ARCHIVED=0x00010000\nSF_IMMUTABLE=0x00020000\nSF_APPEND=0x00040000\nSF_NOUNLINK=0x00100000\nSF_SNAPSHOT=0x00200000\n\n\n_filemode_table=(\n((S_IFLNK,\"l\"),\n(S_IFSOCK,\"s\"),\n(S_IFREG,\"-\"),\n(S_IFBLK,\"b\"),\n(S_IFDIR,\"d\"),\n(S_IFCHR,\"c\"),\n(S_IFIFO,\"p\")),\n\n((S_IRUSR,\"r\"),),\n((S_IWUSR,\"w\"),),\n((S_IXUSR |S_ISUID,\"s\"),\n(S_ISUID,\"S\"),\n(S_IXUSR,\"x\")),\n\n((S_IRGRP,\"r\"),),\n((S_IWGRP,\"w\"),),\n((S_IXGRP |S_ISGID,\"s\"),\n(S_ISGID,\"S\"),\n(S_IXGRP,\"x\")),\n\n((S_IROTH,\"r\"),),\n((S_IWOTH,\"w\"),),\n((S_IXOTH |S_ISVTX,\"t\"),\n(S_ISVTX,\"T\"),\n(S_IXOTH,\"x\"))\n)\n\ndef filemode(mode):\n ''\n perm=[]\n for table in _filemode_table:\n for bit,char in table:\n if mode&bit ==bit:\n perm.append(char)\n break\n else :\n perm.append(\"-\")\n return \"\".join(perm)\n \n \n \n \n \nFILE_ATTRIBUTE_ARCHIVE=32\nFILE_ATTRIBUTE_COMPRESSED=2048\nFILE_ATTRIBUTE_DEVICE=64\nFILE_ATTRIBUTE_DIRECTORY=16\nFILE_ATTRIBUTE_ENCRYPTED=16384\nFILE_ATTRIBUTE_HIDDEN=2\nFILE_ATTRIBUTE_INTEGRITY_STREAM=32768\nFILE_ATTRIBUTE_NORMAL=128\nFILE_ATTRIBUTE_NOT_CONTENT_INDEXED=8192\nFILE_ATTRIBUTE_NO_SCRUB_DATA=131072\nFILE_ATTRIBUTE_OFFLINE=4096\nFILE_ATTRIBUTE_READONLY=1\nFILE_ATTRIBUTE_REPARSE_POINT=1024\nFILE_ATTRIBUTE_SPARSE_FILE=512\nFILE_ATTRIBUTE_SYSTEM=4\nFILE_ATTRIBUTE_TEMPORARY=256\nFILE_ATTRIBUTE_VIRTUAL=65536\n\n\n\ntry :\n from _stat import *\nexcept ImportError:\n pass\n", ["_stat"]], "string": [".py", 
"''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n__all__=[\"ascii_letters\",\"ascii_lowercase\",\"ascii_uppercase\",\"capwords\",\n\"digits\",\"hexdigits\",\"octdigits\",\"printable\",\"punctuation\",\n\"whitespace\",\"Formatter\",\"Template\"]\n\nimport _string\n\n\nwhitespace=' \\t\\n\\r\\v\\f'\nascii_lowercase='abcdefghijklmnopqrstuvwxyz'\nascii_uppercase='ABCDEFGHIJKLMNOPQRSTUVWXYZ'\nascii_letters=ascii_lowercase+ascii_uppercase\ndigits='0123456789'\nhexdigits=digits+'abcdef'+'ABCDEF'\noctdigits='01234567'\npunctuation=r\"\"\"!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~\"\"\"\nprintable=digits+ascii_letters+punctuation+whitespace\n\n\n\n\ndef capwords(s,sep=None ):\n ''\n\n\n\n\n\n\n\n\n \n return (sep or ' ').join(x.capitalize()for x in s.split(sep))\n \n \n \nimport re as _re\nfrom collections import ChainMap as _ChainMap\n\n_sentinel_dict={}\n\nclass Template:\n ''\n \n delimiter='$'\n \n \n \n \n idpattern=r'(?a:[_a-z][_a-z0-9]*)'\n braceidpattern=None\n flags=_re.IGNORECASE\n \n def __init_subclass__(cls):\n super().__init_subclass__()\n if 'pattern'in cls.__dict__:\n pattern=cls.pattern\n else :\n delim=_re.escape(cls.delimiter)\n id=cls.idpattern\n bid=cls.braceidpattern or cls.idpattern\n pattern=fr\"\"\"\n {delim}(?:\n (?P{delim}) | # Escape sequence of two delimiters\n (?P{id}) | # delimiter and a Python identifier\n {{(?P{bid})}} | # delimiter and a braced identifier\n (?P) # Other ill-formed delimiter exprs\n )\n \"\"\"\n cls.pattern=_re.compile(pattern,cls.flags |_re.VERBOSE)\n \n def __init__(self,template):\n self.template=template\n \n \n \n def _invalid(self,mo):\n i=mo.start('invalid')\n lines=self.template[:i].splitlines(keepends=True )\n if not lines:\n colno=1\n lineno=1\n else :\n colno=i -len(''.join(lines[:-1]))\n lineno=len(lines)\n raise ValueError('Invalid placeholder in string: line %d, col %d'%\n (lineno,colno))\n \n def substitute(self,mapping=_sentinel_dict,/,**kws):\n if mapping is _sentinel_dict:\n mapping=kws\n elif kws:\n mapping=_ChainMap(kws,mapping)\n \n def convert(mo):\n \n named=mo.group('named')or mo.group('braced')\n if named is not None :\n return str(mapping[named])\n if mo.group('escaped')is not None :\n return self.delimiter\n if mo.group('invalid')is not None :\n self._invalid(mo)\n raise ValueError('Unrecognized named group in pattern',\n self.pattern)\n return self.pattern.sub(convert,self.template)\n \n def safe_substitute(self,mapping=_sentinel_dict,/,**kws):\n if mapping is _sentinel_dict:\n mapping=kws\n elif kws:\n mapping=_ChainMap(kws,mapping)\n \n def convert(mo):\n named=mo.group('named')or mo.group('braced')\n if named is not None :\n try :\n return str(mapping[named])\n except KeyError:\n return mo.group()\n if mo.group('escaped')is not None :\n return self.delimiter\n if mo.group('invalid')is not None :\n return mo.group()\n raise ValueError('Unrecognized named group in pattern',\n self.pattern)\n return self.pattern.sub(convert,self.template)\n \n \n \nTemplate.__init_subclass__()\n\n\n\n\n\n\n\n\n\n\n\n\nclass Formatter:\n def format(self,format_string,/,*args,**kwargs):\n return self.vformat(format_string,args,kwargs)\n \n def vformat(self,format_string,args,kwargs):\n used_args=set()\n result,_=self._vformat(format_string,args,kwargs,used_args,2)\n self.check_unused_args(used_args,args,kwargs)\n return result\n \n def _vformat(self,format_string,args,kwargs,used_args,recursion_depth,\n auto_arg_index=0):\n if recursion_depth <0:\n raise ValueError('Max string recursion exceeded')\n result=[]\n for literal_text,field_name,format_spec,conversion 
in\\\n self.parse(format_string):\n \n \n if literal_text:\n result.append(literal_text)\n \n \n if field_name is not None :\n \n \n \n \n if field_name =='':\n if auto_arg_index is False :\n raise ValueError('cannot switch from manual field '\n 'specification to automatic field '\n 'numbering')\n field_name=str(auto_arg_index)\n auto_arg_index +=1\n elif field_name.isdigit():\n if auto_arg_index:\n raise ValueError('cannot switch from manual field '\n 'specification to automatic field '\n 'numbering')\n \n \n auto_arg_index=False\n \n \n \n obj,arg_used=self.get_field(field_name,args,kwargs)\n used_args.add(arg_used)\n \n \n obj=self.convert_field(obj,conversion)\n \n \n format_spec,auto_arg_index=self._vformat(\n format_spec,args,kwargs,\n used_args,recursion_depth -1,\n auto_arg_index=auto_arg_index)\n \n \n result.append(self.format_field(obj,format_spec))\n \n return ''.join(result),auto_arg_index\n \n \n def get_value(self,key,args,kwargs):\n if isinstance(key,int):\n return args[key]\n else :\n return kwargs[key]\n \n \n def check_unused_args(self,used_args,args,kwargs):\n pass\n \n \n def format_field(self,value,format_spec):\n return format(value,format_spec)\n \n \n def convert_field(self,value,conversion):\n \n if conversion is None :\n return value\n elif conversion =='s':\n return str(value)\n elif conversion =='r':\n return repr(value)\n elif conversion =='a':\n return ascii(value)\n raise ValueError(\"Unknown conversion specifier {0!s}\".format(conversion))\n \n \n \n \n \n \n \n \n \n def parse(self,format_string):\n return _string.formatter_parser(format_string)\n \n \n \n \n \n \n \n def get_field(self,field_name,args,kwargs):\n first,rest=_string.formatter_field_name_split(field_name)\n \n obj=self.get_value(first,args,kwargs)\n \n \n \n for is_attr,i in rest:\n if is_attr:\n obj=getattr(obj,i)\n else :\n obj=obj[i]\n \n return obj,first\n", ["_string", "collections", "re"]], "stringprep": [".py", "\n''\n\n\n\n\n\nfrom unicodedata import ucd_3_2_0 as unicodedata\n\nassert unicodedata.unidata_version =='3.2.0'\n\ndef in_table_a1(code):\n if unicodedata.category(code)!='Cn':return False\n c=ord(code)\n if 0xFDD0 <=c <0xFDF0:return False\n return (c&0xFFFF)not in (0xFFFE,0xFFFF)\n \n \nb1_set=set([173,847,6150,6155,6156,6157,8203,8204,8205,8288,65279]+list(range(65024,65040)))\ndef in_table_b1(code):\n return ord(code)in b1_set\n \n \nb3_exceptions={\n0xb5:'\\u03bc',0xdf:'ss',0x130:'i\\u0307',0x149:'\\u02bcn',\n0x17f:'s',0x1f0:'j\\u030c',0x345:'\\u03b9',0x37a:' 
\\u03b9',\n0x390:'\\u03b9\\u0308\\u0301',0x3b0:'\\u03c5\\u0308\\u0301',0x3c2:'\\u03c3',0x3d0:'\\u03b2',\n0x3d1:'\\u03b8',0x3d2:'\\u03c5',0x3d3:'\\u03cd',0x3d4:'\\u03cb',\n0x3d5:'\\u03c6',0x3d6:'\\u03c0',0x3f0:'\\u03ba',0x3f1:'\\u03c1',\n0x3f2:'\\u03c3',0x3f5:'\\u03b5',0x587:'\\u0565\\u0582',0x1e96:'h\\u0331',\n0x1e97:'t\\u0308',0x1e98:'w\\u030a',0x1e99:'y\\u030a',0x1e9a:'a\\u02be',\n0x1e9b:'\\u1e61',0x1f50:'\\u03c5\\u0313',0x1f52:'\\u03c5\\u0313\\u0300',0x1f54:'\\u03c5\\u0313\\u0301',\n0x1f56:'\\u03c5\\u0313\\u0342',0x1f80:'\\u1f00\\u03b9',0x1f81:'\\u1f01\\u03b9',0x1f82:'\\u1f02\\u03b9',\n0x1f83:'\\u1f03\\u03b9',0x1f84:'\\u1f04\\u03b9',0x1f85:'\\u1f05\\u03b9',0x1f86:'\\u1f06\\u03b9',\n0x1f87:'\\u1f07\\u03b9',0x1f88:'\\u1f00\\u03b9',0x1f89:'\\u1f01\\u03b9',0x1f8a:'\\u1f02\\u03b9',\n0x1f8b:'\\u1f03\\u03b9',0x1f8c:'\\u1f04\\u03b9',0x1f8d:'\\u1f05\\u03b9',0x1f8e:'\\u1f06\\u03b9',\n0x1f8f:'\\u1f07\\u03b9',0x1f90:'\\u1f20\\u03b9',0x1f91:'\\u1f21\\u03b9',0x1f92:'\\u1f22\\u03b9',\n0x1f93:'\\u1f23\\u03b9',0x1f94:'\\u1f24\\u03b9',0x1f95:'\\u1f25\\u03b9',0x1f96:'\\u1f26\\u03b9',\n0x1f97:'\\u1f27\\u03b9',0x1f98:'\\u1f20\\u03b9',0x1f99:'\\u1f21\\u03b9',0x1f9a:'\\u1f22\\u03b9',\n0x1f9b:'\\u1f23\\u03b9',0x1f9c:'\\u1f24\\u03b9',0x1f9d:'\\u1f25\\u03b9',0x1f9e:'\\u1f26\\u03b9',\n0x1f9f:'\\u1f27\\u03b9',0x1fa0:'\\u1f60\\u03b9',0x1fa1:'\\u1f61\\u03b9',0x1fa2:'\\u1f62\\u03b9',\n0x1fa3:'\\u1f63\\u03b9',0x1fa4:'\\u1f64\\u03b9',0x1fa5:'\\u1f65\\u03b9',0x1fa6:'\\u1f66\\u03b9',\n0x1fa7:'\\u1f67\\u03b9',0x1fa8:'\\u1f60\\u03b9',0x1fa9:'\\u1f61\\u03b9',0x1faa:'\\u1f62\\u03b9',\n0x1fab:'\\u1f63\\u03b9',0x1fac:'\\u1f64\\u03b9',0x1fad:'\\u1f65\\u03b9',0x1fae:'\\u1f66\\u03b9',\n0x1faf:'\\u1f67\\u03b9',0x1fb2:'\\u1f70\\u03b9',0x1fb3:'\\u03b1\\u03b9',0x1fb4:'\\u03ac\\u03b9',\n0x1fb6:'\\u03b1\\u0342',0x1fb7:'\\u03b1\\u0342\\u03b9',0x1fbc:'\\u03b1\\u03b9',0x1fbe:'\\u03b9',\n0x1fc2:'\\u1f74\\u03b9',0x1fc3:'\\u03b7\\u03b9',0x1fc4:'\\u03ae\\u03b9',0x1fc6:'\\u03b7\\u0342',\n0x1fc7:'\\u03b7\\u0342\\u03b9',0x1fcc:'\\u03b7\\u03b9',0x1fd2:'\\u03b9\\u0308\\u0300',0x1fd3:'\\u03b9\\u0308\\u0301',\n0x1fd6:'\\u03b9\\u0342',0x1fd7:'\\u03b9\\u0308\\u0342',0x1fe2:'\\u03c5\\u0308\\u0300',0x1fe3:'\\u03c5\\u0308\\u0301',\n0x1fe4:'\\u03c1\\u0313',0x1fe6:'\\u03c5\\u0342',0x1fe7:'\\u03c5\\u0308\\u0342',0x1ff2:'\\u1f7c\\u03b9',\n0x1ff3:'\\u03c9\\u03b9',0x1ff4:'\\u03ce\\u03b9',0x1ff6:'\\u03c9\\u0342',0x1ff7:'\\u03c9\\u0342\\u03b9',\n0x1ffc:'\\u03c9\\u03b9',0x20a8:'rs',0x2102:'c',0x2103:'\\xb0c',\n0x2107:'\\u025b',0x2109:'\\xb0f',0x210b:'h',0x210c:'h',\n0x210d:'h',0x2110:'i',0x2111:'i',0x2112:'l',\n0x2115:'n',0x2116:'no',0x2119:'p',0x211a:'q',\n0x211b:'r',0x211c:'r',0x211d:'r',0x2120:'sm',\n0x2121:'tel',0x2122:'tm',0x2124:'z',0x2128:'z',\n0x212c:'b',0x212d:'c',0x2130:'e',0x2131:'f',\n0x2133:'m',0x213e:'\\u03b3',0x213f:'\\u03c0',0x2145:'d',\n0x3371:'hpa',0x3373:'au',0x3375:'ov',0x3380:'pa',\n0x3381:'na',0x3382:'\\u03bca',0x3383:'ma',0x3384:'ka',\n0x3385:'kb',0x3386:'mb',0x3387:'gb',0x338a:'pf',\n0x338b:'nf',0x338c:'\\u03bcf',0x3390:'hz',0x3391:'khz',\n0x3392:'mhz',0x3393:'ghz',0x3394:'thz',0x33a9:'pa',\n0x33aa:'kpa',0x33ab:'mpa',0x33ac:'gpa',0x33b4:'pv',\n0x33b5:'nv',0x33b6:'\\u03bcv',0x33b7:'mv',0x33b8:'kv',\n0x33b9:'mv',0x33ba:'pw',0x33bb:'nw',0x33bc:'\\u03bcw',\n0x33bd:'mw',0x33be:'kw',0x33bf:'mw',0x33c0:'k\\u03c9',\n0x33c1:'m\\u03c9',0x33c3:'bq',0x33c6:'c\\u2215kg',0x33c7:'co.',\n0x33c8:'db',0x33c9:'gy',0x33cb:'hp',0x33cd:'kk',\n0x33ce:'km',0x33d7:'ph',0x33d9:'ppm',0x33da:'pr',\n0x33dc:'sv',0x33dd:'wb',0xfb00:'ff',0xfb01:'fi',\n0xfb02:'fl',0xfb03:'
ffi',0xfb04:'ffl',0xfb05:'st',\n0xfb06:'st',0xfb13:'\\u0574\\u0576',0xfb14:'\\u0574\\u0565',0xfb15:'\\u0574\\u056b',\n0xfb16:'\\u057e\\u0576',0xfb17:'\\u0574\\u056d',0x1d400:'a',0x1d401:'b',\n0x1d402:'c',0x1d403:'d',0x1d404:'e',0x1d405:'f',\n0x1d406:'g',0x1d407:'h',0x1d408:'i',0x1d409:'j',\n0x1d40a:'k',0x1d40b:'l',0x1d40c:'m',0x1d40d:'n',\n0x1d40e:'o',0x1d40f:'p',0x1d410:'q',0x1d411:'r',\n0x1d412:'s',0x1d413:'t',0x1d414:'u',0x1d415:'v',\n0x1d416:'w',0x1d417:'x',0x1d418:'y',0x1d419:'z',\n0x1d434:'a',0x1d435:'b',0x1d436:'c',0x1d437:'d',\n0x1d438:'e',0x1d439:'f',0x1d43a:'g',0x1d43b:'h',\n0x1d43c:'i',0x1d43d:'j',0x1d43e:'k',0x1d43f:'l',\n0x1d440:'m',0x1d441:'n',0x1d442:'o',0x1d443:'p',\n0x1d444:'q',0x1d445:'r',0x1d446:'s',0x1d447:'t',\n0x1d448:'u',0x1d449:'v',0x1d44a:'w',0x1d44b:'x',\n0x1d44c:'y',0x1d44d:'z',0x1d468:'a',0x1d469:'b',\n0x1d46a:'c',0x1d46b:'d',0x1d46c:'e',0x1d46d:'f',\n0x1d46e:'g',0x1d46f:'h',0x1d470:'i',0x1d471:'j',\n0x1d472:'k',0x1d473:'l',0x1d474:'m',0x1d475:'n',\n0x1d476:'o',0x1d477:'p',0x1d478:'q',0x1d479:'r',\n0x1d47a:'s',0x1d47b:'t',0x1d47c:'u',0x1d47d:'v',\n0x1d47e:'w',0x1d47f:'x',0x1d480:'y',0x1d481:'z',\n0x1d49c:'a',0x1d49e:'c',0x1d49f:'d',0x1d4a2:'g',\n0x1d4a5:'j',0x1d4a6:'k',0x1d4a9:'n',0x1d4aa:'o',\n0x1d4ab:'p',0x1d4ac:'q',0x1d4ae:'s',0x1d4af:'t',\n0x1d4b0:'u',0x1d4b1:'v',0x1d4b2:'w',0x1d4b3:'x',\n0x1d4b4:'y',0x1d4b5:'z',0x1d4d0:'a',0x1d4d1:'b',\n0x1d4d2:'c',0x1d4d3:'d',0x1d4d4:'e',0x1d4d5:'f',\n0x1d4d6:'g',0x1d4d7:'h',0x1d4d8:'i',0x1d4d9:'j',\n0x1d4da:'k',0x1d4db:'l',0x1d4dc:'m',0x1d4dd:'n',\n0x1d4de:'o',0x1d4df:'p',0x1d4e0:'q',0x1d4e1:'r',\n0x1d4e2:'s',0x1d4e3:'t',0x1d4e4:'u',0x1d4e5:'v',\n0x1d4e6:'w',0x1d4e7:'x',0x1d4e8:'y',0x1d4e9:'z',\n0x1d504:'a',0x1d505:'b',0x1d507:'d',0x1d508:'e',\n0x1d509:'f',0x1d50a:'g',0x1d50d:'j',0x1d50e:'k',\n0x1d50f:'l',0x1d510:'m',0x1d511:'n',0x1d512:'o',\n0x1d513:'p',0x1d514:'q',0x1d516:'s',0x1d517:'t',\n0x1d518:'u',0x1d519:'v',0x1d51a:'w',0x1d51b:'x',\n0x1d51c:'y',0x1d538:'a',0x1d539:'b',0x1d53b:'d',\n0x1d53c:'e',0x1d53d:'f',0x1d53e:'g',0x1d540:'i',\n0x1d541:'j',0x1d542:'k',0x1d543:'l',0x1d544:'m',\n0x1d546:'o',0x1d54a:'s',0x1d54b:'t',0x1d54c:'u',\n0x1d54d:'v',0x1d54e:'w',0x1d54f:'x',0x1d550:'y',\n0x1d56c:'a',0x1d56d:'b',0x1d56e:'c',0x1d56f:'d',\n0x1d570:'e',0x1d571:'f',0x1d572:'g',0x1d573:'h',\n0x1d574:'i',0x1d575:'j',0x1d576:'k',0x1d577:'l',\n0x1d578:'m',0x1d579:'n',0x1d57a:'o',0x1d57b:'p',\n0x1d57c:'q',0x1d57d:'r',0x1d57e:'s',0x1d57f:'t',\n0x1d580:'u',0x1d581:'v',0x1d582:'w',0x1d583:'x',\n0x1d584:'y',0x1d585:'z',0x1d5a0:'a',0x1d5a1:'b',\n0x1d5a2:'c',0x1d5a3:'d',0x1d5a4:'e',0x1d5a5:'f',\n0x1d5a6:'g',0x1d5a7:'h',0x1d5a8:'i',0x1d5a9:'j',\n0x1d5aa:'k',0x1d5ab:'l',0x1d5ac:'m',0x1d5ad:'n',\n0x1d5ae:'o',0x1d5af:'p',0x1d5b0:'q',0x1d5b1:'r',\n0x1d5b2:'s',0x1d5b3:'t',0x1d5b4:'u',0x1d5b5:'v',\n0x1d5b6:'w',0x1d5b7:'x',0x1d5b8:'y',0x1d5b9:'z',\n0x1d5d4:'a',0x1d5d5:'b',0x1d5d6:'c',0x1d5d7:'d',\n0x1d5d8:'e',0x1d5d9:'f',0x1d5da:'g',0x1d5db:'h',\n0x1d5dc:'i',0x1d5dd:'j',0x1d5de:'k',0x1d5df:'l',\n0x1d5e0:'m',0x1d5e1:'n',0x1d5e2:'o',0x1d5e3:'p',\n0x1d5e4:'q',0x1d5e5:'r',0x1d5e6:'s',0x1d5e7:'t',\n0x1d5e8:'u',0x1d5e9:'v',0x1d5ea:'w',0x1d5eb:'x',\n0x1d5ec:'y',0x1d5ed:'z',0x1d608:'a',0x1d609:'b',\n0x1d60a:'c',0x1d60b:'d',0x1d60c:'e',0x1d60d:'f',\n0x1d60e:'g',0x1d60f:'h',0x1d610:'i',0x1d611:'j',\n0x1d612:'k',0x1d613:'l',0x1d614:'m',0x1d615:'n',\n0x1d616:'o',0x1d617:'p',0x1d618:'q',0x1d619:'r',\n0x1d61a:'s',0x1d61b:'t',0x1d61c:'u',0x1d61d:'v',\n0x1d61e:'w',0x1d61f:'x',0x1d620:'y',0x1d621:'z',\n0x1d63c:'a',0x1d63d:'b',0x1d63e:'c',0x1d63f:'d',\n0x1d640:'e',0
x1d641:'f',0x1d642:'g',0x1d643:'h',\n0x1d644:'i',0x1d645:'j',0x1d646:'k',0x1d647:'l',\n0x1d648:'m',0x1d649:'n',0x1d64a:'o',0x1d64b:'p',\n0x1d64c:'q',0x1d64d:'r',0x1d64e:'s',0x1d64f:'t',\n0x1d650:'u',0x1d651:'v',0x1d652:'w',0x1d653:'x',\n0x1d654:'y',0x1d655:'z',0x1d670:'a',0x1d671:'b',\n0x1d672:'c',0x1d673:'d',0x1d674:'e',0x1d675:'f',\n0x1d676:'g',0x1d677:'h',0x1d678:'i',0x1d679:'j',\n0x1d67a:'k',0x1d67b:'l',0x1d67c:'m',0x1d67d:'n',\n0x1d67e:'o',0x1d67f:'p',0x1d680:'q',0x1d681:'r',\n0x1d682:'s',0x1d683:'t',0x1d684:'u',0x1d685:'v',\n0x1d686:'w',0x1d687:'x',0x1d688:'y',0x1d689:'z',\n0x1d6a8:'\\u03b1',0x1d6a9:'\\u03b2',0x1d6aa:'\\u03b3',0x1d6ab:'\\u03b4',\n0x1d6ac:'\\u03b5',0x1d6ad:'\\u03b6',0x1d6ae:'\\u03b7',0x1d6af:'\\u03b8',\n0x1d6b0:'\\u03b9',0x1d6b1:'\\u03ba',0x1d6b2:'\\u03bb',0x1d6b3:'\\u03bc',\n0x1d6b4:'\\u03bd',0x1d6b5:'\\u03be',0x1d6b6:'\\u03bf',0x1d6b7:'\\u03c0',\n0x1d6b8:'\\u03c1',0x1d6b9:'\\u03b8',0x1d6ba:'\\u03c3',0x1d6bb:'\\u03c4',\n0x1d6bc:'\\u03c5',0x1d6bd:'\\u03c6',0x1d6be:'\\u03c7',0x1d6bf:'\\u03c8',\n0x1d6c0:'\\u03c9',0x1d6d3:'\\u03c3',0x1d6e2:'\\u03b1',0x1d6e3:'\\u03b2',\n0x1d6e4:'\\u03b3',0x1d6e5:'\\u03b4',0x1d6e6:'\\u03b5',0x1d6e7:'\\u03b6',\n0x1d6e8:'\\u03b7',0x1d6e9:'\\u03b8',0x1d6ea:'\\u03b9',0x1d6eb:'\\u03ba',\n0x1d6ec:'\\u03bb',0x1d6ed:'\\u03bc',0x1d6ee:'\\u03bd',0x1d6ef:'\\u03be',\n0x1d6f0:'\\u03bf',0x1d6f1:'\\u03c0',0x1d6f2:'\\u03c1',0x1d6f3:'\\u03b8',\n0x1d6f4:'\\u03c3',0x1d6f5:'\\u03c4',0x1d6f6:'\\u03c5',0x1d6f7:'\\u03c6',\n0x1d6f8:'\\u03c7',0x1d6f9:'\\u03c8',0x1d6fa:'\\u03c9',0x1d70d:'\\u03c3',\n0x1d71c:'\\u03b1',0x1d71d:'\\u03b2',0x1d71e:'\\u03b3',0x1d71f:'\\u03b4',\n0x1d720:'\\u03b5',0x1d721:'\\u03b6',0x1d722:'\\u03b7',0x1d723:'\\u03b8',\n0x1d724:'\\u03b9',0x1d725:'\\u03ba',0x1d726:'\\u03bb',0x1d727:'\\u03bc',\n0x1d728:'\\u03bd',0x1d729:'\\u03be',0x1d72a:'\\u03bf',0x1d72b:'\\u03c0',\n0x1d72c:'\\u03c1',0x1d72d:'\\u03b8',0x1d72e:'\\u03c3',0x1d72f:'\\u03c4',\n0x1d730:'\\u03c5',0x1d731:'\\u03c6',0x1d732:'\\u03c7',0x1d733:'\\u03c8',\n0x1d734:'\\u03c9',0x1d747:'\\u03c3',0x1d756:'\\u03b1',0x1d757:'\\u03b2',\n0x1d758:'\\u03b3',0x1d759:'\\u03b4',0x1d75a:'\\u03b5',0x1d75b:'\\u03b6',\n0x1d75c:'\\u03b7',0x1d75d:'\\u03b8',0x1d75e:'\\u03b9',0x1d75f:'\\u03ba',\n0x1d760:'\\u03bb',0x1d761:'\\u03bc',0x1d762:'\\u03bd',0x1d763:'\\u03be',\n0x1d764:'\\u03bf',0x1d765:'\\u03c0',0x1d766:'\\u03c1',0x1d767:'\\u03b8',\n0x1d768:'\\u03c3',0x1d769:'\\u03c4',0x1d76a:'\\u03c5',0x1d76b:'\\u03c6',\n0x1d76c:'\\u03c7',0x1d76d:'\\u03c8',0x1d76e:'\\u03c9',0x1d781:'\\u03c3',\n0x1d790:'\\u03b1',0x1d791:'\\u03b2',0x1d792:'\\u03b3',0x1d793:'\\u03b4',\n0x1d794:'\\u03b5',0x1d795:'\\u03b6',0x1d796:'\\u03b7',0x1d797:'\\u03b8',\n0x1d798:'\\u03b9',0x1d799:'\\u03ba',0x1d79a:'\\u03bb',0x1d79b:'\\u03bc',\n0x1d79c:'\\u03bd',0x1d79d:'\\u03be',0x1d79e:'\\u03bf',0x1d79f:'\\u03c0',\n0x1d7a0:'\\u03c1',0x1d7a1:'\\u03b8',0x1d7a2:'\\u03c3',0x1d7a3:'\\u03c4',\n0x1d7a4:'\\u03c5',0x1d7a5:'\\u03c6',0x1d7a6:'\\u03c7',0x1d7a7:'\\u03c8',\n0x1d7a8:'\\u03c9',0x1d7bb:'\\u03c3',}\n\ndef map_table_b3(code):\n r=b3_exceptions.get(ord(code))\n if r is not None :return r\n return code.lower()\n \n \ndef map_table_b2(a):\n al=map_table_b3(a)\n b=unicodedata.normalize(\"NFKC\",al)\n bl=\"\".join([map_table_b3(ch)for ch in b])\n c=unicodedata.normalize(\"NFKC\",bl)\n if b !=c:\n return c\n else :\n return al\n \n \ndef in_table_c11(code):\n return code ==\" \"\n \n \ndef in_table_c12(code):\n return unicodedata.category(code)==\"Zs\"and code !=\" \"\n \ndef in_table_c11_c12(code):\n return unicodedata.category(code)==\"Zs\"\n \n \ndef 
in_table_c21(code):\n return ord(code)<128 and unicodedata.category(code)==\"Cc\"\n \nc22_specials=set([1757,1807,6158,8204,8205,8232,8233,65279]+list(range(8288,8292))+list(range(8298,8304))+list(range(65529,65533))+list(range(119155,119163)))\ndef in_table_c22(code):\n c=ord(code)\n if c <128:return False\n if unicodedata.category(code)==\"Cc\":return True\n return c in c22_specials\n \ndef in_table_c21_c22(code):\n return unicodedata.category(code)==\"Cc\"or\\\n ord(code)in c22_specials\n \n \ndef in_table_c3(code):\n return unicodedata.category(code)==\"Co\"\n \n \ndef in_table_c4(code):\n c=ord(code)\n if c <0xFDD0:return False\n if c <0xFDF0:return True\n return (ord(code)&0xFFFF)in (0xFFFE,0xFFFF)\n \n \ndef in_table_c5(code):\n return unicodedata.category(code)==\"Cs\"\n \n \nc6_set=set(range(65529,65534))\ndef in_table_c6(code):\n return ord(code)in c6_set\n \n \nc7_set=set(range(12272,12284))\ndef in_table_c7(code):\n return ord(code)in c7_set\n \n \nc8_set=set([832,833,8206,8207]+list(range(8234,8239))+list(range(8298,8304)))\ndef in_table_c8(code):\n return ord(code)in c8_set\n \n \nc9_set=set([917505]+list(range(917536,917632)))\ndef in_table_c9(code):\n return ord(code)in c9_set\n \n \ndef in_table_d1(code):\n return unicodedata.bidirectional(code)in (\"R\",\"AL\")\n \n \ndef in_table_d2(code):\n return unicodedata.bidirectional(code)==\"L\"\n", ["unicodedata"]], "struct": [".py", "__all__=[\n\n'calcsize','pack','pack_into','unpack','unpack_from',\n'iter_unpack',\n\n\n'Struct',\n\n\n'error'\n]\n\nfrom _struct import *\nfrom _struct import _clearcache\nfrom _struct import __doc__\n", ["_struct"]], "subprocess": [".py", "\n\n\n\n\n\n\n\n\nr\"\"\"Subprocesses with accessible I/O streams\n\nThis module allows you to spawn processes, connect to their\ninput/output/error pipes, and obtain their return codes.\n\nFor a complete description of this module see the Python documentation.\n\nMain API\n========\nrun(...): Runs a command, waits for it to complete, then returns a\n CompletedProcess instance.\nPopen(...): A class for flexibly executing a command in a new process\n\nConstants\n---------\nDEVNULL: Special value that indicates that os.devnull should be used\nPIPE: Special value that indicates a pipe should be created\nSTDOUT: Special value that indicates that stderr should go to stdout\n\n\nOlder API\n=========\ncall(...): Runs a command, waits for it to complete, then returns\n the return code.\ncheck_call(...): Same as call() but raises CalledProcessError()\n if return code is not 0\ncheck_output(...): Same as check_call() but returns the contents of\n stdout instead of a return code\ngetoutput(...): Runs a command in the shell, waits for it to complete,\n then returns the output\ngetstatusoutput(...): Runs a command in the shell, waits for it to complete,\n then returns a (exitcode, output) tuple\n\"\"\"\n\nimport builtins\nimport errno\nimport io\nimport os\nimport time\nimport signal\nimport sys\nimport threading\nimport warnings\nimport contextlib\nfrom time import monotonic as _time\nimport types\n\ntry :\n import pwd\nexcept ImportError:\n pwd=None\ntry :\n import grp\nexcept ImportError:\n grp=None\n \n__all__=[\"Popen\",\"PIPE\",\"STDOUT\",\"call\",\"check_call\",\"getstatusoutput\",\n\"getoutput\",\"check_output\",\"run\",\"CalledProcessError\",\"DEVNULL\",\n\"SubprocessError\",\"TimeoutExpired\",\"CompletedProcess\"]\n\n\n\ntry :\n import msvcrt\n import _winapi\n _mswindows=True\nexcept ModuleNotFoundError:\n _mswindows=False\n import _posixsubprocess\n import 
select\n import selectors\nelse :\n from _winapi import (CREATE_NEW_CONSOLE,CREATE_NEW_PROCESS_GROUP,\n STD_INPUT_HANDLE,STD_OUTPUT_HANDLE,\n STD_ERROR_HANDLE,SW_HIDE,\n STARTF_USESTDHANDLES,STARTF_USESHOWWINDOW,\n ABOVE_NORMAL_PRIORITY_CLASS,BELOW_NORMAL_PRIORITY_CLASS,\n HIGH_PRIORITY_CLASS,IDLE_PRIORITY_CLASS,\n NORMAL_PRIORITY_CLASS,REALTIME_PRIORITY_CLASS,\n CREATE_NO_WINDOW,DETACHED_PROCESS,\n CREATE_DEFAULT_ERROR_MODE,CREATE_BREAKAWAY_FROM_JOB)\n \n __all__.extend([\"CREATE_NEW_CONSOLE\",\"CREATE_NEW_PROCESS_GROUP\",\n \"STD_INPUT_HANDLE\",\"STD_OUTPUT_HANDLE\",\n \"STD_ERROR_HANDLE\",\"SW_HIDE\",\n \"STARTF_USESTDHANDLES\",\"STARTF_USESHOWWINDOW\",\n \"STARTUPINFO\",\n \"ABOVE_NORMAL_PRIORITY_CLASS\",\"BELOW_NORMAL_PRIORITY_CLASS\",\n \"HIGH_PRIORITY_CLASS\",\"IDLE_PRIORITY_CLASS\",\n \"NORMAL_PRIORITY_CLASS\",\"REALTIME_PRIORITY_CLASS\",\n \"CREATE_NO_WINDOW\",\"DETACHED_PROCESS\",\n \"CREATE_DEFAULT_ERROR_MODE\",\"CREATE_BREAKAWAY_FROM_JOB\"])\n \n \n \nclass SubprocessError(Exception):pass\n\n\nclass CalledProcessError(SubprocessError):\n ''\n\n\n\n\n \n def __init__(self,returncode,cmd,output=None ,stderr=None ):\n self.returncode=returncode\n self.cmd=cmd\n self.output=output\n self.stderr=stderr\n \n def __str__(self):\n if self.returncode and self.returncode <0:\n try :\n return \"Command '%s' died with %r.\"%(\n self.cmd,signal.Signals(-self.returncode))\n except ValueError:\n return \"Command '%s' died with unknown signal %d.\"%(\n self.cmd,-self.returncode)\n else :\n return \"Command '%s' returned non-zero exit status %d.\"%(\n self.cmd,self.returncode)\n \n @property\n def stdout(self):\n ''\n return self.output\n \n @stdout.setter\n def stdout(self,value):\n \n \n self.output=value\n \n \nclass TimeoutExpired(SubprocessError):\n ''\n\n\n\n\n \n def __init__(self,cmd,timeout,output=None ,stderr=None ):\n self.cmd=cmd\n self.timeout=timeout\n self.output=output\n self.stderr=stderr\n \n def __str__(self):\n return (\"Command '%s' timed out after %s seconds\"%\n (self.cmd,self.timeout))\n \n @property\n def stdout(self):\n return self.output\n \n @stdout.setter\n def stdout(self,value):\n \n \n self.output=value\n \n \nif _mswindows:\n class STARTUPINFO:\n def __init__(self,*,dwFlags=0,hStdInput=None ,hStdOutput=None ,\n hStdError=None ,wShowWindow=0,lpAttributeList=None ):\n self.dwFlags=dwFlags\n self.hStdInput=hStdInput\n self.hStdOutput=hStdOutput\n self.hStdError=hStdError\n self.wShowWindow=wShowWindow\n self.lpAttributeList=lpAttributeList or {\"handle_list\":[]}\n \n def copy(self):\n attr_list=self.lpAttributeList.copy()\n if 'handle_list'in attr_list:\n attr_list['handle_list']=list(attr_list['handle_list'])\n \n return STARTUPINFO(dwFlags=self.dwFlags,\n hStdInput=self.hStdInput,\n hStdOutput=self.hStdOutput,\n hStdError=self.hStdError,\n wShowWindow=self.wShowWindow,\n lpAttributeList=attr_list)\n \n \n class Handle(int):\n closed=False\n \n def Close(self,CloseHandle=_winapi.CloseHandle):\n if not self.closed:\n self.closed=True\n CloseHandle(self)\n \n def Detach(self):\n if not self.closed:\n self.closed=True\n return int(self)\n raise ValueError(\"already closed\")\n \n def __repr__(self):\n return \"%s(%d)\"%(self.__class__.__name__,int(self))\n \n __del__=Close\nelse :\n\n\n\n _PIPE_BUF=getattr(select,'PIPE_BUF',512)\n \n \n \n \n if hasattr(selectors,'PollSelector'):\n _PopenSelector=selectors.PollSelector\n else :\n _PopenSelector=selectors.SelectSelector\n \n \nif _mswindows:\n\n\n\n\n\n\n\n\n _active=None\n \n def _cleanup():\n pass\nelse :\n\n\n\n\n 
_active=[]\n \n def _cleanup():\n if _active is None :\n return\n for inst in _active[:]:\n res=inst._internal_poll(_deadstate=sys.maxsize)\n if res is not None :\n try :\n _active.remove(inst)\n except ValueError:\n \n \n pass\n \nPIPE=-1\nSTDOUT=-2\nDEVNULL=-3\n\n\n\n\n\n\ndef _optim_args_from_interpreter_flags():\n ''\n \n args=[]\n value=sys.flags.optimize\n if value >0:\n args.append('-'+'O'*value)\n return args\n \n \ndef _args_from_interpreter_flags():\n ''\n \n flag_opt_map={\n 'debug':'d',\n \n \n 'dont_write_bytecode':'B',\n 'no_site':'S',\n 'verbose':'v',\n 'bytes_warning':'b',\n 'quiet':'q',\n \n }\n args=_optim_args_from_interpreter_flags()\n for flag,opt in flag_opt_map.items():\n v=getattr(sys.flags,flag)\n if v >0:\n args.append('-'+opt *v)\n \n if sys.flags.isolated:\n args.append('-I')\n else :\n if sys.flags.ignore_environment:\n args.append('-E')\n if sys.flags.no_user_site:\n args.append('-s')\n \n \n warnopts=sys.warnoptions[:]\n bytes_warning=sys.flags.bytes_warning\n xoptions=getattr(sys,'_xoptions',{})\n dev_mode=('dev'in xoptions)\n \n if bytes_warning >1:\n warnopts.remove(\"error::BytesWarning\")\n elif bytes_warning:\n warnopts.remove(\"default::BytesWarning\")\n if dev_mode:\n warnopts.remove('default')\n for opt in warnopts:\n args.append('-W'+opt)\n \n \n if dev_mode:\n args.extend(('-X','dev'))\n for opt in ('faulthandler','tracemalloc','importtime',\n 'showrefcount','utf8','oldparser'):\n if opt in xoptions:\n value=xoptions[opt]\n if value is True :\n arg=opt\n else :\n arg='%s=%s'%(opt,value)\n args.extend(('-X',arg))\n \n return args\n \n \ndef call(*popenargs,timeout=None ,**kwargs):\n ''\n\n\n\n\n\n \n with Popen(*popenargs,**kwargs)as p:\n try :\n return p.wait(timeout=timeout)\n except :\n p.kill()\n \n raise\n \n \ndef check_call(*popenargs,**kwargs):\n ''\n\n\n\n\n\n\n\n \n retcode=call(*popenargs,**kwargs)\n if retcode:\n cmd=kwargs.get(\"args\")\n if cmd is None :\n cmd=popenargs[0]\n raise CalledProcessError(retcode,cmd)\n return 0\n \n \ndef check_output(*popenargs,timeout=None ,**kwargs):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if 'stdout'in kwargs:\n raise ValueError('stdout argument not allowed, it will be overridden.')\n \n if 'input'in kwargs and kwargs['input']is None :\n \n \n kwargs['input']=''if kwargs.get('universal_newlines',False )else b''\n \n return run(*popenargs,stdout=PIPE,timeout=timeout,check=True ,\n **kwargs).stdout\n \n \nclass CompletedProcess(object):\n ''\n\n\n\n\n\n\n\n\n \n def __init__(self,args,returncode,stdout=None ,stderr=None ):\n self.args=args\n self.returncode=returncode\n self.stdout=stdout\n self.stderr=stderr\n \n def __repr__(self):\n args=['args={!r}'.format(self.args),\n 'returncode={!r}'.format(self.returncode)]\n if self.stdout is not None :\n args.append('stdout={!r}'.format(self.stdout))\n if self.stderr is not None :\n args.append('stderr={!r}'.format(self.stderr))\n return \"{}({})\".format(type(self).__name__,', '.join(args))\n \n __class_getitem__=classmethod(types.GenericAlias)\n \n \n def check_returncode(self):\n ''\n if self.returncode:\n raise CalledProcessError(self.returncode,self.args,self.stdout,\n self.stderr)\n \n \ndef run(*popenargs,\ninput=None ,capture_output=False ,timeout=None ,check=False ,**kwargs):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if input is not None :\n if kwargs.get('stdin')is not None :\n raise ValueError('stdin and input arguments may not both be used.')\n kwargs['stdin']=PIPE\n \n if capture_output:\n if 
kwargs.get('stdout')is not None or kwargs.get('stderr')is not None :\n raise ValueError('stdout and stderr arguments may not be used '\n 'with capture_output.')\n kwargs['stdout']=PIPE\n kwargs['stderr']=PIPE\n \n with Popen(*popenargs,**kwargs)as process:\n try :\n stdout,stderr=process.communicate(input,timeout=timeout)\n except TimeoutExpired as exc:\n process.kill()\n if _mswindows:\n \n \n \n \n \n exc.stdout,exc.stderr=process.communicate()\n else :\n \n \n process.wait()\n raise\n except :\n process.kill()\n \n raise\n retcode=process.poll()\n if check and retcode:\n raise CalledProcessError(retcode,process.args,\n output=stdout,stderr=stderr)\n return CompletedProcess(process.args,retcode,stdout,stderr)\n \n \ndef list2cmdline(seq):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n \n \n \n \n result=[]\n needquote=False\n for arg in map(os.fsdecode,seq):\n bs_buf=[]\n \n \n if result:\n result.append(' ')\n \n needquote=(\" \"in arg)or (\"\\t\"in arg)or not arg\n if needquote:\n result.append('\"')\n \n for c in arg:\n if c =='\\\\':\n \n bs_buf.append(c)\n elif c =='\"':\n \n result.append('\\\\'*len(bs_buf)*2)\n bs_buf=[]\n result.append('\\\\\"')\n else :\n \n if bs_buf:\n result.extend(bs_buf)\n bs_buf=[]\n result.append(c)\n \n \n if bs_buf:\n result.extend(bs_buf)\n \n if needquote:\n result.extend(bs_buf)\n result.append('\"')\n \n return ''.join(result)\n \n \n \n \n \ndef getstatusoutput(cmd):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n try :\n data=check_output(cmd,shell=True ,text=True ,stderr=STDOUT)\n exitcode=0\n except CalledProcessError as ex:\n data=ex.output\n exitcode=ex.returncode\n if data[-1:]=='\\n':\n data=data[:-1]\n return exitcode,data\n \ndef getoutput(cmd):\n ''\n\n\n\n\n\n\n\n \n return getstatusoutput(cmd)[1]\n \n \ndef _use_posix_spawn():\n ''\n\n\n\n\n\n\n\n\n\n\n \n if _mswindows or not hasattr(os,'posix_spawn'):\n \n return False\n \n if sys.platform =='darwin':\n \n return True\n \n \n try :\n ver=os.confstr('CS_GNU_LIBC_VERSION')\n \n parts=ver.split(maxsplit=1)\n if len(parts)!=2:\n \n raise ValueError\n libc=parts[0]\n version=tuple(map(int,parts[1].split('.')))\n \n if sys.platform =='linux'and libc =='glibc'and version >=(2,24):\n \n \n return True\n \n \n \n except (AttributeError,ValueError,OSError):\n \n pass\n \n \n return False\n \n \n_USE_POSIX_SPAWN=_use_posix_spawn()\n\n\nclass Popen(object):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n _child_created=False\n \n def __init__(self,args,bufsize=-1,executable=None ,\n stdin=None ,stdout=None ,stderr=None ,\n preexec_fn=None ,close_fds=True ,\n shell=False ,cwd=None ,env=None ,universal_newlines=None ,\n startupinfo=None ,creationflags=0,\n restore_signals=True ,start_new_session=False ,\n pass_fds=(),*,user=None ,group=None ,extra_groups=None ,\n encoding=None ,errors=None ,text=None ,umask=-1):\n ''\n _cleanup()\n \n \n \n \n \n self._waitpid_lock=threading.Lock()\n \n self._input=None\n self._communication_started=False\n if bufsize is None :\n bufsize=-1\n if not isinstance(bufsize,int):\n raise TypeError(\"bufsize must be an integer\")\n \n if _mswindows:\n if preexec_fn is not None :\n raise ValueError(\"preexec_fn is not supported on Windows \"\n \"platforms\")\n else :\n \n if pass_fds and not close_fds:\n warnings.warn(\"pass_fds overriding close_fds.\",RuntimeWarning)\n close_fds=True\n if startupinfo is not None :\n raise ValueError(\"startupinfo is only supported on Windows \"\n \"platforms\")\n if 
creationflags !=0:\n raise ValueError(\"creationflags is only supported on Windows \"\n \"platforms\")\n \n self.args=args\n self.stdin=None\n self.stdout=None\n self.stderr=None\n self.pid=None\n self.returncode=None\n self.encoding=encoding\n self.errors=errors\n \n \n if (text is not None and universal_newlines is not None\n and bool(universal_newlines)!=bool(text)):\n raise SubprocessError('Cannot disambiguate when both text '\n 'and universal_newlines are supplied but '\n 'different. Pass one or the other.')\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n (p2cread,p2cwrite,\n c2pread,c2pwrite,\n errread,errwrite)=self._get_handles(stdin,stdout,stderr)\n \n \n \n \n \n if _mswindows:\n if p2cwrite !=-1:\n p2cwrite=msvcrt.open_osfhandle(p2cwrite.Detach(),0)\n if c2pread !=-1:\n c2pread=msvcrt.open_osfhandle(c2pread.Detach(),0)\n if errread !=-1:\n errread=msvcrt.open_osfhandle(errread.Detach(),0)\n \n self.text_mode=encoding or errors or text or universal_newlines\n \n \n \n \n self._sigint_wait_secs=0.25\n \n self._closed_child_pipe_fds=False\n \n if self.text_mode:\n if bufsize ==1:\n line_buffering=True\n \n \n bufsize=-1\n else :\n line_buffering=False\n \n gid=None\n if group is not None :\n if not hasattr(os,'setregid'):\n raise ValueError(\"The 'group' parameter is not supported on the \"\n \"current platform\")\n \n elif isinstance(group,str):\n if grp is None :\n raise ValueError(\"The group parameter cannot be a string \"\n \"on systems without the grp module\")\n \n gid=grp.getgrnam(group).gr_gid\n elif isinstance(group,int):\n gid=group\n else :\n raise TypeError(\"Group must be a string or an integer, not {}\"\n .format(type(group)))\n \n if gid <0:\n raise ValueError(f\"Group ID cannot be negative, got {gid}\")\n \n gids=None\n if extra_groups is not None :\n if not hasattr(os,'setgroups'):\n raise ValueError(\"The 'extra_groups' parameter is not \"\n \"supported on the current platform\")\n \n elif isinstance(extra_groups,str):\n raise ValueError(\"Groups must be a list, not a string\")\n \n gids=[]\n for extra_group in extra_groups:\n if isinstance(extra_group,str):\n if grp is None :\n raise ValueError(\"Items in extra_groups cannot be \"\n \"strings on systems without the \"\n \"grp module\")\n \n gids.append(grp.getgrnam(extra_group).gr_gid)\n elif isinstance(extra_group,int):\n gids.append(extra_group)\n else :\n raise TypeError(\"Items in extra_groups must be a string \"\n \"or integer, not {}\"\n .format(type(extra_group)))\n \n \n \n for gid_check in gids:\n if gid_check <0:\n raise ValueError(f\"Group ID cannot be negative, got {gid_check}\")\n \n uid=None\n if user is not None :\n if not hasattr(os,'setreuid'):\n raise ValueError(\"The 'user' parameter is not supported on \"\n \"the current platform\")\n \n elif isinstance(user,str):\n if pwd is None :\n raise ValueError(\"The user parameter cannot be a string \"\n \"on systems without the pwd module\")\n \n uid=pwd.getpwnam(user).pw_uid\n elif isinstance(user,int):\n uid=user\n else :\n raise TypeError(\"User must be a string or an integer\")\n \n if uid <0:\n raise ValueError(f\"User ID cannot be negative, got {uid}\")\n \n try :\n if p2cwrite !=-1:\n self.stdin=io.open(p2cwrite,'wb',bufsize)\n if self.text_mode:\n self.stdin=io.TextIOWrapper(self.stdin,write_through=True ,\n line_buffering=line_buffering,\n encoding=encoding,errors=errors)\n if c2pread !=-1:\n self.stdout=io.open(c2pread,'rb',bufsize)\n if self.text_mode:\n self.stdout=io.TextIOWrapper(self.stdout,\n encoding=encoding,errors=errors)\n if 
errread !=-1:\n self.stderr=io.open(errread,'rb',bufsize)\n if self.text_mode:\n self.stderr=io.TextIOWrapper(self.stderr,\n encoding=encoding,errors=errors)\n \n self._execute_child(args,executable,preexec_fn,close_fds,\n pass_fds,cwd,env,\n startupinfo,creationflags,shell,\n p2cread,p2cwrite,\n c2pread,c2pwrite,\n errread,errwrite,\n restore_signals,\n gid,gids,uid,umask,\n start_new_session)\n except :\n \n for f in filter(None ,(self.stdin,self.stdout,self.stderr)):\n try :\n f.close()\n except OSError:\n pass\n \n if not self._closed_child_pipe_fds:\n to_close=[]\n if stdin ==PIPE:\n to_close.append(p2cread)\n if stdout ==PIPE:\n to_close.append(c2pwrite)\n if stderr ==PIPE:\n to_close.append(errwrite)\n if hasattr(self,'_devnull'):\n to_close.append(self._devnull)\n for fd in to_close:\n try :\n if _mswindows and isinstance(fd,Handle):\n fd.Close()\n else :\n os.close(fd)\n except OSError:\n pass\n \n raise\n \n def __repr__(self):\n obj_repr=(\n f\"<{self.__class__.__name__}: \"\n f\"returncode: {self.returncode} args: {list(self.args)!r}>\"\n )\n if len(obj_repr)>80:\n obj_repr=obj_repr[:76]+\"...>\"\n return obj_repr\n \n __class_getitem__=classmethod(types.GenericAlias)\n \n @property\n def universal_newlines(self):\n \n \n return self.text_mode\n \n @universal_newlines.setter\n def universal_newlines(self,universal_newlines):\n self.text_mode=bool(universal_newlines)\n \n def _translate_newlines(self,data,encoding,errors):\n data=data.decode(encoding,errors)\n return data.replace(\"\\r\\n\",\"\\n\").replace(\"\\r\",\"\\n\")\n \n def __enter__(self):\n return self\n \n def __exit__(self,exc_type,value,traceback):\n if self.stdout:\n self.stdout.close()\n if self.stderr:\n self.stderr.close()\n try :\n if self.stdin:\n self.stdin.close()\n finally :\n if exc_type ==KeyboardInterrupt:\n \n \n \n \n \n \n \n if self._sigint_wait_secs >0:\n try :\n self._wait(timeout=self._sigint_wait_secs)\n except TimeoutExpired:\n pass\n self._sigint_wait_secs=0\n return\n \n \n self.wait()\n \n def __del__(self,_maxsize=sys.maxsize,_warn=warnings.warn):\n if not self._child_created:\n \n return\n if self.returncode is None :\n \n \n _warn(\"subprocess %s is still running\"%self.pid,\n ResourceWarning,source=self)\n \n self._internal_poll(_deadstate=_maxsize)\n if self.returncode is None and _active is not None :\n \n _active.append(self)\n \n def _get_devnull(self):\n if not hasattr(self,'_devnull'):\n self._devnull=os.open(os.devnull,os.O_RDWR)\n return self._devnull\n \n def _stdin_write(self,input):\n if input:\n try :\n self.stdin.write(input)\n except BrokenPipeError:\n pass\n except OSError as exc:\n if exc.errno ==errno.EINVAL:\n \n \n \n pass\n else :\n raise\n \n try :\n self.stdin.close()\n except BrokenPipeError:\n pass\n except OSError as exc:\n if exc.errno ==errno.EINVAL:\n pass\n else :\n raise\n \n def communicate(self,input=None ,timeout=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n if self._communication_started and input:\n raise ValueError(\"Cannot send input after starting communication\")\n \n \n \n \n if (timeout is None and not self._communication_started and\n [self.stdin,self.stdout,self.stderr].count(None )>=2):\n stdout=None\n stderr=None\n if self.stdin:\n self._stdin_write(input)\n elif self.stdout:\n stdout=self.stdout.read()\n self.stdout.close()\n elif self.stderr:\n stderr=self.stderr.read()\n self.stderr.close()\n self.wait()\n else :\n if timeout is not None :\n endtime=_time()+timeout\n else :\n endtime=None\n \n try :\n 
stdout,stderr=self._communicate(input,endtime,timeout)\n except KeyboardInterrupt:\n \n \n if timeout is not None :\n sigint_timeout=min(self._sigint_wait_secs,\n self._remaining_time(endtime))\n else :\n sigint_timeout=self._sigint_wait_secs\n self._sigint_wait_secs=0\n try :\n self._wait(timeout=sigint_timeout)\n except TimeoutExpired:\n pass\n raise\n \n finally :\n self._communication_started=True\n \n sts=self.wait(timeout=self._remaining_time(endtime))\n \n return (stdout,stderr)\n \n \n def poll(self):\n ''\n \n return self._internal_poll()\n \n \n def _remaining_time(self,endtime):\n ''\n if endtime is None :\n return None\n else :\n return endtime -_time()\n \n \n def _check_timeout(self,endtime,orig_timeout,stdout_seq,stderr_seq,\n skip_check_and_raise=False ):\n ''\n if endtime is None :\n return\n if skip_check_and_raise or _time()>endtime:\n raise TimeoutExpired(\n self.args,orig_timeout,\n output=b''.join(stdout_seq)if stdout_seq else None ,\n stderr=b''.join(stderr_seq)if stderr_seq else None )\n \n \n def wait(self,timeout=None ):\n ''\n if timeout is not None :\n endtime=_time()+timeout\n try :\n return self._wait(timeout=timeout)\n except KeyboardInterrupt:\n \n \n \n \n if timeout is not None :\n sigint_timeout=min(self._sigint_wait_secs,\n self._remaining_time(endtime))\n else :\n sigint_timeout=self._sigint_wait_secs\n self._sigint_wait_secs=0\n try :\n self._wait(timeout=sigint_timeout)\n except TimeoutExpired:\n pass\n raise\n \n def _close_pipe_fds(self,\n p2cread,p2cwrite,\n c2pread,c2pwrite,\n errread,errwrite):\n \n devnull_fd=getattr(self,'_devnull',None )\n \n with contextlib.ExitStack()as stack:\n if _mswindows:\n if p2cread !=-1:\n stack.callback(p2cread.Close)\n if c2pwrite !=-1:\n stack.callback(c2pwrite.Close)\n if errwrite !=-1:\n stack.callback(errwrite.Close)\n else :\n if p2cread !=-1 and p2cwrite !=-1 and p2cread !=devnull_fd:\n stack.callback(os.close,p2cread)\n if c2pwrite !=-1 and c2pread !=-1 and c2pwrite !=devnull_fd:\n stack.callback(os.close,c2pwrite)\n if errwrite !=-1 and errread !=-1 and errwrite !=devnull_fd:\n stack.callback(os.close,errwrite)\n \n if devnull_fd is not None :\n stack.callback(os.close,devnull_fd)\n \n \n self._closed_child_pipe_fds=True\n \n if _mswindows:\n \n \n \n def _get_handles(self,stdin,stdout,stderr):\n ''\n\n \n if stdin is None and stdout is None and stderr is None :\n return (-1,-1,-1,-1,-1,-1)\n \n p2cread,p2cwrite=-1,-1\n c2pread,c2pwrite=-1,-1\n errread,errwrite=-1,-1\n \n if stdin is None :\n p2cread=_winapi.GetStdHandle(_winapi.STD_INPUT_HANDLE)\n if p2cread is None :\n p2cread,_=_winapi.CreatePipe(None ,0)\n p2cread=Handle(p2cread)\n _winapi.CloseHandle(_)\n elif stdin ==PIPE:\n p2cread,p2cwrite=_winapi.CreatePipe(None ,0)\n p2cread,p2cwrite=Handle(p2cread),Handle(p2cwrite)\n elif stdin ==DEVNULL:\n p2cread=msvcrt.get_osfhandle(self._get_devnull())\n elif isinstance(stdin,int):\n p2cread=msvcrt.get_osfhandle(stdin)\n else :\n \n p2cread=msvcrt.get_osfhandle(stdin.fileno())\n p2cread=self._make_inheritable(p2cread)\n \n if stdout is None :\n c2pwrite=_winapi.GetStdHandle(_winapi.STD_OUTPUT_HANDLE)\n if c2pwrite is None :\n _,c2pwrite=_winapi.CreatePipe(None ,0)\n c2pwrite=Handle(c2pwrite)\n _winapi.CloseHandle(_)\n elif stdout ==PIPE:\n c2pread,c2pwrite=_winapi.CreatePipe(None ,0)\n c2pread,c2pwrite=Handle(c2pread),Handle(c2pwrite)\n elif stdout ==DEVNULL:\n c2pwrite=msvcrt.get_osfhandle(self._get_devnull())\n elif isinstance(stdout,int):\n c2pwrite=msvcrt.get_osfhandle(stdout)\n else :\n \n 
c2pwrite=msvcrt.get_osfhandle(stdout.fileno())\n c2pwrite=self._make_inheritable(c2pwrite)\n \n if stderr is None :\n errwrite=_winapi.GetStdHandle(_winapi.STD_ERROR_HANDLE)\n if errwrite is None :\n _,errwrite=_winapi.CreatePipe(None ,0)\n errwrite=Handle(errwrite)\n _winapi.CloseHandle(_)\n elif stderr ==PIPE:\n errread,errwrite=_winapi.CreatePipe(None ,0)\n errread,errwrite=Handle(errread),Handle(errwrite)\n elif stderr ==STDOUT:\n errwrite=c2pwrite\n elif stderr ==DEVNULL:\n errwrite=msvcrt.get_osfhandle(self._get_devnull())\n elif isinstance(stderr,int):\n errwrite=msvcrt.get_osfhandle(stderr)\n else :\n \n errwrite=msvcrt.get_osfhandle(stderr.fileno())\n errwrite=self._make_inheritable(errwrite)\n \n return (p2cread,p2cwrite,\n c2pread,c2pwrite,\n errread,errwrite)\n \n \n def _make_inheritable(self,handle):\n ''\n h=_winapi.DuplicateHandle(\n _winapi.GetCurrentProcess(),handle,\n _winapi.GetCurrentProcess(),0,1,\n _winapi.DUPLICATE_SAME_ACCESS)\n return Handle(h)\n \n \n def _filter_handle_list(self,handle_list):\n ''\n\n \n \n \n \n return list({handle for handle in handle_list\n if handle&0x3 !=0x3\n or _winapi.GetFileType(handle)!=\n _winapi.FILE_TYPE_CHAR})\n \n \n def _execute_child(self,args,executable,preexec_fn,close_fds,\n pass_fds,cwd,env,\n startupinfo,creationflags,shell,\n p2cread,p2cwrite,\n c2pread,c2pwrite,\n errread,errwrite,\n unused_restore_signals,\n unused_gid,unused_gids,unused_uid,\n unused_umask,\n unused_start_new_session):\n ''\n \n assert not pass_fds,\"pass_fds not supported on Windows.\"\n \n if isinstance(args,str):\n pass\n elif isinstance(args,bytes):\n if shell:\n raise TypeError('bytes args is not allowed on Windows')\n args=list2cmdline([args])\n elif isinstance(args,os.PathLike):\n if shell:\n raise TypeError('path-like args is not allowed when '\n 'shell is true')\n args=list2cmdline([args])\n else :\n args=list2cmdline(args)\n \n if executable is not None :\n executable=os.fsdecode(executable)\n \n \n if startupinfo is None :\n startupinfo=STARTUPINFO()\n else :\n \n \n startupinfo=startupinfo.copy()\n \n use_std_handles=-1 not in (p2cread,c2pwrite,errwrite)\n if use_std_handles:\n startupinfo.dwFlags |=_winapi.STARTF_USESTDHANDLES\n startupinfo.hStdInput=p2cread\n startupinfo.hStdOutput=c2pwrite\n startupinfo.hStdError=errwrite\n \n attribute_list=startupinfo.lpAttributeList\n have_handle_list=bool(attribute_list and\n \"handle_list\"in attribute_list and\n attribute_list[\"handle_list\"])\n \n \n if have_handle_list or (use_std_handles and close_fds):\n if attribute_list is None :\n attribute_list=startupinfo.lpAttributeList={}\n handle_list=attribute_list[\"handle_list\"]=\\\n list(attribute_list.get(\"handle_list\",[]))\n \n if use_std_handles:\n handle_list +=[int(p2cread),int(c2pwrite),int(errwrite)]\n \n handle_list[:]=self._filter_handle_list(handle_list)\n \n if handle_list:\n if not close_fds:\n warnings.warn(\"startupinfo.lpAttributeList['handle_list'] \"\n \"overriding close_fds\",RuntimeWarning)\n \n \n \n \n close_fds=False\n \n if shell:\n startupinfo.dwFlags |=_winapi.STARTF_USESHOWWINDOW\n startupinfo.wShowWindow=_winapi.SW_HIDE\n comspec=os.environ.get(\"COMSPEC\",\"cmd.exe\")\n args='{} /c \"{}\"'.format(comspec,args)\n \n if cwd is not None :\n cwd=os.fsdecode(cwd)\n \n sys.audit(\"subprocess.Popen\",executable,args,cwd,env)\n \n \n try :\n hp,ht,pid,tid=_winapi.CreateProcess(executable,args,\n \n None ,None ,\n int(not close_fds),\n creationflags,\n env,\n cwd,\n startupinfo)\n finally :\n \n \n \n \n \n \n 
self._close_pipe_fds(p2cread,p2cwrite,\n c2pread,c2pwrite,\n errread,errwrite)\n \n \n self._child_created=True\n self._handle=Handle(hp)\n self.pid=pid\n _winapi.CloseHandle(ht)\n \n def _internal_poll(self,_deadstate=None ,\n _WaitForSingleObject=_winapi.WaitForSingleObject,\n _WAIT_OBJECT_0=_winapi.WAIT_OBJECT_0,\n _GetExitCodeProcess=_winapi.GetExitCodeProcess):\n ''\n\n\n\n\n\n \n if self.returncode is None :\n if _WaitForSingleObject(self._handle,0)==_WAIT_OBJECT_0:\n self.returncode=_GetExitCodeProcess(self._handle)\n return self.returncode\n \n \n def _wait(self,timeout):\n ''\n if timeout is None :\n timeout_millis=_winapi.INFINITE\n else :\n timeout_millis=int(timeout *1000)\n if self.returncode is None :\n \n result=_winapi.WaitForSingleObject(self._handle,\n timeout_millis)\n if result ==_winapi.WAIT_TIMEOUT:\n raise TimeoutExpired(self.args,timeout)\n self.returncode=_winapi.GetExitCodeProcess(self._handle)\n return self.returncode\n \n \n def _readerthread(self,fh,buffer):\n buffer.append(fh.read())\n fh.close()\n \n \n def _communicate(self,input,endtime,orig_timeout):\n \n \n if self.stdout and not hasattr(self,\"_stdout_buff\"):\n self._stdout_buff=[]\n self.stdout_thread=\\\n threading.Thread(target=self._readerthread,\n args=(self.stdout,self._stdout_buff))\n self.stdout_thread.daemon=True\n self.stdout_thread.start()\n if self.stderr and not hasattr(self,\"_stderr_buff\"):\n self._stderr_buff=[]\n self.stderr_thread=\\\n threading.Thread(target=self._readerthread,\n args=(self.stderr,self._stderr_buff))\n self.stderr_thread.daemon=True\n self.stderr_thread.start()\n \n if self.stdin:\n self._stdin_write(input)\n \n \n \n \n if self.stdout is not None :\n self.stdout_thread.join(self._remaining_time(endtime))\n if self.stdout_thread.is_alive():\n raise TimeoutExpired(self.args,orig_timeout)\n if self.stderr is not None :\n self.stderr_thread.join(self._remaining_time(endtime))\n if self.stderr_thread.is_alive():\n raise TimeoutExpired(self.args,orig_timeout)\n \n \n \n stdout=None\n stderr=None\n if self.stdout:\n stdout=self._stdout_buff\n self.stdout.close()\n if self.stderr:\n stderr=self._stderr_buff\n self.stderr.close()\n \n \n if stdout is not None :\n stdout=stdout[0]\n if stderr is not None :\n stderr=stderr[0]\n \n return (stdout,stderr)\n \n def send_signal(self,sig):\n ''\n \n if self.returncode is not None :\n return\n if sig ==signal.SIGTERM:\n self.terminate()\n elif sig ==signal.CTRL_C_EVENT:\n os.kill(self.pid,signal.CTRL_C_EVENT)\n elif sig ==signal.CTRL_BREAK_EVENT:\n os.kill(self.pid,signal.CTRL_BREAK_EVENT)\n else :\n raise ValueError(\"Unsupported signal: {}\".format(sig))\n \n def terminate(self):\n ''\n \n if self.returncode is not None :\n return\n try :\n _winapi.TerminateProcess(self._handle,1)\n except PermissionError:\n \n \n rc=_winapi.GetExitCodeProcess(self._handle)\n if rc ==_winapi.STILL_ACTIVE:\n raise\n self.returncode=rc\n \n kill=terminate\n \n else :\n \n \n \n def _get_handles(self,stdin,stdout,stderr):\n ''\n\n \n p2cread,p2cwrite=-1,-1\n c2pread,c2pwrite=-1,-1\n errread,errwrite=-1,-1\n \n if stdin is None :\n pass\n elif stdin ==PIPE:\n p2cread,p2cwrite=os.pipe()\n elif stdin ==DEVNULL:\n p2cread=self._get_devnull()\n elif isinstance(stdin,int):\n p2cread=stdin\n else :\n \n p2cread=stdin.fileno()\n \n if stdout is None :\n pass\n elif stdout ==PIPE:\n c2pread,c2pwrite=os.pipe()\n elif stdout ==DEVNULL:\n c2pwrite=self._get_devnull()\n elif isinstance(stdout,int):\n c2pwrite=stdout\n else :\n \n c2pwrite=stdout.fileno()\n \n if 
stderr is None :\n pass\n elif stderr ==PIPE:\n errread,errwrite=os.pipe()\n elif stderr ==STDOUT:\n if c2pwrite !=-1:\n errwrite=c2pwrite\n else :\n errwrite=sys.__stdout__.fileno()\n elif stderr ==DEVNULL:\n errwrite=self._get_devnull()\n elif isinstance(stderr,int):\n errwrite=stderr\n else :\n \n errwrite=stderr.fileno()\n \n return (p2cread,p2cwrite,\n c2pread,c2pwrite,\n errread,errwrite)\n \n \n def _posix_spawn(self,args,executable,env,restore_signals,\n p2cread,p2cwrite,\n c2pread,c2pwrite,\n errread,errwrite):\n ''\n if env is None :\n env=os.environ\n \n kwargs={}\n if restore_signals:\n \n sigset=[]\n for signame in ('SIGPIPE','SIGXFZ','SIGXFSZ'):\n signum=getattr(signal,signame,None )\n if signum is not None :\n sigset.append(signum)\n kwargs['setsigdef']=sigset\n \n file_actions=[]\n for fd in (p2cwrite,c2pread,errread):\n if fd !=-1:\n file_actions.append((os.POSIX_SPAWN_CLOSE,fd))\n for fd,fd2 in (\n (p2cread,0),\n (c2pwrite,1),\n (errwrite,2),\n ):\n if fd !=-1:\n file_actions.append((os.POSIX_SPAWN_DUP2,fd,fd2))\n if file_actions:\n kwargs['file_actions']=file_actions\n \n self.pid=os.posix_spawn(executable,args,env,**kwargs)\n self._child_created=True\n \n self._close_pipe_fds(p2cread,p2cwrite,\n c2pread,c2pwrite,\n errread,errwrite)\n \n def _execute_child(self,args,executable,preexec_fn,close_fds,\n pass_fds,cwd,env,\n startupinfo,creationflags,shell,\n p2cread,p2cwrite,\n c2pread,c2pwrite,\n errread,errwrite,\n restore_signals,\n gid,gids,uid,umask,\n start_new_session):\n ''\n \n if isinstance(args,(str,bytes)):\n args=[args]\n elif isinstance(args,os.PathLike):\n if shell:\n raise TypeError('path-like args is not allowed when '\n 'shell is true')\n args=[args]\n else :\n args=list(args)\n \n if shell:\n \n unix_shell=('/system/bin/sh'if\n hasattr(sys,'getandroidapilevel')else '/bin/sh')\n args=[unix_shell,\"-c\"]+args\n if executable:\n args[0]=executable\n \n if executable is None :\n executable=args[0]\n \n sys.audit(\"subprocess.Popen\",executable,args,cwd,env)\n \n if (_USE_POSIX_SPAWN\n and os.path.dirname(executable)\n and preexec_fn is None\n and not close_fds\n and not pass_fds\n and cwd is None\n and (p2cread ==-1 or p2cread >2)\n and (c2pwrite ==-1 or c2pwrite >2)\n and (errwrite ==-1 or errwrite >2)\n and not start_new_session\n and gid is None\n and gids is None\n and uid is None\n and umask <0):\n self._posix_spawn(args,executable,env,restore_signals,\n p2cread,p2cwrite,\n c2pread,c2pwrite,\n errread,errwrite)\n return\n \n orig_executable=executable\n \n \n \n \n errpipe_read,errpipe_write=os.pipe()\n \n low_fds_to_close=[]\n while errpipe_write <3:\n low_fds_to_close.append(errpipe_write)\n errpipe_write=os.dup(errpipe_write)\n for low_fd in low_fds_to_close:\n os.close(low_fd)\n try :\n try :\n \n \n \n \n \n if env is not None :\n env_list=[]\n for k,v in env.items():\n k=os.fsencode(k)\n if b'='in k:\n raise ValueError(\"illegal environment variable name\")\n env_list.append(k+b'='+os.fsencode(v))\n else :\n env_list=None\n executable=os.fsencode(executable)\n if os.path.dirname(executable):\n executable_list=(executable,)\n else :\n \n executable_list=tuple(\n os.path.join(os.fsencode(dir),executable)\n for dir in os.get_exec_path(env))\n fds_to_keep=set(pass_fds)\n fds_to_keep.add(errpipe_write)\n self.pid=_posixsubprocess.fork_exec(\n args,executable_list,\n close_fds,tuple(sorted(map(int,fds_to_keep))),\n cwd,env_list,\n p2cread,p2cwrite,c2pread,c2pwrite,\n errread,errwrite,\n errpipe_read,errpipe_write,\n restore_signals,start_new_session,\n 
gid,gids,uid,umask,\n preexec_fn)\n self._child_created=True\n finally :\n \n os.close(errpipe_write)\n \n self._close_pipe_fds(p2cread,p2cwrite,\n c2pread,c2pwrite,\n errread,errwrite)\n \n \n \n errpipe_data=bytearray()\n while True :\n part=os.read(errpipe_read,50000)\n errpipe_data +=part\n if not part or len(errpipe_data)>50000:\n break\n finally :\n \n os.close(errpipe_read)\n \n if errpipe_data:\n try :\n pid,sts=os.waitpid(self.pid,0)\n if pid ==self.pid:\n self._handle_exitstatus(sts)\n else :\n self.returncode=sys.maxsize\n except ChildProcessError:\n pass\n \n try :\n exception_name,hex_errno,err_msg=(\n errpipe_data.split(b':',2))\n \n \n \n err_msg=err_msg.decode()\n except ValueError:\n exception_name=b'SubprocessError'\n hex_errno=b'0'\n err_msg='Bad exception data from child: {!r}'.format(\n bytes(errpipe_data))\n child_exception_type=getattr(\n builtins,exception_name.decode('ascii'),\n SubprocessError)\n if issubclass(child_exception_type,OSError)and hex_errno:\n errno_num=int(hex_errno,16)\n child_exec_never_called=(err_msg ==\"noexec\")\n if child_exec_never_called:\n err_msg=\"\"\n \n err_filename=cwd\n else :\n err_filename=orig_executable\n if errno_num !=0:\n err_msg=os.strerror(errno_num)\n raise child_exception_type(errno_num,err_msg,err_filename)\n raise child_exception_type(err_msg)\n \n \n def _handle_exitstatus(self,sts,\n waitstatus_to_exitcode=os.waitstatus_to_exitcode,\n _WIFSTOPPED=os.WIFSTOPPED,\n _WSTOPSIG=os.WSTOPSIG):\n ''\n \n \n if _WIFSTOPPED(sts):\n self.returncode=-_WSTOPSIG(sts)\n else :\n self.returncode=waitstatus_to_exitcode(sts)\n \n def _internal_poll(self,_deadstate=None ,_waitpid=os.waitpid,\n _WNOHANG=os.WNOHANG,_ECHILD=errno.ECHILD):\n ''\n\n\n\n\n\n \n if self.returncode is None :\n if not self._waitpid_lock.acquire(False ):\n \n \n return None\n try :\n if self.returncode is not None :\n return self.returncode\n pid,sts=_waitpid(self.pid,_WNOHANG)\n if pid ==self.pid:\n self._handle_exitstatus(sts)\n except OSError as e:\n if _deadstate is not None :\n self.returncode=_deadstate\n elif e.errno ==_ECHILD:\n \n \n \n \n \n self.returncode=0\n finally :\n self._waitpid_lock.release()\n return self.returncode\n \n \n def _try_wait(self,wait_flags):\n ''\n try :\n (pid,sts)=os.waitpid(self.pid,wait_flags)\n except ChildProcessError:\n \n \n \n pid=self.pid\n sts=0\n return (pid,sts)\n \n \n def _wait(self,timeout):\n ''\n if self.returncode is not None :\n return self.returncode\n \n if timeout is not None :\n endtime=_time()+timeout\n \n \n delay=0.0005\n while True :\n if self._waitpid_lock.acquire(False ):\n try :\n if self.returncode is not None :\n break\n (pid,sts)=self._try_wait(os.WNOHANG)\n assert pid ==self.pid or pid ==0\n if pid ==self.pid:\n self._handle_exitstatus(sts)\n break\n finally :\n self._waitpid_lock.release()\n remaining=self._remaining_time(endtime)\n if remaining <=0:\n raise TimeoutExpired(self.args,timeout)\n delay=min(delay *2,remaining,.05)\n time.sleep(delay)\n else :\n while self.returncode is None :\n with self._waitpid_lock:\n if self.returncode is not None :\n break\n (pid,sts)=self._try_wait(0)\n \n \n \n if pid ==self.pid:\n self._handle_exitstatus(sts)\n return self.returncode\n \n \n def _communicate(self,input,endtime,orig_timeout):\n if self.stdin and not self._communication_started:\n \n \n try :\n self.stdin.flush()\n except BrokenPipeError:\n pass\n if not input:\n try :\n self.stdin.close()\n except BrokenPipeError:\n pass\n \n stdout=None\n stderr=None\n \n \n if not 
self._communication_started:\n self._fileobj2output={}\n if self.stdout:\n self._fileobj2output[self.stdout]=[]\n if self.stderr:\n self._fileobj2output[self.stderr]=[]\n \n if self.stdout:\n stdout=self._fileobj2output[self.stdout]\n if self.stderr:\n stderr=self._fileobj2output[self.stderr]\n \n self._save_input(input)\n \n if self._input:\n input_view=memoryview(self._input)\n \n with _PopenSelector()as selector:\n if self.stdin and input:\n selector.register(self.stdin,selectors.EVENT_WRITE)\n if self.stdout and not self.stdout.closed:\n selector.register(self.stdout,selectors.EVENT_READ)\n if self.stderr and not self.stderr.closed:\n selector.register(self.stderr,selectors.EVENT_READ)\n \n while selector.get_map():\n timeout=self._remaining_time(endtime)\n if timeout is not None and timeout <0:\n self._check_timeout(endtime,orig_timeout,\n stdout,stderr,\n skip_check_and_raise=True )\n raise RuntimeError(\n '_check_timeout(..., skip_check_and_raise=True) '\n 'failed to raise TimeoutExpired.')\n \n ready=selector.select(timeout)\n self._check_timeout(endtime,orig_timeout,stdout,stderr)\n \n \n \n \n for key,events in ready:\n if key.fileobj is self.stdin:\n chunk=input_view[self._input_offset:\n self._input_offset+_PIPE_BUF]\n try :\n self._input_offset +=os.write(key.fd,chunk)\n except BrokenPipeError:\n selector.unregister(key.fileobj)\n key.fileobj.close()\n else :\n if self._input_offset >=len(self._input):\n selector.unregister(key.fileobj)\n key.fileobj.close()\n elif key.fileobj in (self.stdout,self.stderr):\n data=os.read(key.fd,32768)\n if not data:\n selector.unregister(key.fileobj)\n key.fileobj.close()\n self._fileobj2output[key.fileobj].append(data)\n \n self.wait(timeout=self._remaining_time(endtime))\n \n \n if stdout is not None :\n stdout=b''.join(stdout)\n if stderr is not None :\n stderr=b''.join(stderr)\n \n \n \n if self.text_mode:\n if stdout is not None :\n stdout=self._translate_newlines(stdout,\n self.stdout.encoding,\n self.stdout.errors)\n if stderr is not None :\n stderr=self._translate_newlines(stderr,\n self.stderr.encoding,\n self.stderr.errors)\n \n return (stdout,stderr)\n \n \n def _save_input(self,input):\n \n \n \n if self.stdin and self._input is None :\n self._input_offset=0\n self._input=input\n if input is not None and self.text_mode:\n self._input=self._input.encode(self.stdin.encoding,\n self.stdin.errors)\n \n \n def send_signal(self,sig):\n ''\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n self.poll()\n if self.returncode is not None :\n \n return\n \n \n \n \n os.kill(self.pid,sig)\n \n def terminate(self):\n ''\n \n self.send_signal(signal.SIGTERM)\n \n def kill(self):\n ''\n \n self.send_signal(signal.SIGKILL)\n", ["_posixsubprocess", "_winapi", "builtins", "contextlib", "errno", "grp", "io", "msvcrt", "os", "pwd", "select", "selectors", "signal", "sys", "threading", "time", "types", "warnings"]], "sys": [".py", "\nfrom _sys import *\nimport javascript\n\n_getframe=Getframe\n\nabiflags=0\n\ndef audit(event,*args):\n ''\n pass\n \nbrython_debug_mode=__BRYTHON__.debug\n\nbase_exec_prefix=__BRYTHON__.brython_path\n\nbase_prefix=__BRYTHON__.brython_path\n\nbuiltin_module_names=__BRYTHON__.builtin_module_names\n\nbyteorder='little'\n\ndont_write_bytecode=True\n\nexec_prefix=__BRYTHON__.brython_path\n\nexecutable=__BRYTHON__.brython_path+'/brython.js'\n\nargv=[__BRYTHON__.script_path]\n\n\ndef displayhook(value):\n if value is not None :\n stdout.write(repr(value))\n \ndef exit(i=None ):\n raise SystemExit('')\n \nclass flag_class:\n\n def 
__init__(self):\n self.debug=0\n self.inspect=0\n self.interactive=0\n self.optimize=0\n self.dont_write_bytecode=0\n self.no_user_site=0\n self.no_site=0\n self.ignore_environment=0\n self.verbose=0\n self.bytes_warning=0\n self.quiet=0\n self.hash_randomization=1\n \nflags=flag_class()\n\nclass float_info:\n mant_dig=53\n max=javascript.Number.MAX_VALUE\n min=javascript.Number.MIN_VALUE\n radix=2\n \ndef getfilesystemencoding(*args,**kw):\n ''\n\n \n return 'utf-8'\n \ndef getfilesystemencodeerrors():\n return \"utf-8\"\n \ndef getrecursionlimit():\n return 200\n \ndef intern(string):\n return string\n \nclass int_info:\n bits_per_digit=30\n sizeof_digit=4\n \nmaxsize=2 **63 -1\n\nmaxunicode=1114111\n\nplatform=\"brython\"\n\nprefix=__BRYTHON__.brython_path\n\nversion='.'.join(str(x)for x in __BRYTHON__.version_info[:3])\nversion +=\" (default, %s) \\n[Javascript 1.5] on Brython\"\\\n%__BRYTHON__.compiled_date\nhexversion=0x030800f0\n\nclass _version_info:\n\n def __init__(self,version_info):\n self.version_info=version_info\n self.major=version_info[0]\n self.minor=version_info[1]\n self.micro=version_info[2]\n self.releaselevel=version_info[3]\n self.serial=version_info[4]\n \n def __getitem__(self,index):\n if isinstance(self.version_info[index],list):\n return tuple(self.version_info[index])\n return self.version_info[index]\n \n def hexversion(self):\n try :\n return '0%d0%d0%d'%(self.major,self.minor,self.micro)\n finally :\n return '0%d0000'%(self.major)\n \n def __str__(self):\n _s=\"sys.version(major=%d, minor=%d, micro=%d, releaselevel='%s', \"\\\n \"serial=%d)\"\n return _s %(self.major,self.minor,self.micro,\n self.releaselevel,self.serial)\n \n def __eq__(self,other):\n if isinstance(other,tuple):\n return (self.major,self.minor,self.micro)==other\n \n raise Error(\"Error! I don't know how to compare!\")\n \n def __ge__(self,other):\n if isinstance(other,tuple):\n return (self.major,self.minor,self.micro)>=other\n \n raise Error(\"Error! I don't know how to compare!\")\n \n def __gt__(self,other):\n if isinstance(other,tuple):\n return (self.major,self.minor,self.micro)>other\n \n raise Error(\"Error! I don't know how to compare!\")\n \n def __le__(self,other):\n if isinstance(other,tuple):\n return (self.major,self.minor,self.micro)<=other\n \n raise Error(\"Error! 
I don't know how to compare!\")\n \n def __lt__(self,other):\n if isinstance(other,tuple):\n return (self.major,self.minor,self.micro)0:\n for name in tuple(variables):\n value=notdone[name]\n m1=_findvar1_rx.search(value)\n m2=_findvar2_rx.search(value)\n if m1 and m2:\n m=m1 if m1.start()=\"5\":\n osname=\"solaris\"\n release=\"%d.%s\"%(int(release[0])-3,release[2:])\n \n \n \n bitness={2147483647:\"32bit\",9223372036854775807:\"64bit\"}\n machine +=\".%s\"%bitness[sys.maxsize]\n \n elif osname[:3]==\"aix\":\n from _aix_support import aix_platform\n return aix_platform()\n elif osname[:6]==\"cygwin\":\n osname=\"cygwin\"\n import re\n rel_re=re.compile(r'[\\d.]+')\n m=rel_re.match(release)\n if m:\n release=m.group()\n elif osname[:6]==\"darwin\":\n import _osx_support\n osname,release,machine=_osx_support.get_platform_osx(\n get_config_vars(),\n osname,release,machine)\n \n return \"%s-%s-%s\"%(osname,release,machine)\n \n \ndef get_python_version():\n return _PY_VERSION_SHORT\n \n \ndef _print_dict(title,data):\n for index,(key,value)in enumerate(sorted(data.items())):\n if index ==0:\n print('%s: '%(title))\n print('\\t%s = \"%s\"'%(key,value))\n \n \ndef _main():\n ''\n if '--generate-posix-vars'in sys.argv:\n _generate_posix_vars()\n return\n print('Platform: \"%s\"'%get_platform())\n print('Python version: \"%s\"'%get_python_version())\n print('Current installation scheme: \"%s\"'%_get_default_scheme())\n print()\n _print_dict('Paths',get_paths())\n print()\n _print_dict('Variables',get_config_vars())\n \n \nif __name__ =='__main__':\n _main()\n", ["_aix_support", "_osx_support", "os", "os.path", "pprint", "re", "sys", "types", "warnings"]], "tarfile": [".py", "#!/usr/bin/env python3\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n''\n\n\nversion=\"0.9.0\"\n__author__=\"Lars Gust\\u00e4bel (lars@gustaebel.de)\"\n__credits__=\"Gustavo Niemeyer, Niels Gust\\u00e4bel, Richard Townsend.\"\n\n\n\n\nfrom builtins import open as bltn_open\nimport sys\nimport os\nimport io\nimport shutil\nimport stat\nimport time\nimport struct\nimport copy\nimport re\n\ntry :\n import pwd\nexcept ImportError:\n pwd=None\ntry :\n import grp\nexcept ImportError:\n grp=None\n \n \nsymlink_exception=(AttributeError,NotImplementedError)\ntry :\n\n\n symlink_exception +=(OSError,)\nexcept NameError:\n pass\n \n \n__all__=[\"TarFile\",\"TarInfo\",\"is_tarfile\",\"TarError\",\"ReadError\",\n\"CompressionError\",\"StreamError\",\"ExtractError\",\"HeaderError\",\n\"ENCODING\",\"USTAR_FORMAT\",\"GNU_FORMAT\",\"PAX_FORMAT\",\n\"DEFAULT_FORMAT\",\"open\"]\n\n\n\n\nNUL=b\"\\0\"\nBLOCKSIZE=512\nRECORDSIZE=BLOCKSIZE *20\nGNU_MAGIC=b\"ustar 
\\0\"\nPOSIX_MAGIC=b\"ustar\\x0000\"\n\nLENGTH_NAME=100\nLENGTH_LINK=100\nLENGTH_PREFIX=155\n\nREGTYPE=b\"0\"\nAREGTYPE=b\"\\0\"\nLNKTYPE=b\"1\"\nSYMTYPE=b\"2\"\nCHRTYPE=b\"3\"\nBLKTYPE=b\"4\"\nDIRTYPE=b\"5\"\nFIFOTYPE=b\"6\"\nCONTTYPE=b\"7\"\n\nGNUTYPE_LONGNAME=b\"L\"\nGNUTYPE_LONGLINK=b\"K\"\nGNUTYPE_SPARSE=b\"S\"\n\nXHDTYPE=b\"x\"\nXGLTYPE=b\"g\"\nSOLARIS_XHDTYPE=b\"X\"\n\nUSTAR_FORMAT=0\nGNU_FORMAT=1\nPAX_FORMAT=2\nDEFAULT_FORMAT=PAX_FORMAT\n\n\n\n\n\nSUPPORTED_TYPES=(REGTYPE,AREGTYPE,LNKTYPE,\nSYMTYPE,DIRTYPE,FIFOTYPE,\nCONTTYPE,CHRTYPE,BLKTYPE,\nGNUTYPE_LONGNAME,GNUTYPE_LONGLINK,\nGNUTYPE_SPARSE)\n\n\nREGULAR_TYPES=(REGTYPE,AREGTYPE,\nCONTTYPE,GNUTYPE_SPARSE)\n\n\nGNU_TYPES=(GNUTYPE_LONGNAME,GNUTYPE_LONGLINK,\nGNUTYPE_SPARSE)\n\n\nPAX_FIELDS=(\"path\",\"linkpath\",\"size\",\"mtime\",\n\"uid\",\"gid\",\"uname\",\"gname\")\n\n\nPAX_NAME_FIELDS={\"path\",\"linkpath\",\"uname\",\"gname\"}\n\n\n\nPAX_NUMBER_FIELDS={\n\"atime\":float,\n\"ctime\":float,\n\"mtime\":float,\n\"uid\":int,\n\"gid\":int,\n\"size\":int\n}\n\n\n\n\nif os.name ==\"nt\":\n ENCODING=\"utf-8\"\nelse :\n ENCODING=sys.getfilesystemencoding()\n \n \n \n \n \ndef stn(s,length,encoding,errors):\n ''\n \n s=s.encode(encoding,errors)\n return s[:length]+(length -len(s))*NUL\n \ndef nts(s,encoding,errors):\n ''\n \n p=s.find(b\"\\0\")\n if p !=-1:\n s=s[:p]\n return s.decode(encoding,errors)\n \ndef nti(s):\n ''\n \n \n \n if s[0]in (0o200,0o377):\n n=0\n for i in range(len(s)-1):\n n <<=8\n n +=s[i+1]\n if s[0]==0o377:\n n=-(256 **(len(s)-1)-n)\n else :\n try :\n s=nts(s,\"ascii\",\"strict\")\n n=int(s.strip()or \"0\",8)\n except ValueError:\n raise InvalidHeaderError(\"invalid header\")\n return n\n \ndef itn(n,digits=8,format=DEFAULT_FORMAT):\n ''\n \n \n \n \n \n \n \n \n \n n=int(n)\n if 0 <=n <8 **(digits -1):\n s=bytes(\"%0*o\"%(digits -1,n),\"ascii\")+NUL\n elif format ==GNU_FORMAT and -256 **(digits -1)<=n <256 **(digits -1):\n if n >=0:\n s=bytearray([0o200])\n else :\n s=bytearray([0o377])\n n=256 **digits+n\n \n for i in range(digits -1):\n s.insert(1,n&0o377)\n n >>=8\n else :\n raise ValueError(\"overflow in number field\")\n \n return s\n \ndef calc_chksums(buf):\n ''\n\n\n\n\n\n\n \n unsigned_chksum=256+sum(struct.unpack_from(\"148B8x356B\",buf))\n signed_chksum=256+sum(struct.unpack_from(\"148b8x356b\",buf))\n return unsigned_chksum,signed_chksum\n \ndef copyfileobj(src,dst,length=None ,exception=OSError,bufsize=None ):\n ''\n\n \n bufsize=bufsize or 16 *1024\n if length ==0:\n return\n if length is None :\n shutil.copyfileobj(src,dst,bufsize)\n return\n \n blocks,remainder=divmod(length,bufsize)\n for b in range(blocks):\n buf=src.read(bufsize)\n if len(buf)self.bufsize:\n self.fileobj.write(self.buf[:self.bufsize])\n self.buf=self.buf[self.bufsize:]\n \n def close(self):\n ''\n\n \n if self.closed:\n return\n \n self.closed=True\n try :\n if self.mode ==\"w\"and self.comptype !=\"tar\":\n self.buf +=self.cmp.flush()\n \n if self.mode ==\"w\"and self.buf:\n self.fileobj.write(self.buf)\n self.buf=b\"\"\n if self.comptype ==\"gz\":\n self.fileobj.write(struct.pack(\"=0:\n blocks,remainder=divmod(pos -self.pos,self.bufsize)\n for i in range(blocks):\n self.read(self.bufsize)\n self.read(remainder)\n else :\n raise StreamError(\"seeking backwards is not allowed\")\n return self.pos\n \n def read(self,size):\n ''\n assert size is not None\n buf=self._read(size)\n self.pos +=len(buf)\n return buf\n \n def _read(self,size):\n ''\n \n if self.comptype ==\"tar\":\n return self.__read(size)\n \n c=len(self.dbuf)\n 
t=[self.dbuf]\n while c lastpos:\n self.map.append((False ,lastpos,offset,None ))\n self.map.append((True ,offset,offset+size,realpos))\n realpos +=size\n lastpos=offset+size\n if lastpos 0:\n while True :\n data,start,stop,offset=self.map[self.map_index]\n if start <=self.position \"%(self.__class__.__name__,self.name,id(self))\n \n def get_info(self):\n ''\n \n info={\n \"name\":self.name,\n \"mode\":self.mode&0o7777,\n \"uid\":self.uid,\n \"gid\":self.gid,\n \"size\":self.size,\n \"mtime\":self.mtime,\n \"chksum\":self.chksum,\n \"type\":self.type,\n \"linkname\":self.linkname,\n \"uname\":self.uname,\n \"gname\":self.gname,\n \"devmajor\":self.devmajor,\n \"devminor\":self.devminor\n }\n \n if info[\"type\"]==DIRTYPE and not info[\"name\"].endswith(\"/\"):\n info[\"name\"]+=\"/\"\n \n return info\n \n def tobuf(self,format=DEFAULT_FORMAT,encoding=ENCODING,errors=\"surrogateescape\"):\n ''\n \n info=self.get_info()\n \n if format ==USTAR_FORMAT:\n return self.create_ustar_header(info,encoding,errors)\n elif format ==GNU_FORMAT:\n return self.create_gnu_header(info,encoding,errors)\n elif format ==PAX_FORMAT:\n return self.create_pax_header(info,encoding)\n else :\n raise ValueError(\"invalid format\")\n \n def create_ustar_header(self,info,encoding,errors):\n ''\n \n info[\"magic\"]=POSIX_MAGIC\n \n if len(info[\"linkname\"].encode(encoding,errors))>LENGTH_LINK:\n raise ValueError(\"linkname is too long\")\n \n if len(info[\"name\"].encode(encoding,errors))>LENGTH_NAME:\n info[\"prefix\"],info[\"name\"]=self._posix_split_name(info[\"name\"],encoding,errors)\n \n return self._create_header(info,USTAR_FORMAT,encoding,errors)\n \n def create_gnu_header(self,info,encoding,errors):\n ''\n \n info[\"magic\"]=GNU_MAGIC\n \n buf=b\"\"\n if len(info[\"linkname\"].encode(encoding,errors))>LENGTH_LINK:\n buf +=self._create_gnu_long_header(info[\"linkname\"],GNUTYPE_LONGLINK,encoding,errors)\n \n if len(info[\"name\"].encode(encoding,errors))>LENGTH_NAME:\n buf +=self._create_gnu_long_header(info[\"name\"],GNUTYPE_LONGNAME,encoding,errors)\n \n return buf+self._create_header(info,GNU_FORMAT,encoding,errors)\n \n def create_pax_header(self,info,encoding):\n ''\n\n\n \n info[\"magic\"]=POSIX_MAGIC\n pax_headers=self.pax_headers.copy()\n \n \n \n for name,hname,length in (\n (\"name\",\"path\",LENGTH_NAME),(\"linkname\",\"linkpath\",LENGTH_LINK),\n (\"uname\",\"uname\",32),(\"gname\",\"gname\",32)):\n \n if hname in pax_headers:\n \n continue\n \n \n try :\n info[name].encode(\"ascii\",\"strict\")\n except UnicodeEncodeError:\n pax_headers[hname]=info[name]\n continue\n \n if len(info[name])>length:\n pax_headers[hname]=info[name]\n \n \n \n for name,digits in ((\"uid\",8),(\"gid\",8),(\"size\",12),(\"mtime\",12)):\n if name in pax_headers:\n \n info[name]=0\n continue\n \n val=info[name]\n if not 0 <=val <8 **(digits -1)or isinstance(val,float):\n pax_headers[name]=str(val)\n info[name]=0\n \n \n if pax_headers:\n buf=self._create_pax_generic_header(pax_headers,XHDTYPE,encoding)\n else :\n buf=b\"\"\n \n return buf+self._create_header(info,USTAR_FORMAT,\"ascii\",\"replace\")\n \n @classmethod\n def create_pax_global_header(cls,pax_headers):\n ''\n \n return cls._create_pax_generic_header(pax_headers,XGLTYPE,\"utf-8\")\n \n def _posix_split_name(self,name,encoding,errors):\n ''\n\n \n components=name.split(\"/\")\n for i in range(1,len(components)):\n prefix=\"/\".join(components[:i])\n name=\"/\".join(components[i:])\n if len(prefix.encode(encoding,errors))<=LENGTH_PREFIX and\\\n 
len(name.encode(encoding,errors))<=LENGTH_NAME:\n break\n else :\n raise ValueError(\"name is too long\")\n \n return prefix,name\n \n @staticmethod\n def _create_header(info,format,encoding,errors):\n ''\n\n \n has_device_fields=info.get(\"type\")in (CHRTYPE,BLKTYPE)\n if has_device_fields:\n devmajor=itn(info.get(\"devmajor\",0),8,format)\n devminor=itn(info.get(\"devminor\",0),8,format)\n else :\n devmajor=stn(\"\",8,encoding,errors)\n devminor=stn(\"\",8,encoding,errors)\n \n parts=[\n stn(info.get(\"name\",\"\"),100,encoding,errors),\n itn(info.get(\"mode\",0)&0o7777,8,format),\n itn(info.get(\"uid\",0),8,format),\n itn(info.get(\"gid\",0),8,format),\n itn(info.get(\"size\",0),12,format),\n itn(info.get(\"mtime\",0),12,format),\n b\" \",\n info.get(\"type\",REGTYPE),\n stn(info.get(\"linkname\",\"\"),100,encoding,errors),\n info.get(\"magic\",POSIX_MAGIC),\n stn(info.get(\"uname\",\"\"),32,encoding,errors),\n stn(info.get(\"gname\",\"\"),32,encoding,errors),\n devmajor,\n devminor,\n stn(info.get(\"prefix\",\"\"),155,encoding,errors)\n ]\n \n buf=struct.pack(\"%ds\"%BLOCKSIZE,b\"\".join(parts))\n chksum=calc_chksums(buf[-BLOCKSIZE:])[0]\n buf=buf[:-364]+bytes(\"%06o\\0\"%chksum,\"ascii\")+buf[-357:]\n return buf\n \n @staticmethod\n def _create_payload(payload):\n ''\n\n \n blocks,remainder=divmod(len(payload),BLOCKSIZE)\n if remainder >0:\n payload +=(BLOCKSIZE -remainder)*NUL\n return payload\n \n @classmethod\n def _create_gnu_long_header(cls,name,type,encoding,errors):\n ''\n\n \n name=name.encode(encoding,errors)+NUL\n \n info={}\n info[\"name\"]=\"././@LongLink\"\n info[\"type\"]=type\n info[\"size\"]=len(name)\n info[\"magic\"]=GNU_MAGIC\n \n \n return cls._create_header(info,USTAR_FORMAT,encoding,errors)+\\\n cls._create_payload(name)\n \n @classmethod\n def _create_pax_generic_header(cls,pax_headers,type,encoding):\n ''\n\n\n \n \n \n binary=False\n for keyword,value in pax_headers.items():\n try :\n value.encode(\"utf-8\",\"strict\")\n except UnicodeEncodeError:\n binary=True\n break\n \n records=b\"\"\n if binary:\n \n records +=b\"21 hdrcharset=BINARY\\n\"\n \n for keyword,value in pax_headers.items():\n keyword=keyword.encode(\"utf-8\")\n if binary:\n \n \n value=value.encode(encoding,\"surrogateescape\")\n else :\n value=value.encode(\"utf-8\")\n \n l=len(keyword)+len(value)+3\n n=p=0\n while True :\n n=l+len(str(p))\n if n ==p:\n break\n p=n\n records +=bytes(str(p),\"ascii\")+b\" \"+keyword+b\"=\"+value+b\"\\n\"\n \n \n \n info={}\n info[\"name\"]=\"././@PaxHeader\"\n info[\"type\"]=type\n info[\"size\"]=len(records)\n info[\"magic\"]=POSIX_MAGIC\n \n \n return cls._create_header(info,USTAR_FORMAT,\"ascii\",\"replace\")+\\\n cls._create_payload(records)\n \n @classmethod\n def frombuf(cls,buf,encoding,errors):\n ''\n \n if len(buf)==0:\n raise EmptyHeaderError(\"empty header\")\n if len(buf)!=BLOCKSIZE:\n raise TruncatedHeaderError(\"truncated header\")\n if buf.count(NUL)==BLOCKSIZE:\n raise EOFHeaderError(\"end of file header\")\n \n chksum=nti(buf[148:156])\n if chksum not in calc_chksums(buf):\n raise InvalidHeaderError(\"bad checksum\")\n \n obj=cls()\n obj.name=nts(buf[0:100],encoding,errors)\n obj.mode=nti(buf[100:108])\n obj.uid=nti(buf[108:116])\n obj.gid=nti(buf[116:124])\n obj.size=nti(buf[124:136])\n obj.mtime=nti(buf[136:148])\n obj.chksum=chksum\n obj.type=buf[156:157]\n obj.linkname=nts(buf[157:257],encoding,errors)\n obj.uname=nts(buf[265:297],encoding,errors)\n obj.gname=nts(buf[297:329],encoding,errors)\n obj.devmajor=nti(buf[329:337])\n 
obj.devminor=nti(buf[337:345])\n prefix=nts(buf[345:500],encoding,errors)\n \n \n \n if obj.type ==AREGTYPE and obj.name.endswith(\"/\"):\n obj.type=DIRTYPE\n \n \n \n \n if obj.type ==GNUTYPE_SPARSE:\n pos=386\n structs=[]\n for i in range(4):\n try :\n offset=nti(buf[pos:pos+12])\n numbytes=nti(buf[pos+12:pos+24])\n except ValueError:\n break\n structs.append((offset,numbytes))\n pos +=24\n isextended=bool(buf[482])\n origsize=nti(buf[483:495])\n obj._sparse_structs=(structs,isextended,origsize)\n \n \n if obj.isdir():\n obj.name=obj.name.rstrip(\"/\")\n \n \n if prefix and obj.type not in GNU_TYPES:\n obj.name=prefix+\"/\"+obj.name\n return obj\n \n @classmethod\n def fromtarfile(cls,tarfile):\n ''\n\n \n buf=tarfile.fileobj.read(BLOCKSIZE)\n obj=cls.frombuf(buf,tarfile.encoding,tarfile.errors)\n obj.offset=tarfile.fileobj.tell()-BLOCKSIZE\n return obj._proc_member(tarfile)\n \n \n \n \n \n \n \n \n \n \n \n \n def _proc_member(self,tarfile):\n ''\n\n \n if self.type in (GNUTYPE_LONGNAME,GNUTYPE_LONGLINK):\n return self._proc_gnulong(tarfile)\n elif self.type ==GNUTYPE_SPARSE:\n return self._proc_sparse(tarfile)\n elif self.type in (XHDTYPE,XGLTYPE,SOLARIS_XHDTYPE):\n return self._proc_pax(tarfile)\n else :\n return self._proc_builtin(tarfile)\n \n def _proc_builtin(self,tarfile):\n ''\n\n \n self.offset_data=tarfile.fileobj.tell()\n offset=self.offset_data\n if self.isreg()or self.type not in SUPPORTED_TYPES:\n \n offset +=self._block(self.size)\n tarfile.offset=offset\n \n \n \n self._apply_pax_info(tarfile.pax_headers,tarfile.encoding,tarfile.errors)\n \n return self\n \n def _proc_gnulong(self,tarfile):\n ''\n\n \n buf=tarfile.fileobj.read(self._block(self.size))\n \n \n try :\n next=self.fromtarfile(tarfile)\n except HeaderError:\n raise SubsequentHeaderError(\"missing or bad subsequent header\")\n \n \n \n next.offset=self.offset\n if self.type ==GNUTYPE_LONGNAME:\n next.name=nts(buf,tarfile.encoding,tarfile.errors)\n elif self.type ==GNUTYPE_LONGLINK:\n next.linkname=nts(buf,tarfile.encoding,tarfile.errors)\n \n return next\n \n def _proc_sparse(self,tarfile):\n ''\n \n \n structs,isextended,origsize=self._sparse_structs\n del self._sparse_structs\n \n \n while isextended:\n buf=tarfile.fileobj.read(BLOCKSIZE)\n pos=0\n for i in range(21):\n try :\n offset=nti(buf[pos:pos+12])\n numbytes=nti(buf[pos+12:pos+24])\n except ValueError:\n break\n if offset and numbytes:\n structs.append((offset,numbytes))\n pos +=24\n isextended=bool(buf[504])\n self.sparse=structs\n \n self.offset_data=tarfile.fileobj.tell()\n tarfile.offset=self.offset_data+self._block(self.size)\n self.size=origsize\n return self\n \n def _proc_pax(self,tarfile):\n ''\n\n \n \n buf=tarfile.fileobj.read(self._block(self.size))\n \n \n \n \n if self.type ==XGLTYPE:\n pax_headers=tarfile.pax_headers\n else :\n pax_headers=tarfile.pax_headers.copy()\n \n \n \n \n \n \n match=re.search(br\"\\d+ hdrcharset=([^\\n]+)\\n\",buf)\n if match is not None :\n pax_headers[\"hdrcharset\"]=match.group(1).decode(\"utf-8\")\n \n \n \n \n hdrcharset=pax_headers.get(\"hdrcharset\")\n if hdrcharset ==\"BINARY\":\n encoding=tarfile.encoding\n else :\n encoding=\"utf-8\"\n \n \n \n \n \n regex=re.compile(br\"(\\d+) ([^=]+)=\")\n pos=0\n while True :\n match=regex.match(buf,pos)\n if not match:\n break\n \n length,keyword=match.groups()\n length=int(length)\n if length ==0:\n raise InvalidHeaderError(\"invalid header\")\n value=buf[match.end(2)+1:match.start(1)+length -1]\n \n \n \n \n \n \n \n \n 
keyword=self._decode_pax_field(keyword,\"utf-8\",\"utf-8\",\n tarfile.errors)\n if keyword in PAX_NAME_FIELDS:\n value=self._decode_pax_field(value,encoding,tarfile.encoding,\n tarfile.errors)\n else :\n value=self._decode_pax_field(value,\"utf-8\",\"utf-8\",\n tarfile.errors)\n \n pax_headers[keyword]=value\n pos +=length\n \n \n try :\n next=self.fromtarfile(tarfile)\n except HeaderError:\n raise SubsequentHeaderError(\"missing or bad subsequent header\")\n \n \n if \"GNU.sparse.map\"in pax_headers:\n \n self._proc_gnusparse_01(next,pax_headers)\n \n elif \"GNU.sparse.size\"in pax_headers:\n \n self._proc_gnusparse_00(next,pax_headers,buf)\n \n elif pax_headers.get(\"GNU.sparse.major\")==\"1\"and pax_headers.get(\"GNU.sparse.minor\")==\"0\":\n \n self._proc_gnusparse_10(next,pax_headers,tarfile)\n \n if self.type in (XHDTYPE,SOLARIS_XHDTYPE):\n \n next._apply_pax_info(pax_headers,tarfile.encoding,tarfile.errors)\n next.offset=self.offset\n \n if \"size\"in pax_headers:\n \n \n \n offset=next.offset_data\n if next.isreg()or next.type not in SUPPORTED_TYPES:\n offset +=next._block(next.size)\n tarfile.offset=offset\n \n return next\n \n def _proc_gnusparse_00(self,next,pax_headers,buf):\n ''\n \n offsets=[]\n for match in re.finditer(br\"\\d+ GNU.sparse.offset=(\\d+)\\n\",buf):\n offsets.append(int(match.group(1)))\n numbytes=[]\n for match in re.finditer(br\"\\d+ GNU.sparse.numbytes=(\\d+)\\n\",buf):\n numbytes.append(int(match.group(1)))\n next.sparse=list(zip(offsets,numbytes))\n \n def _proc_gnusparse_01(self,next,pax_headers):\n ''\n \n sparse=[int(x)for x in pax_headers[\"GNU.sparse.map\"].split(\",\")]\n next.sparse=list(zip(sparse[::2],sparse[1::2]))\n \n def _proc_gnusparse_10(self,next,pax_headers,tarfile):\n ''\n \n fields=None\n sparse=[]\n buf=tarfile.fileobj.read(BLOCKSIZE)\n fields,buf=buf.split(b\"\\n\",1)\n fields=int(fields)\n while len(sparse)0:\n self.fileobj.write(NUL *(RECORDSIZE -remainder))\n finally :\n if not self._extfileobj:\n self.fileobj.close()\n \n def getmember(self,name):\n ''\n\n\n\n \n tarinfo=self._getmember(name)\n if tarinfo is None :\n raise KeyError(\"filename %r not found\"%name)\n return tarinfo\n \n def getmembers(self):\n ''\n\n \n self._check()\n if not self._loaded:\n self._load()\n \n return self.members\n \n def getnames(self):\n ''\n\n \n return [tarinfo.name for tarinfo in self.getmembers()]\n \n def gettarinfo(self,name=None ,arcname=None ,fileobj=None ):\n ''\n\n\n\n\n\n\n \n self._check(\"awx\")\n \n \n \n if fileobj is not None :\n name=fileobj.name\n \n \n \n \n if arcname is None :\n arcname=name\n drv,arcname=os.path.splitdrive(arcname)\n arcname=arcname.replace(os.sep,\"/\")\n arcname=arcname.lstrip(\"/\")\n \n \n \n tarinfo=self.tarinfo()\n tarinfo.tarfile=self\n \n \n if fileobj is None :\n if not self.dereference:\n statres=os.lstat(name)\n else :\n statres=os.stat(name)\n else :\n statres=os.fstat(fileobj.fileno())\n linkname=\"\"\n \n stmd=statres.st_mode\n if stat.S_ISREG(stmd):\n inode=(statres.st_ino,statres.st_dev)\n if not self.dereference and statres.st_nlink >1 and\\\n inode in self.inodes and arcname !=self.inodes[inode]:\n \n \n type=LNKTYPE\n linkname=self.inodes[inode]\n else :\n \n \n type=REGTYPE\n if inode[0]:\n self.inodes[inode]=arcname\n elif stat.S_ISDIR(stmd):\n type=DIRTYPE\n elif stat.S_ISFIFO(stmd):\n type=FIFOTYPE\n elif stat.S_ISLNK(stmd):\n type=SYMTYPE\n linkname=os.readlink(name)\n elif stat.S_ISCHR(stmd):\n type=CHRTYPE\n elif stat.S_ISBLK(stmd):\n type=BLKTYPE\n else :\n return None\n \n \n \n 
tarinfo.name=arcname\n tarinfo.mode=stmd\n tarinfo.uid=statres.st_uid\n tarinfo.gid=statres.st_gid\n if type ==REGTYPE:\n tarinfo.size=statres.st_size\n else :\n tarinfo.size=0\n tarinfo.mtime=statres.st_mtime\n tarinfo.type=type\n tarinfo.linkname=linkname\n if pwd:\n try :\n tarinfo.uname=pwd.getpwuid(tarinfo.uid)[0]\n except KeyError:\n pass\n if grp:\n try :\n tarinfo.gname=grp.getgrgid(tarinfo.gid)[0]\n except KeyError:\n pass\n \n if type in (CHRTYPE,BLKTYPE):\n if hasattr(os,\"major\")and hasattr(os,\"minor\"):\n tarinfo.devmajor=os.major(statres.st_rdev)\n tarinfo.devminor=os.minor(statres.st_rdev)\n return tarinfo\n \n def list(self,verbose=True ,*,members=None ):\n ''\n\n\n\n \n self._check()\n \n if members is None :\n members=self\n for tarinfo in members:\n if verbose:\n _safe_print(stat.filemode(tarinfo.mode))\n _safe_print(\"%s/%s\"%(tarinfo.uname or tarinfo.uid,\n tarinfo.gname or tarinfo.gid))\n if tarinfo.ischr()or tarinfo.isblk():\n _safe_print(\"%10s\"%\n (\"%d,%d\"%(tarinfo.devmajor,tarinfo.devminor)))\n else :\n _safe_print(\"%10d\"%tarinfo.size)\n _safe_print(\"%d-%02d-%02d %02d:%02d:%02d\"\\\n %time.localtime(tarinfo.mtime)[:6])\n \n _safe_print(tarinfo.name+(\"/\"if tarinfo.isdir()else \"\"))\n \n if verbose:\n if tarinfo.issym():\n _safe_print(\"-> \"+tarinfo.linkname)\n if tarinfo.islnk():\n _safe_print(\"link to \"+tarinfo.linkname)\n print()\n \n def add(self,name,arcname=None ,recursive=True ,*,filter=None ):\n ''\n\n\n\n\n\n\n\n \n self._check(\"awx\")\n \n if arcname is None :\n arcname=name\n \n \n if self.name is not None and os.path.abspath(name)==self.name:\n self._dbg(2,\"tarfile: Skipped %r\"%name)\n return\n \n self._dbg(1,name)\n \n \n tarinfo=self.gettarinfo(name,arcname)\n \n if tarinfo is None :\n self._dbg(1,\"tarfile: Unsupported type %r\"%name)\n return\n \n \n if filter is not None :\n tarinfo=filter(tarinfo)\n if tarinfo is None :\n self._dbg(2,\"tarfile: Excluded %r\"%name)\n return\n \n \n if tarinfo.isreg():\n with bltn_open(name,\"rb\")as f:\n self.addfile(tarinfo,f)\n \n elif tarinfo.isdir():\n self.addfile(tarinfo)\n if recursive:\n for f in sorted(os.listdir(name)):\n self.add(os.path.join(name,f),os.path.join(arcname,f),\n recursive,filter=filter)\n \n else :\n self.addfile(tarinfo)\n \n def addfile(self,tarinfo,fileobj=None ):\n ''\n\n\n\n \n self._check(\"awx\")\n \n tarinfo=copy.copy(tarinfo)\n \n buf=tarinfo.tobuf(self.format,self.encoding,self.errors)\n self.fileobj.write(buf)\n self.offset +=len(buf)\n bufsize=self.copybufsize\n \n if fileobj is not None :\n copyfileobj(fileobj,self.fileobj,tarinfo.size,bufsize=bufsize)\n blocks,remainder=divmod(tarinfo.size,BLOCKSIZE)\n if remainder >0:\n self.fileobj.write(NUL *(BLOCKSIZE -remainder))\n blocks +=1\n self.offset +=blocks *BLOCKSIZE\n \n self.members.append(tarinfo)\n \n def extractall(self,path=\".\",members=None ,*,numeric_owner=False ):\n ''\n\n\n\n\n\n \n directories=[]\n \n if members is None :\n members=self\n \n for tarinfo in members:\n if tarinfo.isdir():\n \n directories.append(tarinfo)\n tarinfo=copy.copy(tarinfo)\n tarinfo.mode=0o700\n \n self.extract(tarinfo,path,set_attrs=not tarinfo.isdir(),\n numeric_owner=numeric_owner)\n \n \n directories.sort(key=lambda a:a.name)\n directories.reverse()\n \n \n for tarinfo in directories:\n dirpath=os.path.join(path,tarinfo.name)\n try :\n self.chown(tarinfo,dirpath,numeric_owner=numeric_owner)\n self.utime(tarinfo,dirpath)\n self.chmod(tarinfo,dirpath)\n except ExtractError as e:\n if self.errorlevel >1:\n raise\n else :\n 
self._dbg(1,\"tarfile: %s\"%e)\n \n def extract(self,member,path=\"\",set_attrs=True ,*,numeric_owner=False ):\n ''\n\n\n\n\n\n\n \n self._check(\"r\")\n \n if isinstance(member,str):\n tarinfo=self.getmember(member)\n else :\n tarinfo=member\n \n \n if tarinfo.islnk():\n tarinfo._link_target=os.path.join(path,tarinfo.linkname)\n \n try :\n self._extract_member(tarinfo,os.path.join(path,tarinfo.name),\n set_attrs=set_attrs,\n numeric_owner=numeric_owner)\n except OSError as e:\n if self.errorlevel >0:\n raise\n else :\n if e.filename is None :\n self._dbg(1,\"tarfile: %s\"%e.strerror)\n else :\n self._dbg(1,\"tarfile: %s %r\"%(e.strerror,e.filename))\n except ExtractError as e:\n if self.errorlevel >1:\n raise\n else :\n self._dbg(1,\"tarfile: %s\"%e)\n \n def extractfile(self,member):\n ''\n\n\n\n \n self._check(\"r\")\n \n if isinstance(member,str):\n tarinfo=self.getmember(member)\n else :\n tarinfo=member\n \n if tarinfo.isreg()or tarinfo.type not in SUPPORTED_TYPES:\n \n return self.fileobject(self,tarinfo)\n \n elif tarinfo.islnk()or tarinfo.issym():\n if isinstance(self.fileobj,_Stream):\n \n \n \n raise StreamError(\"cannot extract (sym)link as file object\")\n else :\n \n return self.extractfile(self._find_link_target(tarinfo))\n else :\n \n \n return None\n \n def _extract_member(self,tarinfo,targetpath,set_attrs=True ,\n numeric_owner=False ):\n ''\n\n \n \n \n \n targetpath=targetpath.rstrip(\"/\")\n targetpath=targetpath.replace(\"/\",os.sep)\n \n \n upperdirs=os.path.dirname(targetpath)\n if upperdirs and not os.path.exists(upperdirs):\n \n \n os.makedirs(upperdirs)\n \n if tarinfo.islnk()or tarinfo.issym():\n self._dbg(1,\"%s -> %s\"%(tarinfo.name,tarinfo.linkname))\n else :\n self._dbg(1,tarinfo.name)\n \n if tarinfo.isreg():\n self.makefile(tarinfo,targetpath)\n elif tarinfo.isdir():\n self.makedir(tarinfo,targetpath)\n elif tarinfo.isfifo():\n self.makefifo(tarinfo,targetpath)\n elif tarinfo.ischr()or tarinfo.isblk():\n self.makedev(tarinfo,targetpath)\n elif tarinfo.islnk()or tarinfo.issym():\n self.makelink(tarinfo,targetpath)\n elif tarinfo.type not in SUPPORTED_TYPES:\n self.makeunknown(tarinfo,targetpath)\n else :\n self.makefile(tarinfo,targetpath)\n \n if set_attrs:\n self.chown(tarinfo,targetpath,numeric_owner)\n if not tarinfo.issym():\n self.chmod(tarinfo,targetpath)\n self.utime(tarinfo,targetpath)\n \n \n \n \n \n \n def makedir(self,tarinfo,targetpath):\n ''\n \n try :\n \n \n os.mkdir(targetpath,0o700)\n except FileExistsError:\n pass\n \n def makefile(self,tarinfo,targetpath):\n ''\n \n source=self.fileobj\n source.seek(tarinfo.offset_data)\n bufsize=self.copybufsize\n with bltn_open(targetpath,\"wb\")as target:\n if tarinfo.sparse is not None :\n for offset,size in tarinfo.sparse:\n target.seek(offset)\n copyfileobj(source,target,size,ReadError,bufsize)\n target.seek(tarinfo.size)\n target.truncate()\n else :\n copyfileobj(source,target,tarinfo.size,ReadError,bufsize)\n \n def makeunknown(self,tarinfo,targetpath):\n ''\n\n \n self.makefile(tarinfo,targetpath)\n self._dbg(1,\"tarfile: Unknown file type %r, \"\\\n \"extracted as regular file.\"%tarinfo.type)\n \n def makefifo(self,tarinfo,targetpath):\n ''\n \n if hasattr(os,\"mkfifo\"):\n os.mkfifo(targetpath)\n else :\n raise ExtractError(\"fifo not supported by system\")\n \n def makedev(self,tarinfo,targetpath):\n ''\n \n if not hasattr(os,\"mknod\")or not hasattr(os,\"makedev\"):\n raise ExtractError(\"special devices not supported by system\")\n \n mode=tarinfo.mode\n if tarinfo.isblk():\n mode 
|=stat.S_IFBLK\n else :\n mode |=stat.S_IFCHR\n \n os.mknod(targetpath,mode,\n os.makedev(tarinfo.devmajor,tarinfo.devminor))\n \n def makelink(self,tarinfo,targetpath):\n ''\n\n\n \n try :\n \n if tarinfo.issym():\n os.symlink(tarinfo.linkname,targetpath)\n else :\n \n if os.path.exists(tarinfo._link_target):\n os.link(tarinfo._link_target,targetpath)\n else :\n self._extract_member(self._find_link_target(tarinfo),\n targetpath)\n except symlink_exception:\n try :\n self._extract_member(self._find_link_target(tarinfo),\n targetpath)\n except KeyError:\n raise ExtractError(\"unable to resolve link inside archive\")\n \n def chown(self,tarinfo,targetpath,numeric_owner):\n ''\n\n\n\n \n if hasattr(os,\"geteuid\")and os.geteuid()==0:\n \n g=tarinfo.gid\n u=tarinfo.uid\n if not numeric_owner:\n try :\n if grp:\n g=grp.getgrnam(tarinfo.gname)[2]\n except KeyError:\n pass\n try :\n if pwd:\n u=pwd.getpwnam(tarinfo.uname)[2]\n except KeyError:\n pass\n try :\n if tarinfo.issym()and hasattr(os,\"lchown\"):\n os.lchown(targetpath,u,g)\n else :\n os.chown(targetpath,u,g)\n except OSError:\n raise ExtractError(\"could not change owner\")\n \n def chmod(self,tarinfo,targetpath):\n ''\n \n try :\n os.chmod(targetpath,tarinfo.mode)\n except OSError:\n raise ExtractError(\"could not change mode\")\n \n def utime(self,tarinfo,targetpath):\n ''\n \n if not hasattr(os,'utime'):\n return\n try :\n os.utime(targetpath,(tarinfo.mtime,tarinfo.mtime))\n except OSError:\n raise ExtractError(\"could not change modification time\")\n \n \n def next(self):\n ''\n\n\n \n self._check(\"ra\")\n if self.firstmember is not None :\n m=self.firstmember\n self.firstmember=None\n return m\n \n \n if self.offset !=self.fileobj.tell():\n self.fileobj.seek(self.offset -1)\n if not self.fileobj.read(1):\n raise ReadError(\"unexpected end of data\")\n \n \n tarinfo=None\n while True :\n try :\n tarinfo=self.tarinfo.fromtarfile(self)\n except EOFHeaderError as e:\n if self.ignore_zeros:\n self._dbg(2,\"0x%X: %s\"%(self.offset,e))\n self.offset +=BLOCKSIZE\n continue\n except InvalidHeaderError as e:\n if self.ignore_zeros:\n self._dbg(2,\"0x%X: %s\"%(self.offset,e))\n self.offset +=BLOCKSIZE\n continue\n elif self.offset ==0:\n raise ReadError(str(e))\n except EmptyHeaderError:\n if self.offset ==0:\n raise ReadError(\"empty file\")\n except TruncatedHeaderError as e:\n if self.offset ==0:\n raise ReadError(str(e))\n except SubsequentHeaderError as e:\n raise ReadError(str(e))\n break\n \n if tarinfo is not None :\n self.members.append(tarinfo)\n else :\n self._loaded=True\n \n return tarinfo\n \n \n \n \n def _getmember(self,name,tarinfo=None ,normalize=False ):\n ''\n\n \n \n members=self.getmembers()\n \n \n if tarinfo is not None :\n members=members[:members.index(tarinfo)]\n \n if normalize:\n name=os.path.normpath(name)\n \n for member in reversed(members):\n if normalize:\n member_name=os.path.normpath(member.name)\n else :\n member_name=member.name\n \n if name ==member_name:\n return member\n \n def _load(self):\n ''\n\n \n while True :\n tarinfo=self.next()\n if tarinfo is None :\n break\n self._loaded=True\n \n def _check(self,mode=None ):\n ''\n\n \n if self.closed:\n raise OSError(\"%s is closed\"%self.__class__.__name__)\n if mode is not None and self.mode not in mode:\n raise OSError(\"bad operation for mode %r\"%self.mode)\n \n def _find_link_target(self,tarinfo):\n ''\n\n \n if tarinfo.issym():\n \n linkname=\"/\".join(filter(None ,(os.path.dirname(tarinfo.name),tarinfo.linkname)))\n limit=None\n else :\n \n \n 
linkname=tarinfo.linkname\n limit=tarinfo\n \n member=self._getmember(linkname,tarinfo=limit,normalize=True )\n if member is None :\n raise KeyError(\"linkname %r not found\"%linkname)\n return member\n \n def __iter__(self):\n ''\n \n if self._loaded:\n yield from self.members\n return\n \n \n \n index=0\n \n \n \n if self.firstmember is not None :\n tarinfo=self.next()\n index +=1\n yield tarinfo\n \n while True :\n if index ',''),\n help='Extract tarfile into target dir')\n group.add_argument('-c','--create',nargs='+',\n metavar=('',''),\n help='Create tarfile from sources')\n group.add_argument('-t','--test',metavar='',\n help='Test if a tarfile is valid')\n args=parser.parse_args()\n \n if args.test is not None :\n src=args.test\n if is_tarfile(src):\n with open(src,'r')as tar:\n tar.getmembers()\n print(tar.getmembers(),file=sys.stderr)\n if args.verbose:\n print('{!r} is a tar archive.'.format(src))\n else :\n parser.exit(1,'{!r} is not a tar archive.\\n'.format(src))\n \n elif args.list is not None :\n src=args.list\n if is_tarfile(src):\n with TarFile.open(src,'r:*')as tf:\n tf.list(verbose=args.verbose)\n else :\n parser.exit(1,'{!r} is not a tar archive.\\n'.format(src))\n \n elif args.extract is not None :\n if len(args.extract)==1:\n src=args.extract[0]\n curdir=os.curdir\n elif len(args.extract)==2:\n src,curdir=args.extract\n else :\n parser.exit(1,parser.format_help())\n \n if is_tarfile(src):\n with TarFile.open(src,'r:*')as tf:\n tf.extractall(path=curdir)\n if args.verbose:\n if curdir =='.':\n msg='{!r} file is extracted.'.format(src)\n else :\n msg=('{!r} file is extracted '\n 'into {!r} directory.').format(src,curdir)\n print(msg)\n else :\n parser.exit(1,'{!r} is not a tar archive.\\n'.format(src))\n \n elif args.create is not None :\n tar_name=args.create.pop(0)\n _,ext=os.path.splitext(tar_name)\n compressions={\n \n '.gz':'gz',\n '.tgz':'gz',\n \n '.xz':'xz',\n '.txz':'xz',\n \n '.bz2':'bz2',\n '.tbz':'bz2',\n '.tbz2':'bz2',\n '.tb2':'bz2',\n }\n tar_mode='w:'+compressions[ext]if ext in compressions else 'w'\n tar_files=args.create\n \n with TarFile.open(tar_name,tar_mode)as tf:\n for file_name in tar_files:\n tf.add(file_name)\n \n if args.verbose:\n print('{!r} file created.'.format(tar_name))\n \nif __name__ =='__main__':\n main()\n", ["argparse", "builtins", "bz2", "copy", "grp", "gzip", "io", "lzma", "os", "pwd", "re", "shutil", "stat", "struct", "sys", "time", "zlib"]], "tb": [".py", "import sys\nfrom browser import console\n\nclass Trace:\n\n def __init__(self):\n self.buf=\"\"\n \n def write(self,*data):\n self.buf +=\" \".join([str(x)for x in data])\n \n def format(self):\n ''\n return self.buf\n \ndef format_exc():\n trace=Trace()\n exc_info=sys.exc_info()\n exc_class=exc_info[0].__name__\n exc_msg=str(exc_info[1])\n tb=exc_info[2].tb_next\n if exc_info[0]is SyntaxError:\n return syntax_error(exc_info[1].args)\n trace.write(\"Traceback (most recent call last):\\n\")\n while tb is not None :\n frame=tb.tb_frame\n code=frame.f_code\n name=code.co_name\n filename=code.co_filename\n trace.write(f\" File {filename}, line {tb.tb_lineno}, in {name}\\n\")\n if not filename.startswith(\"<\"):\n trace.write(f\" {tb.tb_lasti}\\n\")\n tb=tb.tb_next\n trace.write(f\"{exc_class}: {exc_msg}\\n\")\n return trace.format()\n \ndef print_exc(file=None ):\n if file is None :\n file=sys.stderr\n file.write(format_exc())\n \ndef syntax_error(args):\n trace=Trace()\n info,filename,lineno,offset,line=args\n trace.write(f\" File {filename}, line {lineno}\\n\")\n trace.write(\" 
\"+line+\"\\n\")\n trace.write(\" \"+offset *\" \"+\"^\\n\")\n trace.write(\"SyntaxError:\",info,\"\\n\")\n return trace.buf\n", ["browser", "sys"]], "tempfile": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n__all__=[\n\"NamedTemporaryFile\",\"TemporaryFile\",\n\"SpooledTemporaryFile\",\"TemporaryDirectory\",\n\"mkstemp\",\"mkdtemp\",\n\"mktemp\",\n\"TMP_MAX\",\"gettempprefix\",\n\"tempdir\",\"gettempdir\",\n\"gettempprefixb\",\"gettempdirb\",\n]\n\n\n\n\nimport functools as _functools\nimport warnings as _warnings\nimport io as _io\nimport os as _os\nimport shutil as _shutil\nimport errno as _errno\nfrom random import Random as _Random\nimport sys as _sys\nimport types as _types\nimport weakref as _weakref\nimport _thread\n_allocate_lock=_thread.allocate_lock\n\n_text_openflags=_os.O_RDWR |_os.O_CREAT |_os.O_EXCL\nif hasattr(_os,'O_NOFOLLOW'):\n _text_openflags |=_os.O_NOFOLLOW\n \n_bin_openflags=_text_openflags\nif hasattr(_os,'O_BINARY'):\n _bin_openflags |=_os.O_BINARY\n \nif hasattr(_os,'TMP_MAX'):\n TMP_MAX=_os.TMP_MAX\nelse :\n TMP_MAX=10000\n \n \n \n \n \ntemplate=\"tmp\"\n\n\n\n_once_lock=_allocate_lock()\n\n\ndef _exists(fn):\n try :\n _os.lstat(fn)\n except OSError:\n return False\n else :\n return True\n \n \ndef _infer_return_type(*args):\n ''\n return_type=None\n for arg in args:\n if arg is None :\n continue\n if isinstance(arg,bytes):\n if return_type is str:\n raise TypeError(\"Can't mix bytes and non-bytes in \"\n \"path components.\")\n return_type=bytes\n else :\n if return_type is bytes:\n raise TypeError(\"Can't mix bytes and non-bytes in \"\n \"path components.\")\n return_type=str\n if return_type is None :\n return str\n return return_type\n \n \ndef _sanitize_params(prefix,suffix,dir):\n ''\n output_type=_infer_return_type(prefix,suffix,dir)\n if suffix is None :\n suffix=output_type()\n if prefix is None :\n if output_type is str:\n prefix=template\n else :\n prefix=_os.fsencode(template)\n if dir is None :\n if output_type is str:\n dir=gettempdir()\n else :\n dir=gettempdirb()\n return prefix,suffix,dir,output_type\n \n \nclass _RandomNameSequence:\n ''\n\n\n\n\n \n \n characters=\"abcdefghijklmnopqrstuvwxyz0123456789_\"\n \n @property\n def rng(self):\n cur_pid=_os.getpid()\n if cur_pid !=getattr(self,'_rng_pid',None ):\n self._rng=_Random()\n self._rng_pid=cur_pid\n return self._rng\n \n def __iter__(self):\n return self\n \n def __next__(self):\n c=self.characters\n choose=self.rng.choice\n letters=[choose(c)for dummy in range(8)]\n return ''.join(letters)\n \ndef _candidate_tempdir_list():\n ''\n \n \n dirlist=[]\n \n \n for envname in 'TMPDIR','TEMP','TMP':\n dirname=_os.getenv(envname)\n if dirname:dirlist.append(dirname)\n \n \n if _os.name =='nt':\n dirlist.extend([_os.path.expanduser(r'~\\AppData\\Local\\Temp'),\n _os.path.expandvars(r'%SYSTEMROOT%\\Temp'),\n r'c:\\temp',r'c:\\tmp',r'\\temp',r'\\tmp'])\n else :\n dirlist.extend(['/tmp','/var/tmp','/usr/tmp'])\n \n \n try :\n dirlist.append(_os.getcwd())\n except (AttributeError,OSError):\n dirlist.append(_os.curdir)\n \n return dirlist\n \ndef _get_default_tempdir():\n ''\n\n\n\n\n\n \n \n namer=_RandomNameSequence()\n dirlist=_candidate_tempdir_list()\n \n for dir in dirlist:\n if dir !=_os.curdir:\n dir=_os.path.abspath(dir)\n \n for seq in range(100):\n name=next(namer)\n filename=_os.path.join(dir,name)\n try :\n fd=_os.open(filename,_bin_openflags,0o600)\n try :\n try :\n with _io.open(fd,'wb',closefd=False )as fp:\n fp.write(b'blat')\n finally :\n _os.close(fd)\n finally :\n 
_os.unlink(filename)\n return dir\n except FileExistsError:\n pass\n except PermissionError:\n \n \n if (_os.name =='nt'and _os.path.isdir(dir)and\n _os.access(dir,_os.W_OK)):\n continue\n break\n except OSError:\n break\n raise FileNotFoundError(_errno.ENOENT,\n \"No usable temporary directory found in %s\"%\n dirlist)\n \n_name_sequence=None\n\ndef _get_candidate_names():\n ''\n \n global _name_sequence\n if _name_sequence is None :\n _once_lock.acquire()\n try :\n if _name_sequence is None :\n _name_sequence=_RandomNameSequence()\n finally :\n _once_lock.release()\n return _name_sequence\n \n \ndef _mkstemp_inner(dir,pre,suf,flags,output_type):\n ''\n \n names=_get_candidate_names()\n if output_type is bytes:\n names=map(_os.fsencode,names)\n \n for seq in range(TMP_MAX):\n name=next(names)\n file=_os.path.join(dir,pre+name+suf)\n _sys.audit(\"tempfile.mkstemp\",file)\n try :\n fd=_os.open(file,flags,0o600)\n except FileExistsError:\n continue\n except PermissionError:\n \n \n if (_os.name =='nt'and _os.path.isdir(dir)and\n _os.access(dir,_os.W_OK)):\n continue\n else :\n raise\n return (fd,_os.path.abspath(file))\n \n raise FileExistsError(_errno.EEXIST,\n \"No usable temporary file name found\")\n \n \n \n \ndef gettempprefix():\n ''\n return template\n \ndef gettempprefixb():\n ''\n return _os.fsencode(gettempprefix())\n \ntempdir=None\n\ndef gettempdir():\n ''\n global tempdir\n if tempdir is None :\n _once_lock.acquire()\n try :\n if tempdir is None :\n tempdir=_get_default_tempdir()\n finally :\n _once_lock.release()\n return tempdir\n \ndef gettempdirb():\n ''\n return _os.fsencode(gettempdir())\n \ndef mkstemp(suffix=None ,prefix=None ,dir=None ,text=False ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n prefix,suffix,dir,output_type=_sanitize_params(prefix,suffix,dir)\n \n if text:\n flags=_text_openflags\n else :\n flags=_bin_openflags\n \n return _mkstemp_inner(dir,prefix,suffix,flags,output_type)\n \n \ndef mkdtemp(suffix=None ,prefix=None ,dir=None ):\n ''\n\n\n\n\n\n\n\n\n\n \n \n prefix,suffix,dir,output_type=_sanitize_params(prefix,suffix,dir)\n \n names=_get_candidate_names()\n if output_type is bytes:\n names=map(_os.fsencode,names)\n \n for seq in range(TMP_MAX):\n name=next(names)\n file=_os.path.join(dir,prefix+name+suffix)\n _sys.audit(\"tempfile.mkdtemp\",file)\n try :\n _os.mkdir(file,0o700)\n except FileExistsError:\n continue\n except PermissionError:\n \n \n if (_os.name =='nt'and _os.path.isdir(dir)and\n _os.access(dir,_os.W_OK)):\n continue\n else :\n raise\n return file\n \n raise FileExistsError(_errno.EEXIST,\n \"No usable temporary directory name found\")\n \ndef mktemp(suffix=\"\",prefix=template,dir=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n \n \n \n \n \n \n if dir is None :\n dir=gettempdir()\n \n names=_get_candidate_names()\n for seq in range(TMP_MAX):\n name=next(names)\n file=_os.path.join(dir,prefix+name+suffix)\n if not _exists(file):\n return file\n \n raise FileExistsError(_errno.EEXIST,\n \"No usable temporary filename found\")\n \n \nclass _TemporaryFileCloser:\n ''\n\n \n \n file=None\n close_called=False\n \n def __init__(self,file,name,delete=True ):\n self.file=file\n self.name=name\n self.delete=delete\n \n \n \n \n if _os.name !='nt':\n \n \n \n \n \n \n def close(self,unlink=_os.unlink):\n if not self.close_called and self.file is not None :\n self.close_called=True\n try :\n self.file.close()\n finally :\n if self.delete:\n unlink(self.name)\n \n \n def __del__(self):\n self.close()\n \n else :\n def close(self):\n if not 
self.close_called:\n self.close_called=True\n self.file.close()\n \n \nclass _TemporaryFileWrapper:\n ''\n\n\n\n\n \n \n def __init__(self,file,name,delete=True ):\n self.file=file\n self.name=name\n self.delete=delete\n self._closer=_TemporaryFileCloser(file,name,delete)\n \n def __getattr__(self,name):\n \n \n \n file=self.__dict__['file']\n a=getattr(file,name)\n if hasattr(a,'__call__'):\n func=a\n @_functools.wraps(func)\n def func_wrapper(*args,**kwargs):\n return func(*args,**kwargs)\n \n \n func_wrapper._closer=self._closer\n a=func_wrapper\n if not isinstance(a,int):\n setattr(self,name,a)\n return a\n \n \n \n def __enter__(self):\n self.file.__enter__()\n return self\n \n \n \n def __exit__(self,exc,value,tb):\n result=self.file.__exit__(exc,value,tb)\n self.close()\n return result\n \n def close(self):\n ''\n\n \n self._closer.close()\n \n \n def __iter__(self):\n \n \n \n \n \n for line in self.file:\n yield line\n \n \ndef NamedTemporaryFile(mode='w+b',buffering=-1,encoding=None ,\nnewline=None ,suffix=None ,prefix=None ,\ndir=None ,delete=True ,*,errors=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n prefix,suffix,dir,output_type=_sanitize_params(prefix,suffix,dir)\n \n flags=_bin_openflags\n \n \n \n if _os.name =='nt'and delete:\n flags |=_os.O_TEMPORARY\n \n (fd,name)=_mkstemp_inner(dir,prefix,suffix,flags,output_type)\n try :\n file=_io.open(fd,mode,buffering=buffering,\n newline=newline,encoding=encoding,errors=errors)\n \n return _TemporaryFileWrapper(file,name,delete)\n except BaseException:\n _os.unlink(name)\n _os.close(fd)\n raise\n \nif _os.name !='posix'or _sys.platform =='cygwin':\n\n\n TemporaryFile=NamedTemporaryFile\n \nelse :\n\n\n\n _O_TMPFILE_WORKS=hasattr(_os,'O_TMPFILE')\n \n def TemporaryFile(mode='w+b',buffering=-1,encoding=None ,\n newline=None ,suffix=None ,prefix=None ,\n dir=None ,*,errors=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n global _O_TMPFILE_WORKS\n \n prefix,suffix,dir,output_type=_sanitize_params(prefix,suffix,dir)\n \n flags=_bin_openflags\n if _O_TMPFILE_WORKS:\n try :\n flags2=(flags |_os.O_TMPFILE)&~_os.O_CREAT\n fd=_os.open(dir,flags2,0o600)\n except IsADirectoryError:\n \n \n \n \n \n _O_TMPFILE_WORKS=False\n except OSError:\n \n \n \n \n \n \n \n pass\n else :\n try :\n return _io.open(fd,mode,buffering=buffering,\n newline=newline,encoding=encoding,\n errors=errors)\n except :\n _os.close(fd)\n raise\n \n \n (fd,name)=_mkstemp_inner(dir,prefix,suffix,flags,output_type)\n try :\n _os.unlink(name)\n return _io.open(fd,mode,buffering=buffering,\n newline=newline,encoding=encoding,errors=errors)\n except :\n _os.close(fd)\n raise\n \nclass SpooledTemporaryFile:\n ''\n\n\n \n _rolled=False\n \n def __init__(self,max_size=0,mode='w+b',buffering=-1,\n encoding=None ,newline=None ,\n suffix=None ,prefix=None ,dir=None ,*,errors=None ):\n if 'b'in mode:\n self._file=_io.BytesIO()\n else :\n self._file=_io.TextIOWrapper(_io.BytesIO(),\n encoding=encoding,errors=errors,\n newline=newline)\n self._max_size=max_size\n self._rolled=False\n self._TemporaryFileArgs={'mode':mode,'buffering':buffering,\n 'suffix':suffix,'prefix':prefix,\n 'encoding':encoding,'newline':newline,\n 'dir':dir,'errors':errors}\n \n __class_getitem__=classmethod(_types.GenericAlias)\n \n def _check(self,file):\n if self._rolled:return\n max_size=self._max_size\n if max_size and file.tell()>max_size:\n self.rollover()\n \n def rollover(self):\n if self._rolled:return\n file=self._file\n newfile=self._file=TemporaryFile(**self._TemporaryFileArgs)\n del 
self._TemporaryFileArgs\n \n pos=file.tell()\n if hasattr(newfile,'buffer'):\n newfile.buffer.write(file.detach().getvalue())\n else :\n newfile.write(file.getvalue())\n newfile.seek(pos,0)\n \n self._rolled=True\n \n \n \n \n \n \n \n def __enter__(self):\n if self._file.closed:\n raise ValueError(\"Cannot enter context with closed file\")\n return self\n \n def __exit__(self,exc,value,tb):\n self._file.close()\n \n \n def __iter__(self):\n return self._file.__iter__()\n \n def close(self):\n self._file.close()\n \n @property\n def closed(self):\n return self._file.closed\n \n @property\n def encoding(self):\n return self._file.encoding\n \n @property\n def errors(self):\n return self._file.errors\n \n def fileno(self):\n self.rollover()\n return self._file.fileno()\n \n def flush(self):\n self._file.flush()\n \n def isatty(self):\n return self._file.isatty()\n \n @property\n def mode(self):\n try :\n return self._file.mode\n except AttributeError:\n return self._TemporaryFileArgs['mode']\n \n @property\n def name(self):\n try :\n return self._file.name\n except AttributeError:\n return None\n \n @property\n def newlines(self):\n return self._file.newlines\n \n def read(self,*args):\n return self._file.read(*args)\n \n def readline(self,*args):\n return self._file.readline(*args)\n \n def readlines(self,*args):\n return self._file.readlines(*args)\n \n def seek(self,*args):\n return self._file.seek(*args)\n \n def tell(self):\n return self._file.tell()\n \n def truncate(self,size=None ):\n if size is None :\n self._file.truncate()\n else :\n if size >self._max_size:\n self.rollover()\n self._file.truncate(size)\n \n def write(self,s):\n file=self._file\n rv=file.write(s)\n self._check(file)\n return rv\n \n def writelines(self,iterable):\n file=self._file\n rv=file.writelines(iterable)\n self._check(file)\n return rv\n \n \nclass TemporaryDirectory(object):\n ''\n\n\n\n\n\n\n\n\n \n \n def __init__(self,suffix=None ,prefix=None ,dir=None ):\n self.name=mkdtemp(suffix,prefix,dir)\n self._finalizer=_weakref.finalize(\n self,self._cleanup,self.name,\n warn_message=\"Implicitly cleaning up {!r}\".format(self))\n \n @classmethod\n def _rmtree(cls,name):\n def onerror(func,path,exc_info):\n if issubclass(exc_info[0],PermissionError):\n def resetperms(path):\n try :\n _os.chflags(path,0)\n except AttributeError:\n pass\n _os.chmod(path,0o700)\n \n try :\n if path !=name:\n resetperms(_os.path.dirname(path))\n resetperms(path)\n \n try :\n _os.unlink(path)\n \n except (IsADirectoryError,PermissionError):\n cls._rmtree(path)\n except FileNotFoundError:\n pass\n elif issubclass(exc_info[0],FileNotFoundError):\n pass\n else :\n raise\n \n _shutil.rmtree(name,onerror=onerror)\n \n @classmethod\n def _cleanup(cls,name,warn_message):\n cls._rmtree(name)\n _warnings.warn(warn_message,ResourceWarning)\n \n def __repr__(self):\n return \"<{} {!r}>\".format(self.__class__.__name__,self.name)\n \n def __enter__(self):\n return self.name\n \n def __exit__(self,exc,value,tb):\n self.cleanup()\n \n def cleanup(self):\n if self._finalizer.detach():\n self._rmtree(self.name)\n \n __class_getitem__=classmethod(_types.GenericAlias)\n", ["_thread", "errno", "functools", "io", "os", "random", "shutil", "sys", "types", "warnings", "weakref"]], "textwrap": [".py", "''\n\n\n\n\n\n\nimport re\n\n__all__=['TextWrapper','wrap','fill','dedent','indent','shorten']\n\n\n\n\n_whitespace='\\t\\n\\x0b\\x0c\\r '\n\nclass TextWrapper:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n 
\n \n unicode_whitespace_trans={}\n uspace=ord(' ')\n for x in _whitespace:\n unicode_whitespace_trans[ord(x)]=uspace\n \n \n \n \n \n \n \n word_punct=r'[\\w!\"\\'&.,?]'\n letter=r'[^\\d\\W]'\n whitespace=r'[%s]'%re.escape(_whitespace)\n nowhitespace='[^'+whitespace[1:]\n wordsep_re=re.compile(r'''\n ( # any whitespace\n %(ws)s+\n | # em-dash between words\n (?<=%(wp)s) -{2,} (?=\\w)\n | # word, possibly hyphenated\n %(nws)s+? (?:\n # hyphenated word\n -(?: (?<=%(lt)s{2}-) | (?<=%(lt)s-%(lt)s-))\n (?= %(lt)s -? %(lt)s)\n | # end of word\n (?=%(ws)s|\\Z)\n | # em-dash\n (?<=%(wp)s) (?=-{2,}\\w)\n )\n )'''%{'wp':word_punct,'lt':letter,\n 'ws':whitespace,'nws':nowhitespace},\n re.VERBOSE)\n del word_punct,letter,nowhitespace\n \n \n \n \n \n wordsep_simple_re=re.compile(r'(%s+)'%whitespace)\n del whitespace\n \n \n \n sentence_end_re=re.compile(r'[a-z]'\n r'[\\.\\!\\?]'\n r'[\\\"\\']?'\n r'\\Z')\n \n def __init__(self,\n width=70,\n initial_indent=\"\",\n subsequent_indent=\"\",\n expand_tabs=True ,\n replace_whitespace=True ,\n fix_sentence_endings=False ,\n break_long_words=True ,\n drop_whitespace=True ,\n break_on_hyphens=True ,\n tabsize=8,\n *,\n max_lines=None ,\n placeholder=' [...]'):\n self.width=width\n self.initial_indent=initial_indent\n self.subsequent_indent=subsequent_indent\n self.expand_tabs=expand_tabs\n self.replace_whitespace=replace_whitespace\n self.fix_sentence_endings=fix_sentence_endings\n self.break_long_words=break_long_words\n self.drop_whitespace=drop_whitespace\n self.break_on_hyphens=break_on_hyphens\n self.tabsize=tabsize\n self.max_lines=max_lines\n self.placeholder=placeholder\n \n \n \n \n \n def _munge_whitespace(self,text):\n ''\n\n\n\n\n \n if self.expand_tabs:\n text=text.expandtabs(self.tabsize)\n if self.replace_whitespace:\n text=text.translate(self.unicode_whitespace_trans)\n return text\n \n \n def _split(self,text):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n if self.break_on_hyphens is True :\n chunks=self.wordsep_re.split(text)\n else :\n chunks=self.wordsep_simple_re.split(text)\n chunks=[c for c in chunks if c]\n return chunks\n \n def _fix_sentence_endings(self,chunks):\n ''\n\n\n\n\n\n\n \n i=0\n patsearch=self.sentence_end_re.search\n while i 0)\"%self.width)\n if self.max_lines is not None :\n if self.max_lines >1:\n indent=self.subsequent_indent\n else :\n indent=self.initial_indent\n if len(indent)+len(self.placeholder.lstrip())>self.width:\n raise ValueError(\"placeholder too large for max width\")\n \n \n \n chunks.reverse()\n \n while chunks:\n \n \n \n cur_line=[]\n cur_len=0\n \n \n if lines:\n indent=self.subsequent_indent\n else :\n indent=self.initial_indent\n \n \n width=self.width -len(indent)\n \n \n \n if self.drop_whitespace and chunks[-1].strip()==''and lines:\n del chunks[-1]\n \n while chunks:\n l=len(chunks[-1])\n \n \n if cur_len+l <=width:\n cur_line.append(chunks.pop())\n cur_len +=l\n \n \n else :\n break\n \n \n \n if chunks and len(chunks[-1])>width:\n self._handle_long_word(chunks,cur_line,cur_len,width)\n cur_len=sum(map(len,cur_line))\n \n \n if self.drop_whitespace and cur_line and cur_line[-1].strip()=='':\n cur_len -=len(cur_line[-1])\n del cur_line[-1]\n \n if cur_line:\n if (self.max_lines is None or\n len(lines)+1 \"%(\n \"locked\"if self._block.locked()else \"unlocked\",\n self.__class__.__module__,\n self.__class__.__qualname__,\n owner,\n self._count,\n hex(id(self))\n )\n \n def _at_fork_reinit(self):\n self._block._at_fork_reinit()\n self._owner=None\n self._count=0\n \n def acquire(self,blocking=True 
,timeout=-1):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n me=get_ident()\n if self._owner ==me:\n self._count +=1\n return 1\n rc=self._block.acquire(blocking,timeout)\n if rc:\n self._owner=me\n self._count=1\n return rc\n \n __enter__=acquire\n \n def release(self):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if self._owner !=get_ident():\n raise RuntimeError(\"cannot release un-acquired lock\")\n self._count=count=self._count -1\n if not count:\n self._owner=None\n self._block.release()\n \n def __exit__(self,t,v,tb):\n self.release()\n \n \n \n def _acquire_restore(self,state):\n self._block.acquire()\n self._count,self._owner=state\n \n def _release_save(self):\n if self._count ==0:\n raise RuntimeError(\"cannot release un-acquired lock\")\n count=self._count\n self._count=0\n owner=self._owner\n self._owner=None\n self._block.release()\n return (count,owner)\n \n def _is_owned(self):\n return self._owner ==get_ident()\n \n_PyRLock=_RLock\n\n\nclass Condition:\n ''\n\n\n\n\n\n\n\n\n \n \n def __init__(self,lock=None ):\n if lock is None :\n lock=RLock()\n self._lock=lock\n \n self.acquire=lock.acquire\n self.release=lock.release\n \n \n \n try :\n self._release_save=lock._release_save\n except AttributeError:\n pass\n try :\n self._acquire_restore=lock._acquire_restore\n except AttributeError:\n pass\n try :\n self._is_owned=lock._is_owned\n except AttributeError:\n pass\n self._waiters=_deque()\n \n def _at_fork_reinit(self):\n self._lock._at_fork_reinit()\n self._waiters.clear()\n \n def __enter__(self):\n return self._lock.__enter__()\n \n def __exit__(self,*args):\n return self._lock.__exit__(*args)\n \n def __repr__(self):\n return \"\"%(self._lock,len(self._waiters))\n \n def _release_save(self):\n self._lock.release()\n \n def _acquire_restore(self,x):\n self._lock.acquire()\n \n def _is_owned(self):\n \n \n if self._lock.acquire(False ):\n self._lock.release()\n return False\n else :\n return True\n \n def wait(self,timeout=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if not self._is_owned():\n raise RuntimeError(\"cannot wait on un-acquired lock\")\n waiter=_allocate_lock()\n waiter.acquire()\n self._waiters.append(waiter)\n saved_state=self._release_save()\n gotit=False\n try :\n if timeout is None :\n waiter.acquire()\n gotit=True\n else :\n if timeout >0:\n gotit=waiter.acquire(True ,timeout)\n else :\n gotit=waiter.acquire(False )\n return gotit\n finally :\n self._acquire_restore(saved_state)\n if not gotit:\n try :\n self._waiters.remove(waiter)\n except ValueError:\n pass\n \n def wait_for(self,predicate,timeout=None ):\n ''\n\n\n\n\n\n \n endtime=None\n waittime=timeout\n result=predicate()\n while not result:\n if waittime is not None :\n if endtime is None :\n endtime=_time()+waittime\n else :\n waittime=endtime -_time()\n if waittime <=0:\n break\n self.wait(waittime)\n result=predicate()\n return result\n \n def notify(self,n=1):\n ''\n\n\n\n\n\n\n\n \n if not self._is_owned():\n raise RuntimeError(\"cannot notify on un-acquired lock\")\n all_waiters=self._waiters\n waiters_to_notify=_deque(_islice(all_waiters,n))\n if not waiters_to_notify:\n return\n for waiter in waiters_to_notify:\n waiter.release()\n try :\n all_waiters.remove(waiter)\n except ValueError:\n pass\n \n def notify_all(self):\n ''\n\n\n\n\n \n self.notify(len(self._waiters))\n \n notifyAll=notify_all\n \n \nclass Semaphore:\n ''\n\n\n\n\n\n\n \n \n \n \n def __init__(self,value=1):\n if value <0:\n raise ValueError(\"semaphore initial value must be >= 0\")\n 
self._cond=Condition(Lock())\n self._value=value\n \n def acquire(self,blocking=True ,timeout=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if not blocking and timeout is not None :\n raise ValueError(\"can't specify timeout for non-blocking acquire\")\n rc=False\n endtime=None\n with self._cond:\n while self._value ==0:\n if not blocking:\n break\n if timeout is not None :\n if endtime is None :\n endtime=_time()+timeout\n else :\n timeout=endtime -_time()\n if timeout <=0:\n break\n self._cond.wait(timeout)\n else :\n self._value -=1\n rc=True\n return rc\n \n __enter__=acquire\n \n def release(self,n=1):\n ''\n\n\n\n\n \n if n <1:\n raise ValueError('n must be one or more')\n with self._cond:\n self._value +=n\n for i in range(n):\n self._cond.notify()\n \n def __exit__(self,t,v,tb):\n self.release()\n \n \nclass BoundedSemaphore(Semaphore):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def __init__(self,value=1):\n Semaphore.__init__(self,value)\n self._initial_value=value\n \n def release(self,n=1):\n ''\n\n\n\n\n\n\n\n \n if n <1:\n raise ValueError('n must be one or more')\n with self._cond:\n if self._value+n >self._initial_value:\n raise ValueError(\"Semaphore released too many times\")\n self._value +=n\n for i in range(n):\n self._cond.notify()\n \n \nclass Event:\n ''\n\n\n\n\n\n \n \n \n \n def __init__(self):\n self._cond=Condition(Lock())\n self._flag=False\n \n def _at_fork_reinit(self):\n \n self._cond._at_fork_reinit()\n \n def is_set(self):\n ''\n return self._flag\n \n isSet=is_set\n \n def set(self):\n ''\n\n\n\n\n \n with self._cond:\n self._flag=True\n self._cond.notify_all()\n \n def clear(self):\n ''\n\n\n\n\n \n with self._cond:\n self._flag=False\n \n def wait(self,timeout=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n with self._cond:\n signaled=self._flag\n if not signaled:\n signaled=self._cond.wait(timeout)\n return signaled\n \n \n \n \n \n \n \n \n \n \n \n \n \nclass Barrier:\n ''\n\n\n\n\n\n \n \n def __init__(self,parties,action=None ,timeout=None ):\n ''\n\n\n\n\n\n\n \n self._cond=Condition(Lock())\n self._action=action\n self._timeout=timeout\n self._parties=parties\n self._state=0\n self._count=0\n \n def wait(self,timeout=None ):\n ''\n\n\n\n\n\n\n \n if timeout is None :\n timeout=self._timeout\n with self._cond:\n self._enter()\n index=self._count\n self._count +=1\n try :\n if index+1 ==self._parties:\n \n self._release()\n else :\n \n self._wait(timeout)\n return index\n finally :\n self._count -=1\n \n self._exit()\n \n \n \n def _enter(self):\n while self._state in (-1,1):\n \n self._cond.wait()\n \n if self._state <0:\n raise BrokenBarrierError\n assert self._state ==0\n \n \n \n def _release(self):\n try :\n if self._action:\n self._action()\n \n self._state=1\n self._cond.notify_all()\n except :\n \n self._break()\n raise\n \n \n \n def _wait(self,timeout):\n if not self._cond.wait_for(lambda :self._state !=0,timeout):\n \n self._break()\n raise BrokenBarrierError\n if self._state <0:\n raise BrokenBarrierError\n assert self._state ==1\n \n \n \n def _exit(self):\n if self._count ==0:\n if self._state in (-1,1):\n \n self._state=0\n self._cond.notify_all()\n \n def reset(self):\n ''\n\n\n\n\n \n with self._cond:\n if self._count >0:\n if self._state ==0:\n \n self._state=-1\n elif self._state ==-2:\n \n \n self._state=-1\n else :\n self._state=0\n self._cond.notify_all()\n \n def abort(self):\n ''\n\n\n\n\n \n with self._cond:\n self._break()\n \n def _break(self):\n \n \n self._state=-2\n self._cond.notify_all()\n \n @property\n def 
parties(self):\n ''\n return self._parties\n \n @property\n def n_waiting(self):\n ''\n \n \n if self._state ==0:\n return self._count\n return 0\n \n @property\n def broken(self):\n ''\n return self._state ==-2\n \n \nclass BrokenBarrierError(RuntimeError):\n pass\n \n \n \n_counter=_count().__next__\n_counter()\ndef _newname(template=\"Thread-%d\"):\n return template %_counter()\n \n \n_active_limbo_lock=_allocate_lock()\n_active={}\n_limbo={}\n_dangling=WeakSet()\n\n\n\n_shutdown_locks_lock=_allocate_lock()\n_shutdown_locks=set()\n\n\n\nclass Thread:\n ''\n\n\n\n\n\n \n \n _initialized=False\n \n def __init__(self,group=None ,target=None ,name=None ,\n args=(),kwargs=None ,*,daemon=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n assert group is None ,\"group argument must be None for now\"\n if kwargs is None :\n kwargs={}\n self._target=target\n self._name=str(name or _newname())\n self._args=args\n self._kwargs=kwargs\n if daemon is not None :\n self._daemonic=daemon\n else :\n self._daemonic=current_thread().daemon\n self._ident=None\n if _HAVE_THREAD_NATIVE_ID:\n self._native_id=None\n self._tstate_lock=None\n self._started=Event()\n self._is_stopped=False\n self._initialized=True\n \n self._stderr=_sys.stderr\n self._invoke_excepthook=_make_invoke_excepthook()\n \n _dangling.add(self)\n \n def _reset_internal_locks(self,is_alive):\n \n \n self._started._at_fork_reinit()\n if is_alive:\n self._tstate_lock._at_fork_reinit()\n self._tstate_lock.acquire()\n else :\n \n \n self._is_stopped=True\n self._tstate_lock=None\n \n def __repr__(self):\n assert self._initialized,\"Thread.__init__() was not called\"\n status=\"initial\"\n if self._started.is_set():\n status=\"started\"\n self.is_alive()\n if self._is_stopped:\n status=\"stopped\"\n if self._daemonic:\n status +=\" daemon\"\n if self._ident is not None :\n status +=\" %s\"%self._ident\n return \"<%s(%s, %s)>\"%(self.__class__.__name__,self._name,status)\n \n def start(self):\n ''\n\n\n\n\n\n\n\n \n if not self._initialized:\n raise RuntimeError(\"thread.__init__() not called\")\n \n if self._started.is_set():\n raise RuntimeError(\"threads can only be started once\")\n \n with _active_limbo_lock:\n _limbo[self]=self\n try :\n _start_new_thread(self._bootstrap,())\n except Exception:\n with _active_limbo_lock:\n del _limbo[self]\n raise\n self._started.wait()\n \n def run(self):\n ''\n\n\n\n\n\n\n \n try :\n if self._target:\n self._target(*self._args,**self._kwargs)\n finally :\n \n \n del self._target,self._args,self._kwargs\n \n def _bootstrap(self):\n \n \n \n \n \n \n \n \n \n \n \n \n try :\n self._bootstrap_inner()\n except :\n if self._daemonic and _sys is None :\n return\n raise\n \n def _set_ident(self):\n self._ident=get_ident()\n \n if _HAVE_THREAD_NATIVE_ID:\n def _set_native_id(self):\n self._native_id=get_native_id()\n \n def _set_tstate_lock(self):\n ''\n\n\n \n self._tstate_lock=_set_sentinel()\n self._tstate_lock.acquire()\n \n if not self.daemon:\n with _shutdown_locks_lock:\n _shutdown_locks.add(self._tstate_lock)\n \n def _bootstrap_inner(self):\n try :\n self._set_ident()\n self._set_tstate_lock()\n if _HAVE_THREAD_NATIVE_ID:\n self._set_native_id()\n self._started.set()\n with _active_limbo_lock:\n _active[self._ident]=self\n del _limbo[self]\n \n if _trace_hook:\n _sys.settrace(_trace_hook)\n if _profile_hook:\n _sys.setprofile(_profile_hook)\n \n try :\n self.run()\n except :\n self._invoke_excepthook(self)\n finally :\n with _active_limbo_lock:\n try :\n \n \n del _active[get_ident()]\n except :\n 
pass\n \n def _stop(self):\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n lock=self._tstate_lock\n if lock is not None :\n assert not lock.locked()\n self._is_stopped=True\n self._tstate_lock=None\n if not self.daemon:\n with _shutdown_locks_lock:\n _shutdown_locks.discard(lock)\n \n def _delete(self):\n ''\n with _active_limbo_lock:\n del _active[get_ident()]\n \n \n \n \n \n def join(self,timeout=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if not self._initialized:\n raise RuntimeError(\"Thread.__init__() not called\")\n if not self._started.is_set():\n raise RuntimeError(\"cannot join thread before it is started\")\n if self is current_thread():\n raise RuntimeError(\"cannot join current thread\")\n \n if timeout is None :\n self._wait_for_tstate_lock()\n else :\n \n \n self._wait_for_tstate_lock(timeout=max(timeout,0))\n \n def _wait_for_tstate_lock(self,block=True ,timeout=-1):\n \n \n \n \n \n \n lock=self._tstate_lock\n if lock is None :\n assert self._is_stopped\n elif lock.acquire(block,timeout):\n lock.release()\n self._stop()\n \n @property\n def name(self):\n ''\n\n\n\n\n \n assert self._initialized,\"Thread.__init__() not called\"\n return self._name\n \n @name.setter\n def name(self,name):\n assert self._initialized,\"Thread.__init__() not called\"\n self._name=str(name)\n \n @property\n def ident(self):\n ''\n\n\n\n\n\n \n assert self._initialized,\"Thread.__init__() not called\"\n return self._ident\n \n if _HAVE_THREAD_NATIVE_ID:\n @property\n def native_id(self):\n ''\n\n\n\n\n \n assert self._initialized,\"Thread.__init__() not called\"\n return self._native_id\n \n def is_alive(self):\n ''\n\n\n\n\n\n \n assert self._initialized,\"Thread.__init__() not called\"\n if self._is_stopped or not self._started.is_set():\n return False\n self._wait_for_tstate_lock(False )\n return not self._is_stopped\n \n @property\n def daemon(self):\n ''\n\n\n\n\n\n\n\n\n \n assert self._initialized,\"Thread.__init__() not called\"\n return self._daemonic\n \n @daemon.setter\n def daemon(self,daemonic):\n if not self._initialized:\n raise RuntimeError(\"Thread.__init__() not called\")\n if self._started.is_set():\n raise RuntimeError(\"cannot set daemon status of active thread\")\n self._daemonic=daemonic\n \n def isDaemon(self):\n return self.daemon\n \n def setDaemon(self,daemonic):\n self.daemon=daemonic\n \n def getName(self):\n return self.name\n \n def setName(self,name):\n self.name=name\n \n \ntry :\n from _thread import (_excepthook as excepthook,\n _ExceptHookArgs as ExceptHookArgs)\nexcept ImportError:\n\n from traceback import print_exception as _print_exception\n from collections import namedtuple\n \n _ExceptHookArgs=namedtuple(\n 'ExceptHookArgs',\n 'exc_type exc_value exc_traceback thread')\n \n def ExceptHookArgs(args):\n return _ExceptHookArgs(*args)\n \n def excepthook(args,/):\n ''\n\n \n if args.exc_type ==SystemExit:\n \n return\n \n if _sys is not None and _sys.stderr is not None :\n stderr=_sys.stderr\n elif args.thread is not None :\n stderr=args.thread._stderr\n if stderr is None :\n \n \n return\n else :\n \n return\n \n if args.thread is not None :\n name=args.thread.name\n else :\n name=get_ident()\n print(f\"Exception in thread {name}:\",\n file=stderr,flush=True )\n _print_exception(args.exc_type,args.exc_value,args.exc_traceback,\n file=stderr)\n stderr.flush()\n \n \ndef _make_invoke_excepthook():\n\n\n\n\n old_excepthook=excepthook\n old_sys_excepthook=_sys.excepthook\n if old_excepthook is None :\n raise 
RuntimeError(\"threading.excepthook is None\")\n if old_sys_excepthook is None :\n raise RuntimeError(\"sys.excepthook is None\")\n \n sys_exc_info=_sys.exc_info\n local_print=print\n local_sys=_sys\n \n def invoke_excepthook(thread):\n global excepthook\n try :\n hook=excepthook\n if hook is None :\n hook=old_excepthook\n \n args=ExceptHookArgs([*sys_exc_info(),thread])\n \n hook(args)\n except Exception as exc:\n exc.__suppress_context__=True\n del exc\n \n if local_sys is not None and local_sys.stderr is not None :\n stderr=local_sys.stderr\n else :\n stderr=thread._stderr\n \n local_print(\"Exception in threading.excepthook:\",\n file=stderr,flush=True )\n \n if local_sys is not None and local_sys.excepthook is not None :\n sys_excepthook=local_sys.excepthook\n else :\n sys_excepthook=old_sys_excepthook\n \n sys_excepthook(*sys_exc_info())\n finally :\n \n args=None\n \n return invoke_excepthook\n \n \n \n \nclass Timer(Thread):\n ''\n\n\n\n\n\n \n \n def __init__(self,interval,function,args=None ,kwargs=None ):\n Thread.__init__(self)\n self.interval=interval\n self.function=function\n self.args=args if args is not None else []\n self.kwargs=kwargs if kwargs is not None else {}\n self.finished=Event()\n \n def cancel(self):\n ''\n self.finished.set()\n \n def run(self):\n self.finished.wait(self.interval)\n if not self.finished.is_set():\n self.function(*self.args,**self.kwargs)\n self.finished.set()\n \n \n \n \nclass _MainThread(Thread):\n\n def __init__(self):\n Thread.__init__(self,name=\"MainThread\",daemon=False )\n self._set_tstate_lock()\n self._started.set()\n self._set_ident()\n if _HAVE_THREAD_NATIVE_ID:\n self._set_native_id()\n with _active_limbo_lock:\n _active[self._ident]=self\n \n \n \n \n \n \n \n \n \n \nclass _DummyThread(Thread):\n\n def __init__(self):\n Thread.__init__(self,name=_newname(\"Dummy-%d\"),daemon=True )\n \n self._started.set()\n self._set_ident()\n if _HAVE_THREAD_NATIVE_ID:\n self._set_native_id()\n with _active_limbo_lock:\n _active[self._ident]=self\n \n def _stop(self):\n pass\n \n def is_alive(self):\n assert not self._is_stopped and self._started.is_set()\n return True\n \n def join(self,timeout=None ):\n assert False ,\"cannot join a dummy thread\"\n \n \n \n \ndef current_thread():\n ''\n\n\n\n\n \n try :\n return _active[get_ident()]\n except KeyError:\n return _DummyThread()\n \ncurrentThread=current_thread\n\ndef active_count():\n ''\n\n\n\n\n \n with _active_limbo_lock:\n return len(_active)+len(_limbo)\n \nactiveCount=active_count\n\ndef _enumerate():\n\n return list(_active.values())+list(_limbo.values())\n \ndef enumerate():\n ''\n\n\n\n\n\n \n with _active_limbo_lock:\n return list(_active.values())+list(_limbo.values())\n \n \n_threading_atexits=[]\n_SHUTTING_DOWN=False\n\ndef _register_atexit(func,*arg,**kwargs):\n ''\n\n\n\n\n\n\n\n \n if _SHUTTING_DOWN:\n raise RuntimeError(\"can't register atexit after shutdown\")\n \n call=functools.partial(func,*arg,**kwargs)\n _threading_atexits.append(call)\n \n \nfrom _thread import stack_size\n\n\n\n\n\n_main_thread=_MainThread()\n\ndef _shutdown():\n ''\n\n \n \n \n \n \n \n if _main_thread._is_stopped:\n \n return\n \n global _SHUTTING_DOWN\n _SHUTTING_DOWN=True\n \n tlock=_main_thread._tstate_lock\n \n \n assert tlock is not None\n assert tlock.locked()\n tlock.release()\n _main_thread._stop()\n \n \n \n for atexit_call in reversed(_threading_atexits):\n atexit_call()\n \n \n while True :\n with _shutdown_locks_lock:\n locks=list(_shutdown_locks)\n _shutdown_locks.clear()\n \n if not 
locks:\n break\n \n for lock in locks:\n \n lock.acquire()\n lock.release()\n \n \n \n \n \ndef main_thread():\n ''\n\n\n\n \n return _main_thread\n \n \n \n \ntry :\n from _thread import _local as local\nexcept ImportError:\n from _threading_local import local\n \n \ndef _after_fork():\n ''\n\n \n \n \n global _active_limbo_lock,_main_thread\n global _shutdown_locks_lock,_shutdown_locks\n _active_limbo_lock=_allocate_lock()\n \n \n new_active={}\n \n try :\n current=_active[get_ident()]\n except KeyError:\n \n \n \n current=_MainThread()\n \n _main_thread=current\n \n \n _shutdown_locks_lock=_allocate_lock()\n _shutdown_locks=set()\n \n with _active_limbo_lock:\n \n \n threads=set(_enumerate())\n threads.update(_dangling)\n for thread in threads:\n \n \n if thread is current:\n \n \n thread._reset_internal_locks(True )\n ident=get_ident()\n thread._ident=ident\n new_active[ident]=thread\n else :\n \n thread._reset_internal_locks(False )\n thread._stop()\n \n _limbo.clear()\n _active.clear()\n _active.update(new_active)\n assert len(_active)==1\n \n \nif hasattr(_os,\"register_at_fork\"):\n _os.register_at_fork(after_in_child=_after_fork)\n", ["_collections", "_thread", "_threading_local", "_weakrefset", "collections", "functools", "itertools", "os", "sys", "time", "traceback"]], "time": [".py", "import _locale\nimport javascript\n\n\ndate=javascript.Date.new\nnow=javascript.Date.now\n\n\n\n\n\n\n\n_STRUCT_TM_ITEMS=9\n\n\n\n\n\ndef _get_day_of_year(arg):\n ''\n\n\n\n\n\n\n\n\n\n \n ml=[31,28,31,30,31,30,31,31,30,31,30,31]\n if arg[0]%4 ==0:\n ml[1]+=1\n i=1\n yday=0\n while i mm >13:\n raise ValueError(\"month out of range\")\n \n dd=t[2]\n if dd ==0:dd=1\n if -1 >dd >32:\n raise ValueError(\"day of month out of range\")\n \n hh=t[3]\n if -1 >hh >24:\n raise ValueError(\"hour out of range\")\n \n minu=t[4]\n if -1 >minu >60:\n raise ValueError(\"minute out of range\")\n \n ss=t[5]\n if -1 >ss >62:\n raise ValueError(\"seconds out of range\")\n \n wd=t[6]%7\n if wd <-2:\n raise ValueError(\"day of week out of range\")\n \n dy=t[7]\n if dy ==0:dy=1\n if -1 >dy >367:\n raise ValueError(\"day of year out of range\")\n \n return t[0],mm,dd,hh,minu,ss,wd,dy,t[-1]\n \n \ndef _is_dst(secs=None ):\n ''\n d=date()\n if secs is not None :\n d=date(secs *1000)\n \n \n \n jan=date(d.getFullYear(),0,1)\n jul=date(d.getFullYear(),6,1)\n dst=int(d.getTimezoneOffset()=0 else 6\n tmp=struct_time([d.getUTCFullYear(),\n d.getUTCMonth()+1,d.getUTCDate(),\n d.getUTCHours(),d.getUTCMinutes(),d.getUTCSeconds(),\n wday,0,0])\n tmp.args[7]=_get_day_of_year(tmp.args)\n return tmp\n \ndef localtime(secs=None ):\n d=date()\n if secs is not None :\n d=date(secs *1000)\n dst=_is_dst(secs)\n wday=d.getDay()-1 if d.getDay()-1 >=0 else 6\n tmp=struct_time([d.getFullYear(),\n d.getMonth()+1,d.getDate(),\n d.getHours(),d.getMinutes(),d.getSeconds(),\n wday,0,dst])\n tmp.args[7]=_get_day_of_year(tmp.args)\n return tmp\n \ndef mktime(t):\n if isinstance(t,struct_time):\n d1=date(t.tm_year,t.tm_mon -1,t.tm_mday,\n t.tm_hour,t.tm_min,t.tm_sec,0).getTime()\n elif isinstance(t,tuple):\n d1=date(t[0],t[1]-1,t[2],t[3],t[4],t[5],0).getTime()\n else :\n raise ValueError(\"Tuple or struct_time argument required\")\n d2=date(0).getTime()\n return (d1 -d2)/1000.\n \ndef monotonic():\n return now()/1000.\n \ndef perf_counter():\n return now()/1000.\n \ndef process_time():\n return now()/1000.\n \ndef time():\n return float(date().getTime()/1000)\n \ndef sleep(secs):\n ''\n\n \n raise NotImplementedError(\"Blocking functions like 
time.sleep() are not \"\n \"supported in the browser. Use functions in module browser.timer \"\n \"instead.\")\n \ndef strftime(_format,t=None ):\n def ns(t,nb):\n \n res=str(t)\n while len(res) float\n repeat(string, string) -> list\n default_timer() -> float\n\n\"\"\"\n\nimport gc\nimport sys\nimport time\nimport itertools\n\n__all__=[\"Timer\",\"timeit\",\"repeat\",\"default_timer\"]\n\ndummy_src_name=\"\"\ndefault_number=1000000\ndefault_repeat=5\ndefault_timer=time.perf_counter\n\n_globals=globals\n\n\n\n\ntemplate=\"\"\"\ndef inner(_it, _timer{init}):\n {setup}\n _t0 = _timer()\n for _i in _it:\n {stmt}\n _t1 = _timer()\n return _t1 - _t0\n\"\"\"\n\ndef reindent(src,indent):\n ''\n return src.replace(\"\\n\",\"\\n\"+\" \"*indent)\n \nclass Timer:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def __init__(self,stmt=\"pass\",setup=\"pass\",timer=default_timer,\n globals=None ):\n ''\n self.timer=timer\n local_ns={}\n global_ns=_globals()if globals is None else globals\n init=''\n if isinstance(setup,str):\n \n compile(setup,dummy_src_name,\"exec\")\n stmtprefix=setup+'\\n'\n setup=reindent(setup,4)\n elif callable(setup):\n local_ns['_setup']=setup\n init +=', _setup=_setup'\n stmtprefix=''\n setup='_setup()'\n else :\n raise ValueError(\"setup is neither a string nor callable\")\n if isinstance(stmt,str):\n \n compile(stmtprefix+stmt,dummy_src_name,\"exec\")\n stmt=reindent(stmt,8)\n elif callable(stmt):\n local_ns['_stmt']=stmt\n init +=', _stmt=_stmt'\n stmt='_stmt()'\n else :\n raise ValueError(\"stmt is neither a string nor callable\")\n src=template.format(stmt=stmt,setup=setup,init=init)\n self.src=src\n code=compile(src,dummy_src_name,\"exec\")\n exec(code,global_ns,local_ns)\n self.inner=local_ns[\"inner\"]\n \n def print_exc(self,file=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n import linecache,traceback\n if self.src is not None :\n linecache.cache[dummy_src_name]=(len(self.src),\n None ,\n self.src.split(\"\\n\"),\n dummy_src_name)\n \n \n traceback.print_exc(file=file)\n \n def timeit(self,number=default_number):\n ''\n\n\n\n\n\n\n\n \n it=itertools.repeat(None ,number)\n gcold=gc.isenabled()\n gc.disable()\n try :\n timing=self.inner(it,self.timer)\n finally :\n if gcold:\n gc.enable()\n return timing\n \n def repeat(self,repeat=default_repeat,number=default_number):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n r=[]\n for i in range(repeat):\n t=self.timeit(number)\n r.append(t)\n return r\n \n def autorange(self,callback=None ):\n ''\n\n\n\n\n\n\n\n \n i=1\n while True :\n for j in 1,2,5:\n number=i *j\n time_taken=self.timeit(number)\n if callback:\n callback(number,time_taken)\n if time_taken >=0.2:\n return (number,time_taken)\n i *=10\n \ndef timeit(stmt=\"pass\",setup=\"pass\",timer=default_timer,\nnumber=default_number,globals=None ):\n ''\n return Timer(stmt,setup,timer,globals).timeit(number)\n \ndef repeat(stmt=\"pass\",setup=\"pass\",timer=default_timer,\nrepeat=default_repeat,number=default_number,globals=None ):\n ''\n return Timer(stmt,setup,timer,globals).repeat(repeat,number)\n \ndef main(args=None ,*,_wrap_timer=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if args is None :\n args=sys.argv[1:]\n import getopt\n try :\n opts,args=getopt.getopt(args,\"n:u:s:r:tcpvh\",\n [\"number=\",\"setup=\",\"repeat=\",\n \"time\",\"clock\",\"process\",\n \"verbose\",\"unit=\",\"help\"])\n except getopt.error as err:\n print(err)\n print(\"use -h/--help for command line help\")\n return 2\n \n timer=default_timer\n stmt=\"\\n\".join(args)or \"pass\"\n number=0\n setup=[]\n 
repeat=default_repeat\n verbose=0\n time_unit=None\n units={\"nsec\":1e-9,\"usec\":1e-6,\"msec\":1e-3,\"sec\":1.0}\n precision=3\n for o,a in opts:\n if o in (\"-n\",\"--number\"):\n number=int(a)\n if o in (\"-s\",\"--setup\"):\n setup.append(a)\n if o in (\"-u\",\"--unit\"):\n if a in units:\n time_unit=a\n else :\n print(\"Unrecognized unit. Please select nsec, usec, msec, or sec.\",\n file=sys.stderr)\n return 2\n if o in (\"-r\",\"--repeat\"):\n repeat=int(a)\n if repeat <=0:\n repeat=1\n if o in (\"-p\",\"--process\"):\n timer=time.process_time\n if o in (\"-v\",\"--verbose\"):\n if verbose:\n precision +=1\n verbose +=1\n if o in (\"-h\",\"--help\"):\n print(__doc__,end=' ')\n return 0\n setup=\"\\n\".join(setup)or \"pass\"\n \n \n \n \n import os\n sys.path.insert(0,os.curdir)\n if _wrap_timer is not None :\n timer=_wrap_timer(timer)\n \n t=Timer(stmt,setup,timer)\n if number ==0:\n \n callback=None\n if verbose:\n def callback(number,time_taken):\n msg=\"{num} loop{s} -> {secs:.{prec}g} secs\"\n plural=(number !=1)\n print(msg.format(num=number,s='s'if plural else '',\n secs=time_taken,prec=precision))\n try :\n number,_=t.autorange(callback)\n except :\n t.print_exc()\n return 1\n \n if verbose:\n print()\n \n try :\n raw_timings=t.repeat(repeat,number)\n except :\n t.print_exc()\n return 1\n \n def format_time(dt):\n unit=time_unit\n \n if unit is not None :\n scale=units[unit]\n else :\n scales=[(scale,unit)for unit,scale in units.items()]\n scales.sort(reverse=True )\n for scale,unit in scales:\n if dt >=scale:\n break\n \n return \"%.*g %s\"%(precision,dt /scale,unit)\n \n if verbose:\n print(\"raw times: %s\"%\", \".join(map(format_time,raw_timings)))\n print()\n timings=[dt /number for dt in raw_timings]\n \n best=min(timings)\n print(\"%d loop%s, best of %d: %s per loop\"\n %(number,'s'if number !=1 else '',\n repeat,format_time(best)))\n \n best=min(timings)\n worst=max(timings)\n if worst >=best *4:\n import warnings\n warnings.warn_explicit(\"The test results are likely unreliable. 
\"\n \"The worst time (%s) was more than four times \"\n \"slower than the best time (%s).\"\n %(format_time(worst),format_time(best)),\n UserWarning,'',0)\n return None\n \nif __name__ ==\"__main__\":\n sys.exit(main())\n", ["gc", "getopt", "itertools", "linecache", "os", "sys", "time", "traceback", "warnings"]], "token": [".py", "''\n\n\n__all__=['tok_name','ISTERMINAL','ISNONTERMINAL','ISEOF']\n\nENDMARKER=0\nNAME=1\nNUMBER=2\nSTRING=3\nNEWLINE=4\nINDENT=5\nDEDENT=6\nLPAR=7\nRPAR=8\nLSQB=9\nRSQB=10\nCOLON=11\nCOMMA=12\nSEMI=13\nPLUS=14\nMINUS=15\nSTAR=16\nSLASH=17\nVBAR=18\nAMPER=19\nLESS=20\nGREATER=21\nEQUAL=22\nDOT=23\nPERCENT=24\nLBRACE=25\nRBRACE=26\nEQEQUAL=27\nNOTEQUAL=28\nLESSEQUAL=29\nGREATEREQUAL=30\nTILDE=31\nCIRCUMFLEX=32\nLEFTSHIFT=33\nRIGHTSHIFT=34\nDOUBLESTAR=35\nPLUSEQUAL=36\nMINEQUAL=37\nSTAREQUAL=38\nSLASHEQUAL=39\nPERCENTEQUAL=40\nAMPEREQUAL=41\nVBAREQUAL=42\nCIRCUMFLEXEQUAL=43\nLEFTSHIFTEQUAL=44\nRIGHTSHIFTEQUAL=45\nDOUBLESTAREQUAL=46\nDOUBLESLASH=47\nDOUBLESLASHEQUAL=48\nAT=49\nATEQUAL=50\nRARROW=51\nELLIPSIS=52\nCOLONEQUAL=53\nOP=54\nAWAIT=55\nASYNC=56\nTYPE_IGNORE=57\nTYPE_COMMENT=58\n\nERRORTOKEN=59\nCOMMENT=60\nNL=61\nENCODING=62\nN_TOKENS=63\n\nNT_OFFSET=256\n\ntok_name={value:name\nfor name,value in globals().items()\nif isinstance(value,int)and not name.startswith('_')}\n__all__.extend(tok_name.values())\n\nEXACT_TOKEN_TYPES={\n'!=':NOTEQUAL,\n'%':PERCENT,\n'%=':PERCENTEQUAL,\n'&':AMPER,\n'&=':AMPEREQUAL,\n'(':LPAR,\n')':RPAR,\n'*':STAR,\n'**':DOUBLESTAR,\n'**=':DOUBLESTAREQUAL,\n'*=':STAREQUAL,\n'+':PLUS,\n'+=':PLUSEQUAL,\n',':COMMA,\n'-':MINUS,\n'-=':MINEQUAL,\n'->':RARROW,\n'.':DOT,\n'...':ELLIPSIS,\n'/':SLASH,\n'//':DOUBLESLASH,\n'//=':DOUBLESLASHEQUAL,\n'/=':SLASHEQUAL,\n':':COLON,\n':=':COLONEQUAL,\n';':SEMI,\n'<':LESS,\n'<<':LEFTSHIFT,\n'<<=':LEFTSHIFTEQUAL,\n'<=':LESSEQUAL,\n'=':EQUAL,\n'==':EQEQUAL,\n'>':GREATER,\n'>=':GREATEREQUAL,\n'>>':RIGHTSHIFT,\n'>>=':RIGHTSHIFTEQUAL,\n'@':AT,\n'@=':ATEQUAL,\n'[':LSQB,\n']':RSQB,\n'^':CIRCUMFLEX,\n'^=':CIRCUMFLEXEQUAL,\n'{':LBRACE,\n'|':VBAR,\n'|=':VBAREQUAL,\n'}':RBRACE,\n'~':TILDE,\n}\n\ndef ISTERMINAL(x):\n return x =NT_OFFSET\n \ndef ISEOF(x):\n return x ==ENDMARKER\n", []], "tokenize": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n__author__='Ka-Ping Yee '\n__credits__=('GvR, ESR, Tim Peters, Thomas Wouters, Fred Drake, '\n'Skip Montanaro, Raymond Hettinger, Trent Nelson, '\n'Michael Foord')\nfrom builtins import open as _builtin_open\nfrom codecs import lookup,BOM_UTF8\nimport collections\nfrom io import TextIOWrapper\nimport itertools as _itertools\nimport re\nimport sys\nfrom token import *\nfrom token import EXACT_TOKEN_TYPES\n\ncookie_re=re.compile(r'^[ \\t\\f]*#.*?coding[:=][ \\t]*([-\\w.]+)',re.ASCII)\nblank_re=re.compile(br'^[ \\t\\f]*(?:[#\\r\\n]|$)',re.ASCII)\n\nimport token\n__all__=token.__all__+[\"tokenize\",\"generate_tokens\",\"detect_encoding\",\n\"untokenize\",\"TokenInfo\"]\ndel token\n\nclass TokenInfo(collections.namedtuple('TokenInfo','type string start end line')):\n def __repr__(self):\n annotated_type='%d (%s)'%(self.type,tok_name[self.type])\n return ('TokenInfo(type=%s, string=%r, start=%r, end=%r, line=%r)'%\n self._replace(type=annotated_type))\n \n @property\n def exact_type(self):\n if self.type ==OP and self.string in EXACT_TOKEN_TYPES:\n return EXACT_TOKEN_TYPES[self.string]\n else :\n return self.type\n \ndef group(*choices):return '('+'|'.join(choices)+')'\ndef any(*choices):return group(*choices)+'*'\ndef maybe(*choices):return 
group(*choices)+'?'\n\n\n\nWhitespace=r'[ \\f\\t]*'\nComment=r'#[^\\r\\n]*'\nIgnore=Whitespace+any(r'\\\\\\r?\\n'+Whitespace)+maybe(Comment)\nName=r'\\w+'\n\nHexnumber=r'0[xX](?:_?[0-9a-fA-F])+'\nBinnumber=r'0[bB](?:_?[01])+'\nOctnumber=r'0[oO](?:_?[0-7])+'\nDecnumber=r'(?:0(?:_?0)*|[1-9](?:_?[0-9])*)'\nIntnumber=group(Hexnumber,Binnumber,Octnumber,Decnumber)\nExponent=r'[eE][-+]?[0-9](?:_?[0-9])*'\nPointfloat=group(r'[0-9](?:_?[0-9])*\\.(?:[0-9](?:_?[0-9])*)?',\nr'\\.[0-9](?:_?[0-9])*')+maybe(Exponent)\nExpfloat=r'[0-9](?:_?[0-9])*'+Exponent\nFloatnumber=group(Pointfloat,Expfloat)\nImagnumber=group(r'[0-9](?:_?[0-9])*[jJ]',Floatnumber+r'[jJ]')\nNumber=group(Imagnumber,Floatnumber,Intnumber)\n\n\ndef _all_string_prefixes():\n\n\n\n _valid_string_prefixes=['b','r','u','f','br','fr']\n \n result={''}\n for prefix in _valid_string_prefixes:\n for t in _itertools.permutations(prefix):\n \n \n for u in _itertools.product(*[(c,c.upper())for c in t]):\n result.add(''.join(u))\n return result\n \ndef _compile(expr):\n return re.compile(expr,re.UNICODE)\n \n \n \nStringPrefix=group(*_all_string_prefixes())\n\n\nSingle=r\"[^'\\\\]*(?:\\\\.[^'\\\\]*)*'\"\n\nDouble=r'[^\"\\\\]*(?:\\\\.[^\"\\\\]*)*\"'\n\nSingle3=r\"[^'\\\\]*(?:(?:\\\\.|'(?!''))[^'\\\\]*)*'''\"\n\nDouble3=r'[^\"\\\\]*(?:(?:\\\\.|\"(?!\"\"))[^\"\\\\]*)*\"\"\"'\nTriple=group(StringPrefix+\"'''\",StringPrefix+'\"\"\"')\n\nString=group(StringPrefix+r\"'[^\\n'\\\\]*(?:\\\\.[^\\n'\\\\]*)*'\",\nStringPrefix+r'\"[^\\n\"\\\\]*(?:\\\\.[^\\n\"\\\\]*)*\"')\n\n\n\n\nSpecial=group(*map(re.escape,sorted(EXACT_TOKEN_TYPES,reverse=True )))\nFunny=group(r'\\r?\\n',Special)\n\nPlainToken=group(Number,Funny,String,Name)\nToken=Ignore+PlainToken\n\n\nContStr=group(StringPrefix+r\"'[^\\n'\\\\]*(?:\\\\.[^\\n'\\\\]*)*\"+\ngroup(\"'\",r'\\\\\\r?\\n'),\nStringPrefix+r'\"[^\\n\"\\\\]*(?:\\\\.[^\\n\"\\\\]*)*'+\ngroup('\"',r'\\\\\\r?\\n'))\nPseudoExtras=group(r'\\\\\\r?\\n|\\Z',Comment,Triple)\nPseudoToken=Whitespace+group(PseudoExtras,Number,Funny,ContStr,Name)\n\n\n\n\nendpats={}\nfor _prefix in _all_string_prefixes():\n endpats[_prefix+\"'\"]=Single\n endpats[_prefix+'\"']=Double\n endpats[_prefix+\"'''\"]=Single3\n endpats[_prefix+'\"\"\"']=Double3\n \n \n \nsingle_quoted=set()\ntriple_quoted=set()\nfor t in _all_string_prefixes():\n for u in (t+'\"',t+\"'\"):\n single_quoted.add(u)\n for u in (t+'\"\"\"',t+\"'''\"):\n triple_quoted.add(u)\n \ntabsize=8\n\nclass TokenError(Exception):pass\n\nclass StopTokenizing(Exception):pass\n\n\nclass Untokenizer:\n\n def __init__(self):\n self.tokens=[]\n self.prev_row=1\n self.prev_col=0\n self.encoding=None\n \n def add_whitespace(self,start):\n row,col=start\n if row =len(indent):\n self.tokens.append(indent)\n self.prev_col=len(indent)\n startline=False\n self.add_whitespace(start)\n self.tokens.append(token)\n self.prev_row,self.prev_col=end\n if tok_type in (NEWLINE,NL):\n self.prev_row +=1\n self.prev_col=0\n return \"\".join(self.tokens)\n \n def compat(self,token,iterable):\n indents=[]\n toks_append=self.tokens.append\n startline=token[0]in (NEWLINE,NL)\n prevstring=False\n \n for tok in _itertools.chain([token],iterable):\n toknum,tokval=tok[:2]\n if toknum ==ENCODING:\n self.encoding=tokval\n continue\n \n if toknum in (NAME,NUMBER):\n tokval +=' '\n \n \n if toknum ==STRING:\n if prevstring:\n tokval=' '+tokval\n prevstring=True\n else :\n prevstring=False\n \n if toknum ==INDENT:\n indents.append(tokval)\n continue\n elif toknum ==DEDENT:\n indents.pop()\n continue\n elif toknum in (NEWLINE,NL):\n 
startline=True\n elif startline and indents:\n toks_append(indents[-1])\n startline=False\n toks_append(tokval)\n \n \ndef untokenize(iterable):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n ut=Untokenizer()\n out=ut.untokenize(iterable)\n if ut.encoding is not None :\n out=out.encode(ut.encoding)\n return out\n \n \ndef _get_normal_name(orig_enc):\n ''\n \n enc=orig_enc[:12].lower().replace(\"_\",\"-\")\n if enc ==\"utf-8\"or enc.startswith(\"utf-8-\"):\n return \"utf-8\"\n if enc in (\"latin-1\",\"iso-8859-1\",\"iso-latin-1\")or\\\n enc.startswith((\"latin-1-\",\"iso-8859-1-\",\"iso-latin-1-\")):\n return \"iso-8859-1\"\n return orig_enc\n \ndef detect_encoding(readline):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n try :\n filename=readline.__self__.name\n except AttributeError:\n filename=None\n bom_found=False\n encoding=None\n default='utf-8'\n def read_or_stop():\n try :\n return readline()\n except StopIteration:\n return b''\n \n def find_cookie(line):\n try :\n \n \n \n line_string=line.decode('utf-8')\n except UnicodeDecodeError:\n msg=\"invalid or missing encoding declaration\"\n if filename is not None :\n msg='{} for {!r}'.format(msg,filename)\n raise SyntaxError(msg)\n \n match=cookie_re.match(line_string)\n if not match:\n return None\n encoding=_get_normal_name(match.group(1))\n try :\n codec=lookup(encoding)\n except LookupError:\n \n if filename is None :\n msg=\"unknown encoding: \"+encoding\n else :\n msg=\"unknown encoding for {!r}: {}\".format(filename,\n encoding)\n raise SyntaxError(msg)\n \n if bom_found:\n if encoding !='utf-8':\n \n if filename is None :\n msg='encoding problem: utf-8'\n else :\n msg='encoding problem for {!r}: utf-8'.format(filename)\n raise SyntaxError(msg)\n encoding +='-sig'\n return encoding\n \n first=read_or_stop()\n if first.startswith(BOM_UTF8):\n bom_found=True\n first=first[3:]\n default='utf-8-sig'\n if not first:\n return default,[]\n \n encoding=find_cookie(first)\n if encoding:\n return encoding,[first]\n if not blank_re.match(first):\n return default,[first]\n \n second=read_or_stop()\n if not second:\n return default,[first]\n \n encoding=find_cookie(second)\n if encoding:\n return encoding,[first,second]\n \n return default,[first,second]\n \n \ndef open(filename):\n ''\n\n \n buffer=_builtin_open(filename,'rb')\n try :\n encoding,lines=detect_encoding(buffer.readline)\n buffer.seek(0)\n text=TextIOWrapper(buffer,encoding,line_buffering=True )\n text.mode='r'\n return text\n except :\n buffer.close()\n raise\n \n \ndef tokenize(readline):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n encoding,consumed=detect_encoding(readline)\n empty=_itertools.repeat(b\"\")\n rl_gen=_itertools.chain(consumed,iter(readline,b\"\"),empty)\n return _tokenize(rl_gen.__next__,encoding)\n \n \ndef _tokenize(readline,encoding):\n lnum=parenlev=continued=0\n numchars='0123456789'\n contstr,needcont='',0\n contline=None\n indents=[0]\n \n if encoding is not None :\n if encoding ==\"utf-8-sig\":\n \n encoding=\"utf-8\"\n yield TokenInfo(ENCODING,encoding,(0,0),(0,0),'')\n last_line=b''\n line=b''\n while True :\n try :\n \n \n \n \n last_line=line\n line=readline()\n except StopIteration:\n line=b''\n \n if encoding is not None :\n line=line.decode(encoding)\n lnum +=1\n pos,max=0,len(line)\n \n if contstr:\n if not line:\n raise TokenError(\"EOF in multi-line string\",strstart)\n endmatch=endprog.match(line)\n if endmatch:\n pos=end=endmatch.end(0)\n yield TokenInfo(STRING,contstr+line[:end],\n strstart,(lnum,end),contline+line)\n contstr,needcont='',0\n 
contline=None\n elif needcont and line[-2:]!='\\\\\\n'and line[-3:]!='\\\\\\r\\n':\n yield TokenInfo(ERRORTOKEN,contstr+line,\n strstart,(lnum,len(line)),contline)\n contstr=''\n contline=None\n continue\n else :\n contstr=contstr+line\n contline=contline+line\n continue\n \n elif parenlev ==0 and not continued:\n if not line:break\n column=0\n while pos indents[-1]:\n indents.append(column)\n yield TokenInfo(INDENT,line[:pos],(lnum,0),(lnum,pos),line)\n while column \",lnum,pos,line))\n indents=indents[:-1]\n \n yield TokenInfo(DEDENT,'',(lnum,pos),(lnum,pos),line)\n \n else :\n if not line:\n raise TokenError(\"EOF in multi-line statement\",(lnum,0))\n continued=0\n \n while pos 0:\n yield TokenInfo(NL,token,spos,epos,line)\n else :\n yield TokenInfo(NEWLINE,token,spos,epos,line)\n \n elif initial =='#':\n assert not token.endswith(\"\\n\")\n yield TokenInfo(COMMENT,token,spos,epos,line)\n \n elif token in triple_quoted:\n endprog=_compile(endpats[token])\n endmatch=endprog.match(line,pos)\n if endmatch:\n pos=endmatch.end(0)\n token=line[start:pos]\n yield TokenInfo(STRING,token,spos,(lnum,pos),line)\n else :\n strstart=(lnum,start)\n contstr=line[start:]\n contline=line\n break\n \n \n \n \n \n \n \n \n \n \n \n elif (initial in single_quoted or\n token[:2]in single_quoted or\n token[:3]in single_quoted):\n if token[-1]=='\\n':\n strstart=(lnum,start)\n \n \n \n \n \n \n endprog=_compile(endpats.get(initial)or\n endpats.get(token[1])or\n endpats.get(token[2]))\n contstr,needcont=line[start:],1\n contline=line\n break\n else :\n yield TokenInfo(STRING,token,spos,epos,line)\n \n elif initial.isidentifier():\n yield TokenInfo(NAME,token,spos,epos,line)\n elif initial =='\\\\':\n continued=1\n else :\n if initial in '([{':\n parenlev +=1\n elif initial in ')]}':\n parenlev -=1\n yield TokenInfo(OP,token,spos,epos,line)\n else :\n yield TokenInfo(ERRORTOKEN,line[pos],\n (lnum,pos),(lnum,pos+1),line)\n pos +=1\n \n \n if last_line and last_line[-1]not in '\\r\\n':\n yield TokenInfo(NEWLINE,'',(lnum -1,len(last_line)),(lnum -1,len(last_line)+1),'')\n for indent in indents[1:]:\n yield TokenInfo(DEDENT,'',(lnum,0),(lnum,0),'')\n yield TokenInfo(ENDMARKER,'',(lnum,0),(lnum,0),'')\n \n \ndef generate_tokens(readline):\n ''\n\n\n\n \n return _tokenize(readline,None )\n \ndef main():\n import argparse\n \n \n def perror(message):\n sys.stderr.write(message)\n sys.stderr.write('\\n')\n \n def error(message,filename=None ,location=None ):\n if location:\n args=(filename,)+location+(message,)\n perror(\"%s:%d:%d: error: %s\"%args)\n elif filename:\n perror(\"%s: error: %s\"%(filename,message))\n else :\n perror(\"error: %s\"%message)\n sys.exit(1)\n \n \n parser=argparse.ArgumentParser(prog='python -m tokenize')\n parser.add_argument(dest='filename',nargs='?',\n metavar='filename.py',\n help='the file to tokenize; defaults to stdin')\n parser.add_argument('-e','--exact',dest='exact',action='store_true',\n help='display token names using the exact type')\n args=parser.parse_args()\n \n try :\n \n if args.filename:\n filename=args.filename\n with _builtin_open(filename,'rb')as f:\n tokens=list(tokenize(f.readline))\n else :\n filename=\"\"\n tokens=_tokenize(sys.stdin.readline,None )\n \n \n for token in tokens:\n token_type=token.type\n if args.exact:\n token_type=token.exact_type\n token_range=\"%d,%d-%d,%d:\"%(token.start+token.end)\n print(\"%-20s%-15s%-15r\"%\n (token_range,tok_name[token_type],token.string))\n except IndentationError as err:\n line,column=err.args[1][1:3]\n 
error(err.args[0],filename,(line,column))\n except TokenError as err:\n line,column=err.args[1]\n error(err.args[0],filename,(line,column))\n except SyntaxError as err:\n error(err,filename)\n except OSError as err:\n error(err)\n except KeyboardInterrupt:\n print(\"interrupted\\n\")\n except Exception as err:\n perror(\"unexpected error: %s\"%err)\n raise\n \nif __name__ ==\"__main__\":\n main()\n", ["argparse", "builtins", "codecs", "collections", "io", "itertools", "re", "sys", "token"]], "traceback": [".py", "''\n\nimport collections\nimport itertools\nimport linecache\nimport sys\n\n__all__=['extract_stack','extract_tb','format_exception',\n'format_exception_only','format_list','format_stack',\n'format_tb','print_exc','format_exc','print_exception',\n'print_last','print_stack','print_tb','clear_frames',\n'FrameSummary','StackSummary','TracebackException',\n'walk_stack','walk_tb']\n\n\n\n\n\ndef print_list(extracted_list,file=None ):\n ''\n \n if file is None :\n file=sys.stderr\n for item in StackSummary.from_list(extracted_list).format():\n print(item,file=file,end=\"\")\n \ndef format_list(extracted_list):\n ''\n\n\n\n\n\n\n\n\n\n \n return StackSummary.from_list(extracted_list).format()\n \n \n \n \n \ndef print_tb(tb,limit=None ,file=None ):\n ''\n\n\n\n\n\n \n print_list(extract_tb(tb,limit=limit),file=file)\n \ndef format_tb(tb,limit=None ):\n ''\n return extract_tb(tb,limit=limit).format()\n \ndef extract_tb(tb,limit=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n \n return StackSummary.extract(walk_tb(tb),limit=limit)\n \n \n \n \n \n_cause_message=(\n\"\\nThe above exception was the direct cause \"\n\"of the following exception:\\n\\n\")\n\n_context_message=(\n\"\\nDuring handling of the above exception, \"\n\"another exception occurred:\\n\\n\")\n\n\ndef print_exception(etype,value,tb,limit=None ,file=None ,chain=True ):\n ''\n\n\n\n\n\n\n\n\n \n \n \n \n if file is None :\n file=sys.stderr\n for line in TracebackException(\n type(value),value,tb,limit=limit).format(chain=chain):\n print(line,file=file,end=\"\")\n \n \ndef format_exception(etype,value,tb,limit=None ,chain=True ):\n ''\n\n\n\n\n\n\n \n \n \n \n return list(TracebackException(\n type(value),value,tb,limit=limit).format(chain=chain))\n \n \ndef format_exception_only(etype,value):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n return list(TracebackException(etype,value,None ).format_exception_only())\n \n \n \n \ndef _format_final_exc_line(etype,value):\n valuestr=_some_str(value)\n if value is None or not valuestr:\n line=\"%s\\n\"%etype\n else :\n line=\"%s: %s\\n\"%(etype,valuestr)\n return line\n \ndef _some_str(value):\n try :\n return str(value)\n except :\n return ''%type(value).__name__\n \n \n \ndef print_exc(limit=None ,file=None ,chain=True ):\n ''\n print_exception(*sys.exc_info(),limit=limit,file=file,chain=chain)\n \ndef format_exc(limit=None ,chain=True ):\n ''\n return \"\".join(format_exception(*sys.exc_info(),limit=limit,chain=chain))\n \ndef print_last(limit=None ,file=None ,chain=True ):\n ''\n \n if not hasattr(sys,\"last_type\"):\n raise ValueError(\"no last exception\")\n print_exception(sys.last_type,sys.last_value,sys.last_traceback,\n limit,file,chain)\n \n \n \n \n \ndef print_stack(f=None ,limit=None ,file=None ):\n ''\n\n\n\n\n \n if f is None :\n f=sys._getframe().f_back\n print_list(extract_stack(f,limit=limit),file=file)\n \n \ndef format_stack(f=None ,limit=None ):\n ''\n if f is None :\n f=sys._getframe().f_back\n return format_list(extract_stack(f,limit=limit))\n \n \ndef extract_stack(f=None 
,limit=None ):\n ''\n\n\n\n\n\n\n \n if f is None :\n f=sys._getframe().f_back\n stack=StackSummary.extract(walk_stack(f),limit=limit)\n stack.reverse()\n return stack\n \n \ndef clear_frames(tb):\n ''\n while tb is not None :\n try :\n tb.tb_frame.clear()\n except RuntimeError:\n \n pass\n tb=tb.tb_next\n \n \nclass FrameSummary:\n ''\n\n\n\n\n\n\n\n\n\n\n \n \n __slots__=('filename','lineno','name','_line','locals')\n \n def __init__(self,filename,lineno,name,*,lookup_line=True ,\n locals=None ,line=None ):\n ''\n\n\n\n\n\n\n\n \n self.filename=filename\n self.lineno=lineno\n self.name=name\n self._line=line\n if lookup_line:\n self.line\n self.locals={k:repr(v)for k,v in locals.items()}if locals else None\n \n def __eq__(self,other):\n if isinstance(other,FrameSummary):\n return (self.filename ==other.filename and\n self.lineno ==other.lineno and\n self.name ==other.name and\n self.locals ==other.locals)\n if isinstance(other,tuple):\n return (self.filename,self.lineno,self.name,self.line)==other\n return NotImplemented\n \n def __getitem__(self,pos):\n return (self.filename,self.lineno,self.name,self.line)[pos]\n \n def __iter__(self):\n return iter([self.filename,self.lineno,self.name,self.line])\n \n def __repr__(self):\n return \"\".format(\n filename=self.filename,lineno=self.lineno,name=self.name)\n \n def __len__(self):\n return 4\n \n @property\n def line(self):\n if self._line is None :\n self._line=linecache.getline(self.filename,self.lineno).strip()\n return self._line\n \n \ndef walk_stack(f):\n ''\n\n\n\n \n if f is None :\n f=sys._getframe().f_back.f_back\n while f is not None :\n yield f,f.f_lineno\n f=f.f_back\n \n \ndef walk_tb(tb):\n ''\n\n\n\n \n while tb is not None :\n yield tb.tb_frame,tb.tb_lineno\n tb=tb.tb_next\n \n \n_RECURSIVE_CUTOFF=3\n\nclass StackSummary(list):\n ''\n \n @classmethod\n def extract(klass,frame_gen,*,limit=None ,lookup_lines=True ,\n capture_locals=False ):\n ''\n\n\n\n\n\n\n\n\n\n \n if limit is None :\n limit=getattr(sys,'tracebacklimit',None )\n if limit is not None and limit <0:\n limit=0\n if limit is not None :\n if limit >=0:\n frame_gen=itertools.islice(frame_gen,limit)\n else :\n frame_gen=collections.deque(frame_gen,maxlen=-limit)\n \n result=klass()\n fnames=set()\n for f,lineno in frame_gen:\n co=f.f_code\n filename=co.co_filename\n name=co.co_name\n \n fnames.add(filename)\n linecache.lazycache(filename,f.f_globals)\n \n if capture_locals:\n f_locals=f.f_locals\n else :\n f_locals=None\n result.append(FrameSummary(\n filename,lineno,name,lookup_line=False ,locals=f_locals))\n for filename in fnames:\n linecache.checkcache(filename)\n \n if lookup_lines:\n for f in result:\n f.line\n return result\n \n @classmethod\n def from_list(klass,a_list):\n ''\n\n\n \n \n \n \n \n result=StackSummary()\n for frame in a_list:\n if isinstance(frame,FrameSummary):\n result.append(frame)\n else :\n filename,lineno,name,line=frame\n result.append(FrameSummary(filename,lineno,name,line=line))\n return result\n \n def format(self):\n ''\n\n\n\n\n\n\n\n\n\n \n result=[]\n last_file=None\n last_line=None\n last_name=None\n count=0\n for frame in self:\n if (last_file is None or last_file !=frame.filename or\n last_line is None or last_line !=frame.lineno or\n last_name is None or last_name !=frame.name):\n if count >_RECURSIVE_CUTOFF:\n count -=_RECURSIVE_CUTOFF\n result.append(\n f' [Previous line repeated {count} more '\n f'time{\"s\" if count > 1 else \"\"}]\\n'\n )\n last_file=frame.filename\n last_line=frame.lineno\n last_name=frame.name\n 
count=0\n count +=1\n if count >_RECURSIVE_CUTOFF:\n continue\n row=[]\n row.append(' File \"{}\", line {}, in {}\\n'.format(\n frame.filename,frame.lineno,frame.name))\n if frame.line:\n row.append(' {}\\n'.format(frame.line.strip()))\n if frame.locals:\n for name,value in sorted(frame.locals.items()):\n row.append(' {name} = {value}\\n'.format(name=name,value=value))\n result.append(''.join(row))\n if count >_RECURSIVE_CUTOFF:\n count -=_RECURSIVE_CUTOFF\n result.append(\n f' [Previous line repeated {count} more '\n f'time{\"s\" if count > 1 else \"\"}]\\n'\n )\n return result\n \n \nclass TracebackException:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def __init__(self,exc_type,exc_value,exc_traceback,*,limit=None ,\n lookup_lines=True ,capture_locals=False ,_seen=None ):\n \n \n \n \n if _seen is None :\n _seen=set()\n _seen.add(id(exc_value))\n \n \n if (exc_value and exc_value.__cause__ is not None\n and id(exc_value.__cause__)not in _seen):\n cause=TracebackException(\n type(exc_value.__cause__),\n exc_value.__cause__,\n exc_value.__cause__.__traceback__,\n limit=limit,\n lookup_lines=False ,\n capture_locals=capture_locals,\n _seen=_seen)\n else :\n cause=None\n if (exc_value and exc_value.__context__ is not None\n and id(exc_value.__context__)not in _seen):\n context=TracebackException(\n type(exc_value.__context__),\n exc_value.__context__,\n exc_value.__context__.__traceback__,\n limit=limit,\n lookup_lines=False ,\n capture_locals=capture_locals,\n _seen=_seen)\n else :\n context=None\n self.exc_traceback=exc_traceback\n self.__cause__=cause\n self.__context__=context\n self.__suppress_context__=\\\n exc_value.__suppress_context__ if exc_value else False\n \n self.stack=StackSummary.extract(\n walk_tb(exc_traceback),limit=limit,lookup_lines=lookup_lines,\n capture_locals=capture_locals)\n self.exc_type=exc_type\n \n \n self._str=_some_str(exc_value)\n if exc_type and issubclass(exc_type,SyntaxError):\n \n self.filename=exc_value.filename\n self.lineno=str(exc_value.lineno)\n self.text=exc_value.text\n self.offset=exc_value.offset\n self.msg=exc_value.msg\n if lookup_lines:\n self._load_lines()\n \n @classmethod\n def from_exception(cls,exc,*args,**kwargs):\n ''\n return cls(type(exc),exc,exc.__traceback__,*args,**kwargs)\n \n def _load_lines(self):\n ''\n for frame in self.stack:\n frame.line\n if self.__context__:\n self.__context__._load_lines()\n if self.__cause__:\n self.__cause__._load_lines()\n \n def __eq__(self,other):\n if isinstance(other,TracebackException):\n return self.__dict__ ==other.__dict__\n return NotImplemented\n \n def __str__(self):\n return self._str\n \n def format_exception_only(self):\n ''\n\n\n\n\n\n\n\n\n\n\n \n if self.exc_type is None :\n yield _format_final_exc_line(None ,self._str)\n return\n \n stype=self.exc_type.__qualname__\n smod=self.exc_type.__module__\n if smod not in (\"__main__\",\"builtins\"):\n stype=smod+'.'+stype\n \n if not issubclass(self.exc_type,SyntaxError):\n yield _format_final_exc_line(stype,self._str)\n else :\n yield from self._format_syntax_error(stype)\n \n def _format_syntax_error(self,stype):\n ''\n \n filename=self.filename or \"\"\n lineno=str(self.lineno)or '?'\n yield ' File \"{}\", line {}\\n'.format(filename,lineno)\n \n text=self.text\n if text is not None :\n \n \n \n rtext=text.rstrip('\\n')\n ltext=rtext.lstrip(' \\n\\f')\n spaces=len(rtext)-len(ltext)\n yield ' {}\\n'.format(ltext)\n \n caret=(self.offset or 0)-1 -spaces\n if caret >=0:\n \n caretspace=((c if c.isspace()else ' ')for c in 
ltext[:caret])\n yield ' {}^\\n'.format(''.join(caretspace))\n msg=self.msg or \"\"\n yield \"{}: {}\\n\".format(stype,msg)\n \n def format(self,*,chain=True ):\n ''\n\n\n\n\n\n\n\n\n\n \n if chain:\n if self.__cause__ is not None :\n yield from self.__cause__.format(chain=chain)\n yield _cause_message\n elif (self.__context__ is not None and\n not self.__suppress_context__):\n yield from self.__context__.format(chain=chain)\n yield _context_message\n if self.exc_traceback is not None :\n yield 'Traceback (most recent call last):\\n'\n yield from self.stack.format()\n yield from self.format_exception_only()\n", ["collections", "itertools", "linecache", "sys"]], "turtle": [".py", "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nimport math\nimport sys\n\nfrom math import cos,sin\n\nfrom browser import console,document,html,timer\nimport _svg as svg\nimport copy\n\n\n\n\n\n_CFG={\n\n\n\"canvwidth\":500,\n\"canvheight\":500,\n\n\n\"mode\":\"standard\",\n\n\n\n\"shape\":\"classic\",\n\"pencolor\":\"black\",\n\"fillcolor\":\"black\",\n\n\"visible\":True ,\n\n\n\n\n\n\n\n\"turtle_canvas_wrapper\":None ,\n\"turtle_canvas_id\":\"turtle-canvas\",\n\"min_duration\":\"1ms\"\n}\n\n_cfg_copy=copy.copy(_CFG)\n\n\ndef set_defaults(**params):\n ''\n _CFG.update(**params)\n Screen().reset()\n \n \nclass FormattedTuple(tuple):\n ''\n def __new__(cls,x,y):\n return tuple.__new__(cls,(x,y))\n def __repr__(self):\n return \"(%.2f, %.2f)\"%self\n \ndef create_circle(r):\n ''\n circle=svg.circle(x=0,y=0,r=r,stroke=\"black\",fill=\"black\")\n circle.setAttribute(\"stroke-width\",1)\n return circle\n \n \ndef create_polygon(points):\n ''\n points=[\"%s,%s \"%(x,y)for x,y in points]\n polygon=svg.polygon(points=points,stroke=\"black\",fill=\"black\")\n polygon.setAttribute(\"stroke-width\",1)\n return polygon\n \n \ndef create_rectangle(width=2,height=2,rx=None ,ry=None ):\n ''\n \n rectangle=svg.rect(x=-width /2,y=-height /2,width=width,\n height=height,stroke=\"black\",fill=\"black\")\n rectangle.setAttribute(\"stroke-width\",1)\n if rx is not None :\n rectangle.setAttribute(\"rx\",rx)\n if ry is not None :\n rectangle.setAttribute(\"ry\",ry)\n return rectangle\n \n \ndef create_square(size=2,r=None ):\n ''\n \n return create_rectangle(width=size,height=size,rx=r,ry=r)\n \n \nclass TurtleGraphicsError(Exception):\n ''\n \n pass\n \n \nclass Singleton(type):\n _instances={}\n def __call__(cls,*args,**kwargs):\n if cls not in cls._instances:\n cls._instances[cls]=super(Singleton,cls).__call__(*args,**kwargs)\n return cls._instances[cls]\n \n \nclass Screen(metaclass=Singleton):\n\n def __init__(self):\n self.shapes={\n 'arrow':(create_polygon,((-10,0),(10,0),(0,10))),\n 'turtle':(create_polygon,((0,16),(-2,14),(-1,10),(-4,7),\n (-7,9),(-9,8),(-6,5),(-7,1),(-5,-3),(-8,-6),\n (-6,-8),(-4,-5),(0,-7),(4,-5),(6,-8),(8,-6),\n (5,-3),(7,1),(6,5),(9,8),(7,9),(4,7),(1,10),\n (2,14))),\n 'classic':(create_polygon,((0,0),(-5,-9),(0,-7),(5,-9))),\n 'triangle':(create_polygon,((10,-5.77),(0,11.55),(-10,-5.77))),\n 'square':(create_square,20),\n 'circle':(create_circle,10)\n }\n self.reset()\n self._set_geometry()\n \n def bgcolor(self,color=None ):\n ''\n\n \n if color is None :\n return self.background_color\n self.background_color=color\n width=_CFG['canvwidth']\n height=_CFG['canvheight']\n if self.mode()in ['logo','standard']:\n x=-width //2\n y=-height //2\n else :\n x=0\n y=-height\n \n self.frame_index +=1\n rect=svg.rect(x=x,y=y,width=width,height=height,fill=color,\n style={'display':'none'})\n 
an=svg.animate(Id=\"animation_frame%s\"%self.frame_index,\n attributeName=\"display\",attributeType=\"CSS\",\n From=\"block\",to=\"block\",dur=_CFG[\"min_duration\"],\n fill='freeze')\n an.setAttribute('begin',\"animation_frame%s.end\"%(self.frame_index -1))\n rect <=an\n \n self.background_canvas <=rect\n \n def _convert_coordinates(self,x,y):\n ''\n\n\n\n\n\n \n return x *self.yscale,self.y_points_down *y *self.yscale\n \n \n def create_svg_turtle(self,_turtle,name):\n if name in self.shapes:\n fn=self.shapes[name][0]\n arg=self.shapes[name][1]\n else :\n print(\"Unknown turtle '%s'; the default turtle will be used\")\n fn=self.shapes[_CVG[\"shape\"]][0]\n arg=self.shapes[_CVG[\"shape\"]][1]\n shape=fn(arg)\n if self._mode =='standard'or self._mode =='world':\n rotation=-90\n else :\n rotation=0\n return shape,rotation\n \n def _dot(self,pos,size,color):\n ''\n if color is None :\n color='black'\n if size is None or size <1:\n size=1\n self.frame_index +=1\n \n x,y=self._convert_coordinates(pos[0],pos[1])\n \n circle=svg.circle(cx=x,cy=y,r=size,fill=color,\n style={'display':'none'})\n an=svg.animate(Id=\"animation_frame%s\"%self.frame_index,\n attributeName=\"display\",attributeType=\"CSS\",\n From=\"block\",to=\"block\",dur=_CFG[\"min_duration\"],\n fill='freeze')\n an.setAttribute('begin',\"animation_frame%s.end\"%(self.frame_index -1))\n circle <=an\n self.canvas <=circle\n \n def _drawline(self,_turtle,coordlist=None ,\n color=None ,width=1,speed=None ):\n ''\n\n\n\n\n \n \n outline=color[0]\n fill=color[1]\n \n x0,y0=coordlist[0]\n x1,y1=coordlist[1]\n \n x0,y0=self._convert_coordinates(x0,y0)\n x1,y1=self._convert_coordinates(x1,y1)\n \n \n if speed ==0:\n duration=_CFG[\"min_duration\"]\n else :\n dist=_turtle._distance\n if speed is None or speed ==1:\n duration=0.02 *dist\n else :\n duration=0.02 *dist /speed **1.2\n if duration <0.001:\n duration=_CFG[\"min_duration\"]\n else :\n duration=\"%6.3fs\"%duration\n \n drawing=_turtle._drawing\n \n _line=svg.line(x1=x0,y1=y0,x2=x0,y2=y0,\n style={'stroke':outline,'stroke-width':width})\n if not drawing:\n _line.setAttribute('opacity',0)\n \n \n begin=\"animation_frame%s.end\"%self.frame_index\n self.frame_index +=1\n _an1=svg.animate(Id=\"animation_frame%s\"%self.frame_index,\n attributeName=\"x2\",attributeType=\"XML\",\n From=x0,to=x1,dur=duration,fill='freeze',\n begin=begin)\n _line <=_an1\n \n \n if drawing:\n _an2=svg.animate(attributeName=\"y2\",attributeType=\"XML\",\n begin=begin,\n From=y0,to=y1,dur=duration,fill='freeze')\n _line <=_an2\n \n if width >2:\n _line_cap=svg.set(attributeName=\"stroke-linecap\",\n begin=begin,\n attributeType=\"xml\",to=\"round\",dur=duration,fill='freeze')\n _line <=_line_cap\n \n self.canvas <=_line\n return begin,duration,(x0,y0),(x1,y1)\n \n def _drawpoly(self,coordlist,outline=None ,fill=None ,width=None ):\n ''\n\n\n\n\n \n self.frame_index +=1\n shape=[\"%s,%s\"%self._convert_coordinates(x,y)for x,y in coordlist]\n \n style={'display':'none'}\n if fill is not None :\n style['fill']=fill\n if outline is not None :\n style['stroke']=outline\n if width is not None :\n style['stroke-width']=width\n else :\n style['stroke-width']=1\n \n polygon=svg.polygon(points=\" \".join(shape),style=style)\n \n an=svg.animate(Id=\"animation_frame%s\"%self.frame_index,\n attributeName=\"display\",attributeType=\"CSS\",\n From=\"block\",to=\"block\",dur=_CFG[\"min_duration\"],\n fill='freeze')\n \n an.setAttribute('begin',\"animation_frame%s.end\"%(self.frame_index -1))\n polygon <=an\n self.canvas 
<=polygon\n \n \n def _new_frame(self):\n ''\n \n previous_end=\"animation_frame%s.end\"%self.frame_index\n self.frame_index +=1\n new_frame_id=\"animation_frame%s\"%self.frame_index\n return previous_end,new_frame_id\n \n def mode(self,_mode=None ):\n if _mode is None :\n return self._mode\n _CFG['mode']=_mode\n self.reset()\n \n \n def reset(self):\n self._turtles=[]\n self.frame_index=0\n self.background_color=\"white\"\n self._set_geometry()\n \n def _set_geometry(self):\n self.width=_CFG[\"canvwidth\"]\n self.height=_CFG[\"canvheight\"]\n self.x_offset=self.y_offset=0\n self.xscale=self.yscale=1\n \n self.y_points_down=-1\n self._mode=_CFG[\"mode\"].lower()\n if self._mode in ['logo','standard']:\n self.translate_canvas=(self.width //2,self.height //2)\n elif self._mode =='world':\n self.translate_canvas=(0,self.height)\n self._setup_canvas()\n \n def _setup_canvas(self):\n self.svg_scene=svg.svg(Id=_CFG[\"turtle_canvas_id\"],width=self.width,\n height=self.height)\n translate=\"translate(%d %d)\"%self.translate_canvas\n \n \n self.svg_scene <=svg.animate(\n Id=\"animation_frame%s\"%self.frame_index,\n attributeName=\"width\",attributeType=\"CSS\",\n From=self.width,to=self.width,begin=\"0s\",\n dur=_CFG[\"min_duration\"],fill='freeze')\n \n \n \n \n self.background_canvas=svg.g(transform=translate)\n self.canvas=svg.g(transform=translate)\n self.writing_canvas=svg.g(transform=translate)\n self.turtle_canvas=svg.g(transform=translate)\n \n self.svg_scene <=self.background_canvas\n self.svg_scene <=self.canvas\n self.svg_scene <=self.writing_canvas\n self.svg_scene <=self.turtle_canvas\n \n \n def setworldcoordinates(self,llx,lly,urx,ury):\n ''\n\n\n\n\n\n\n\n\n\n\n \n self._mode=\"world\"\n \n if urx 2:\n self.screen._drawpoly(self._fillpath,outline=self._pencolor,\n fill=self._fillcolor,)\n else :\n print(\"No path to fill.\")\n self._fillpath=None\n \n def dot(self,size=None ,color=None ):\n ''\n \n item=self.screen._dot((self._x,self._y),size,color=color)\n \n def _write(self,txt,align,font,color=None ):\n ''\n \n if color is None :\n color=self._pencolor\n self.screen._write((self._x,self._y),txt,align,font,color)\n \n \n def write(self,arg,align=\"left\",font=(\"Arial\",8,\"normal\"),color=None ):\n ''\n\n\n\n\n\n\n \n self._write(str(arg),align.lower(),font,color=color)\n \n def begin_poly(self):\n ''\n \n self._poly=[(self._x,self._y)]\n self._creatingPoly=True\n \n def end_poly(self):\n ''\n \n self._creatingPoly=False\n \n def get_poly(self):\n ''\n \n \n if self._poly is not None :\n return tuple(self._poly)\n \n def getscreen(self):\n ''\n \n return self.screen\n \n def getturtle(self):\n ''\n\n\n \n return self\n getpen=getturtle\n \n def _make_copy(self,name=None ):\n ''\n\n \n \n if name is None :\n name=self.name\n \n \n \n \n \n _turtle,rotation=self.screen.create_svg_turtle(self,name=name)\n _turtle.setAttribute(\"opacity\",0)\n _turtle.setAttribute(\"fill\",self._fillcolor)\n _turtle.setAttribute(\"stroke\",self._pencolor)\n \n \n \n previous_end,new_frame_id=self.screen._new_frame()\n x,y=self.screen._convert_coordinates(self._x,self._y)\n _turtle <=svg.animateMotion(begin=previous_end,dur=_CFG[\"min_duration\"],\n fill=\"remove\")\n \n _turtle <=svg.animateMotion(Id=new_frame_id,\n From=\"%s,%s\"%(x,y),to=\"%s,%s\"%(x,y),\n dur=_CFG[\"min_duration\"],begin=previous_end,\n fill=\"freeze\")\n _turtle <=svg.animateTransform(attributeName=\"transform\",\n type=\"rotate\",\n From=(self._old_heading,0,0),\n to=(self._old_heading,0,0),\n begin=previous_end,\n 
dur=_CFG[\"min_duration\"],fill=\"freeze\")\n _turtle <=svg.animate(begin=previous_end,\n dur=_CFG[\"min_duration\"],fill=\"freeze\",\n attributeName=\"opacity\",attributeType=\"XML\",\n From=0,to=1)\n return _turtle\n \n def stamp(self):\n ''\n \n _turtle=self._make_copy(name=self.name)\n self.screen.canvas <=_turtle\n \n \n def clone(self):\n ''\n \n n=Turtle(self.name)\n \n attrs=vars(self)\n new_dict={}\n for attr in attrs:\n if isinstance(getattr(self,attr),(int,str,float)):\n new_dict[attr]=getattr(self,attr)\n n.__dict__.update(**new_dict)\n \n if not n._shown:\n n._shown=True\n n.hideturtle()\n n.left(0)\n n.fd(0)\n n.color(n.color())\n return n\n \n \nPen=Turtle\n\n\ndef done():\n Screen().show_scene()\nshow_scene=done\n\n\ndef replay_scene():\n ''\n if (_CFG[\"turtle_canvas_id\"]in document and\n document[_CFG[\"turtle_canvas_id\"]]is not None ):\n element=document[_CFG[\"turtle_canvas_id\"]]\n element.parentNode.removeChild(element)\n show_scene()\n \n \ndef restart():\n ''\n _CFG.update(_cfg_copy)\n Screen().reset()\n Turtle._pen=None\n \n if (_CFG[\"turtle_canvas_id\"]in document and\n document[_CFG[\"turtle_canvas_id\"]]is not None ):\n element=document[_CFG[\"turtle_canvas_id\"]]\n element.parentNode.removeChild(element)\n \n \n \nimport inspect\ndef getmethparlist(ob):\n ''\n\n\n\n\n\n \n defText=callText=\"\"\n \n \n \n args,varargs,varkw=inspect.getargs(ob.__code__)\n items2=args[1:]\n realArgs=args[1:]\n defaults=ob.__defaults__ or []\n defaults=[\"=%r\"%(value,)for value in defaults]\n defaults=[\"\"]*(len(realArgs)-len(defaults))+defaults\n items1=[arg+dflt for arg,dflt in zip(realArgs,defaults)]\n if varargs is not None :\n items1.append(\"*\"+varargs)\n items2.append(\"*\"+varargs)\n if varkw is not None :\n items1.append(\"**\"+varkw)\n items2.append(\"**\"+varkw)\n defText=\", \".join(items1)\n defText=\"(%s)\"%defText\n callText=\", \".join(items2)\n callText=\"(%s)\"%callText\n return defText,callText\n \n_tg_screen_functions=['addshape','bgcolor','bgpic','bye',\n'clearscreen','colormode','delay','exitonclick','getcanvas',\n'getshapes','listen','mainloop','mode','numinput',\n'onkey','onkeypress','onkeyrelease','onscreenclick','ontimer',\n'register_shape','resetscreen','screensize','setup',\n'setworldcoordinates','textinput','title','tracer','turtles','update',\n'window_height','window_width']\n\n_tg_turtle_functions=['back','backward','begin_fill','begin_poly','bk',\n'circle','clear','clearstamp','clearstamps','clone','color',\n'degrees','distance','dot','down','end_fill','end_poly','fd',\n'fillcolor','filling','forward','get_poly','getpen','getscreen','get_shapepoly',\n'getturtle','goto','heading','hideturtle','home','ht','isdown',\n'isvisible','left','lt','onclick','ondrag','onrelease','pd',\n'pen','pencolor','pendown','pensize','penup','pos','position',\n'pu','radians','right','reset','resizemode','rt',\n'seth','setheading','setpos','setposition','settiltangle',\n'setundobuffer','setx','sety','shape','shapesize','shapetransform','shearfactor','showturtle',\n'speed','st','stamp','tilt','tiltangle','towards',\n'turtlesize','undo','undobufferentries','up','width',\n'write','xcor','ycor']\n\n\n__all__=(_tg_screen_functions+_tg_turtle_functions+\n['done','restart','replay_scene','Turtle','Screen'])\n\n\n\n\n\n__func_body=\"\"\"\\\ndef {name}{paramslist}:\n if {obj} is None:\n {obj} = {init}\n return {obj}.{name}{argslist}\n\"\"\"\ndef _make_global_funcs(functions,cls,obj,init):\n for methodname in functions:\n try :\n method=getattr(cls,methodname)\n except 
AttributeError:\n print(\"methodname missing:\",methodname)\n continue\n pl1,pl2=getmethparlist(method)\n defstr=__func_body.format(obj=obj,init=init,name=methodname,\n paramslist=pl1,argslist=pl2)\n exec(defstr,globals())\n \n_make_global_funcs(_tg_turtle_functions,Turtle,'Turtle._pen','Turtle()')\n\n_make_global_funcs(_tg_screen_functions,Screen,'Turtle.screen','Screen()')\n", ["_svg", "browser", "browser.html", "browser.timer", "copy", "inspect", "math", "sys"]], "types": [".py", "''\n\n\nimport sys\n\n\n\n\n\n\ndef _f():pass\nFunctionType=type(_f)\nLambdaType=type(lambda :None )\nCodeType=type(_f.__code__)\nMappingProxyType=type(type.__dict__)\nSimpleNamespace=type(sys.implementation)\n\ndef _cell_factory():\n a=1\n def f():\n nonlocal a\n return f.__closure__[0]\nCellType=type(_cell_factory())\n\ndef _g():\n yield 1\nGeneratorType=type(_g())\n\nasync def _c():pass\n_c=_c()\nCoroutineType=type(_c)\n_c.close()\n\nasync def _ag():\n yield\n_ag=_ag()\nAsyncGeneratorType=type(_ag)\n\nclass _C:\n def _m(self):pass\nMethodType=type(_C()._m)\n\nBuiltinFunctionType=type(len)\nBuiltinMethodType=type([].append)\n\nWrapperDescriptorType=type(object.__init__)\nMethodWrapperType=type(object().__str__)\nMethodDescriptorType=type(str.join)\nClassMethodDescriptorType=type(dict.__dict__['fromkeys'])\n\nModuleType=type(sys)\n\ntry :\n raise TypeError\nexcept TypeError:\n tb=sys.exc_info()[2]\n TracebackType=type(tb)\n FrameType=type(tb.tb_frame)\n tb=None ;del tb\n \n \nGetSetDescriptorType=type(FunctionType.__code__)\nMemberDescriptorType=type(FunctionType.__globals__)\n\ndel sys,_f,_g,_C,_c,_ag\n\n\n\ndef new_class(name,bases=(),kwds=None ,exec_body=None ):\n ''\n resolved_bases=resolve_bases(bases)\n meta,ns,kwds=prepare_class(name,resolved_bases,kwds)\n if exec_body is not None :\n exec_body(ns)\n if resolved_bases is not bases:\n ns['__orig_bases__']=bases\n return meta(name,resolved_bases,ns,**kwds)\n \ndef resolve_bases(bases):\n ''\n new_bases=list(bases)\n updated=False\n shift=0\n for i,base in enumerate(bases):\n if isinstance(base,type):\n continue\n if not hasattr(base,\"__mro_entries__\"):\n continue\n new_base=base.__mro_entries__(bases)\n updated=True\n if not isinstance(new_base,tuple):\n raise TypeError(\"__mro_entries__ must return a tuple\")\n else :\n new_bases[i+shift:i+shift+1]=new_base\n shift +=len(new_base)-1\n if not updated:\n return bases\n return tuple(new_bases)\n \ndef prepare_class(name,bases=(),kwds=None ):\n ''\n\n\n\n\n\n\n\n\n \n if kwds is None :\n kwds={}\n else :\n kwds=dict(kwds)\n if 'metaclass'in kwds:\n meta=kwds.pop('metaclass')\n else :\n if bases:\n meta=type(bases[0])\n else :\n meta=type\n if isinstance(meta,type):\n \n \n meta=_calculate_meta(meta,bases)\n if hasattr(meta,'__prepare__'):\n ns=meta.__prepare__(name,bases,**kwds)\n else :\n ns={}\n return meta,ns,kwds\n \ndef _calculate_meta(meta,bases):\n ''\n winner=meta\n for base in bases:\n base_meta=type(base)\n if issubclass(winner,base_meta):\n continue\n if issubclass(base_meta,winner):\n winner=base_meta\n continue\n \n raise TypeError(\"metaclass conflict: \"\n \"the metaclass of a derived class \"\n \"must be a (non-strict) subclass \"\n \"of the metaclasses of all its bases\")\n return winner\n \nclass DynamicClassAttribute:\n ''\n\n\n\n\n\n\n\n\n\n \n def __init__(self,fget=None ,fset=None ,fdel=None ,doc=None ):\n self.fget=fget\n self.fset=fset\n self.fdel=fdel\n \n self.__doc__=doc or fget.__doc__\n self.overwrite_doc=doc is None\n \n 
self.__isabstractmethod__=bool(getattr(fget,'__isabstractmethod__',False ))\n \n def __get__(self,instance,ownerclass=None ):\n if instance is None :\n if self.__isabstractmethod__:\n return self\n raise AttributeError()\n elif self.fget is None :\n raise AttributeError(\"unreadable attribute\")\n return self.fget(instance)\n \n def __set__(self,instance,value):\n if self.fset is None :\n raise AttributeError(\"can't set attribute\")\n self.fset(instance,value)\n \n def __delete__(self,instance):\n if self.fdel is None :\n raise AttributeError(\"can't delete attribute\")\n self.fdel(instance)\n \n def getter(self,fget):\n fdoc=fget.__doc__ if self.overwrite_doc else None\n result=type(self)(fget,self.fset,self.fdel,fdoc or self.__doc__)\n result.overwrite_doc=self.overwrite_doc\n return result\n \n def setter(self,fset):\n result=type(self)(self.fget,fset,self.fdel,self.__doc__)\n result.overwrite_doc=self.overwrite_doc\n return result\n \n def deleter(self,fdel):\n result=type(self)(self.fget,self.fset,fdel,self.__doc__)\n result.overwrite_doc=self.overwrite_doc\n return result\n \n \nclass _GeneratorWrapper:\n\n def __init__(self,gen):\n self.__wrapped=gen\n self.__isgen=gen.__class__ is GeneratorType\n self.__name__=getattr(gen,'__name__',None )\n self.__qualname__=getattr(gen,'__qualname__',None )\n def send(self,val):\n return self.__wrapped.send(val)\n def throw(self,tp,*rest):\n return self.__wrapped.throw(tp,*rest)\n def close(self):\n return self.__wrapped.close()\n @property\n def gi_code(self):\n return self.__wrapped.gi_code\n @property\n def gi_frame(self):\n return self.__wrapped.gi_frame\n @property\n def gi_running(self):\n return self.__wrapped.gi_running\n @property\n def gi_yieldfrom(self):\n return self.__wrapped.gi_yieldfrom\n cr_code=gi_code\n cr_frame=gi_frame\n cr_running=gi_running\n cr_await=gi_yieldfrom\n def __next__(self):\n return next(self.__wrapped)\n def __iter__(self):\n if self.__isgen:\n return self.__wrapped\n return self\n __await__=__iter__\n \ndef coroutine(func):\n ''\n \n if not callable(func):\n raise TypeError('types.coroutine() expects a callable')\n \n if (func.__class__ is FunctionType and\n getattr(func,'__code__',None ).__class__ is CodeType):\n \n co_flags=func.__code__.co_flags\n \n \n \n if co_flags&0x180:\n return func\n \n \n \n if co_flags&0x20:\n \n co=func.__code__\n \n func.__code__=co.replace(co_flags=co.co_flags |0x100)\n return func\n \n \n \n \n \n \n import functools\n import _collections_abc\n @functools.wraps(func)\n def wrapped(*args,**kwargs):\n coro=func(*args,**kwargs)\n if (coro.__class__ is CoroutineType or\n coro.__class__ is GeneratorType and coro.gi_code.co_flags&0x100):\n \n return coro\n if (isinstance(coro,_collections_abc.Generator)and\n not isinstance(coro,_collections_abc.Coroutine)):\n \n \n \n return _GeneratorWrapper(coro)\n \n \n return coro\n \n return wrapped\n \n \nGenericAlias=type(list[int])\n\n\n__all__=[n for n in globals()if n[:1]!='_']\n", ["_collections_abc", "functools", "sys"]], "typing": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nfrom abc import abstractmethod,ABCMeta\nimport collections\nimport collections.abc\nimport contextlib\nimport functools\nimport operator\nimport re as stdlib_re\nimport sys\nimport types\nfrom types import 
WrapperDescriptorType,MethodWrapperType,MethodDescriptorType,GenericAlias\n\n\n__all__=[\n\n'Annotated',\n'Any',\n'Callable',\n'ClassVar',\n'Final',\n'ForwardRef',\n'Generic',\n'Literal',\n'Optional',\n'Protocol',\n'Tuple',\n'Type',\n'TypeVar',\n'Union',\n\n\n'AbstractSet',\n'ByteString',\n'Container',\n'ContextManager',\n'Hashable',\n'ItemsView',\n'Iterable',\n'Iterator',\n'KeysView',\n'Mapping',\n'MappingView',\n'MutableMapping',\n'MutableSequence',\n'MutableSet',\n'Sequence',\n'Sized',\n'ValuesView',\n'Awaitable',\n'AsyncIterator',\n'AsyncIterable',\n'Coroutine',\n'Collection',\n'AsyncGenerator',\n'AsyncContextManager',\n\n\n'Reversible',\n'SupportsAbs',\n'SupportsBytes',\n'SupportsComplex',\n'SupportsFloat',\n'SupportsIndex',\n'SupportsInt',\n'SupportsRound',\n\n\n'ChainMap',\n'Counter',\n'Deque',\n'Dict',\n'DefaultDict',\n'List',\n'OrderedDict',\n'Set',\n'FrozenSet',\n'NamedTuple',\n'TypedDict',\n'Generator',\n\n\n'AnyStr',\n'cast',\n'final',\n'get_args',\n'get_origin',\n'get_type_hints',\n'NewType',\n'no_type_check',\n'no_type_check_decorator',\n'NoReturn',\n'overload',\n'runtime_checkable',\n'Text',\n'TYPE_CHECKING',\n]\n\n\n\n\n\n\ndef _type_check(arg,msg,is_argument=True ):\n ''\n\n\n\n\n\n\n\n\n\n \n invalid_generic_forms=(Generic,Protocol)\n if is_argument:\n invalid_generic_forms=invalid_generic_forms+(ClassVar,Final)\n \n if arg is None :\n return type(None )\n if isinstance(arg,str):\n return ForwardRef(arg)\n if (isinstance(arg,_GenericAlias)and\n arg.__origin__ in invalid_generic_forms):\n raise TypeError(f\"{arg} is not valid as type argument\")\n if arg in (Any,NoReturn):\n return arg\n if isinstance(arg,_SpecialForm)or arg in (Generic,Protocol):\n raise TypeError(f\"Plain {arg} is not valid as type argument\")\n if isinstance(arg,(type,TypeVar,ForwardRef)):\n return arg\n if not callable(arg):\n raise TypeError(f\"{msg} Got {arg!r:.100}.\")\n return arg\n \n \ndef _type_repr(obj):\n ''\n\n\n\n\n\n \n if isinstance(obj,type):\n if obj.__module__ =='builtins':\n return obj.__qualname__\n return f'{obj.__module__}.{obj.__qualname__}'\n if obj is ...:\n return ('...')\n if isinstance(obj,types.FunctionType):\n return obj.__name__\n return repr(obj)\n \n \ndef _collect_type_vars(types):\n ''\n\n\n\n \n tvars=[]\n for t in types:\n if isinstance(t,TypeVar)and t not in tvars:\n tvars.append(t)\n if isinstance(t,(_GenericAlias,GenericAlias)):\n tvars.extend([t for t in t.__parameters__ if t not in tvars])\n return tuple(tvars)\n \n \ndef _check_generic(cls,parameters,elen):\n ''\n\n \n if not elen:\n raise TypeError(f\"{cls} is not a generic class\")\n alen=len(parameters)\n if alen !=elen:\n raise TypeError(f\"Too {'many' if alen > elen else 'few'} parameters for {cls};\"\n f\" actual {alen}, expected {elen}\")\n \n \ndef _remove_dups_flatten(parameters):\n ''\n\n \n \n params=[]\n for p in parameters:\n if isinstance(p,_UnionGenericAlias):\n params.extend(p.__args__)\n elif isinstance(p,tuple)and len(p)>0 and p[0]is Union:\n params.extend(p[1:])\n else :\n params.append(p)\n \n all_params=set(params)\n if len(all_params)','eval')\n except SyntaxError:\n raise SyntaxError(f\"Forward reference must be an expression -- got {arg!r}\")\n self.__forward_arg__=arg\n self.__forward_code__=code\n self.__forward_evaluated__=False\n self.__forward_value__=None\n self.__forward_is_argument__=is_argument\n \n def _evaluate(self,globalns,localns,recursive_guard):\n if self.__forward_arg__ in recursive_guard:\n return self\n if not self.__forward_evaluated__ or localns is not globalns:\n if 
globalns is None and localns is None :\n globalns=localns={}\n elif globalns is None :\n globalns=localns\n elif localns is None :\n localns=globalns\n type_=_type_check(\n eval(self.__forward_code__,globalns,localns),\n \"Forward references must evaluate to types.\",\n is_argument=self.__forward_is_argument__,\n )\n self.__forward_value__=_eval_type(\n type_,globalns,localns,recursive_guard |{self.__forward_arg__}\n )\n self.__forward_evaluated__=True\n return self.__forward_value__\n \n def __eq__(self,other):\n if not isinstance(other,ForwardRef):\n return NotImplemented\n if self.__forward_evaluated__ and other.__forward_evaluated__:\n return (self.__forward_arg__ ==other.__forward_arg__ and\n self.__forward_value__ ==other.__forward_value__)\n return self.__forward_arg__ ==other.__forward_arg__\n \n def __hash__(self):\n return hash(self.__forward_arg__)\n \n def __repr__(self):\n return f'ForwardRef({self.__forward_arg__!r})'\n \n \nclass TypeVar(_Final,_Immutable,_root=True ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n __slots__=('__name__','__bound__','__constraints__',\n '__covariant__','__contravariant__','__dict__')\n \n def __init__(self,name,*constraints,bound=None ,\n covariant=False ,contravariant=False ):\n self.__name__=name\n if covariant and contravariant:\n raise ValueError(\"Bivariant types are not supported.\")\n self.__covariant__=bool(covariant)\n self.__contravariant__=bool(contravariant)\n if constraints and bound is not None :\n raise TypeError(\"Constraints cannot be combined with bound=...\")\n if constraints and len(constraints)==1:\n raise TypeError(\"A single constraint is not allowed\")\n msg=\"TypeVar(name, constraint, ...): constraints must be types.\"\n self.__constraints__=tuple(_type_check(t,msg)for t in constraints)\n if bound:\n self.__bound__=_type_check(bound,\"Bound must be a type.\")\n else :\n self.__bound__=None\n try :\n def_mod=sys._getframe(1).f_globals.get('__name__','__main__')\n except (AttributeError,ValueError):\n def_mod=None\n if def_mod !='typing':\n self.__module__=def_mod\n \n def __repr__(self):\n if self.__covariant__:\n prefix='+'\n elif self.__contravariant__:\n prefix='-'\n else :\n prefix='~'\n return prefix+self.__name__\n \n def __reduce__(self):\n return self.__name__\n \n \ndef _is_dunder(attr):\n return attr.startswith('__')and attr.endswith('__')\n \nclass _BaseGenericAlias(_Final,_root=True ):\n ''\n\n\n\n\n\n\n \n def __init__(self,origin,*,inst=True ,name=None ):\n self._inst=inst\n self._name=name\n self.__origin__=origin\n self.__slots__=None\n \n def __call__(self,*args,**kwargs):\n if not self._inst:\n raise TypeError(f\"Type {self._name} cannot be instantiated; \"\n f\"use {self.__origin__.__name__}() instead\")\n result=self.__origin__(*args,**kwargs)\n try :\n result.__orig_class__=self\n except AttributeError:\n pass\n return result\n \n def __mro_entries__(self,bases):\n res=[]\n if self.__origin__ not in bases:\n res.append(self.__origin__)\n i=bases.index(self)\n for b in bases[i+1:]:\n if isinstance(b,_BaseGenericAlias)or issubclass(b,Generic):\n break\n else :\n res.append(Generic)\n return tuple(res)\n \n def __getattr__(self,attr):\n \n \n if '__origin__'in self.__dict__ and not _is_dunder(attr):\n return getattr(self.__origin__,attr)\n raise AttributeError(attr)\n \n def __setattr__(self,attr,val):\n if _is_dunder(attr)or attr in ('_name','_inst','_nparams'):\n super().__setattr__(attr,val)\n else :\n setattr(self.__origin__,attr,val)\n \n def 
__instancecheck__(self,obj):\n return self.__subclasscheck__(type(obj))\n \n def __subclasscheck__(self,cls):\n raise TypeError(\"Subscripted generics cannot be used with\"\n \" class and instance checks\")\n \n \n \n \n \n \n \n \n \n \n \n \n \nclass _GenericAlias(_BaseGenericAlias,_root=True ):\n def __init__(self,origin,params,*,inst=True ,name=None ):\n super().__init__(origin,inst=inst,name=name)\n if not isinstance(params,tuple):\n params=(params,)\n self.__args__=tuple(...if a is _TypingEllipsis else\n ()if a is _TypingEmpty else\n a for a in params)\n self.__parameters__=_collect_type_vars(params)\n if not name:\n self.__module__=origin.__module__\n \n def __eq__(self,other):\n if not isinstance(other,_GenericAlias):\n return NotImplemented\n return (self.__origin__ ==other.__origin__\n and self.__args__ ==other.__args__)\n \n def __hash__(self):\n return hash((self.__origin__,self.__args__))\n \n @_tp_cache\n def __getitem__(self,params):\n if self.__origin__ in (Generic,Protocol):\n \n raise TypeError(f\"Cannot subscript already-subscripted {self}\")\n if not isinstance(params,tuple):\n params=(params,)\n msg=\"Parameters to generic types must be types.\"\n params=tuple(_type_check(p,msg)for p in params)\n _check_generic(self,params,len(self.__parameters__))\n \n subst=dict(zip(self.__parameters__,params))\n new_args=[]\n for arg in self.__args__:\n if isinstance(arg,TypeVar):\n arg=subst[arg]\n elif isinstance(arg,(_GenericAlias,GenericAlias)):\n subparams=arg.__parameters__\n if subparams:\n subargs=tuple(subst[x]for x in subparams)\n arg=arg[subargs]\n new_args.append(arg)\n return self.copy_with(tuple(new_args))\n \n def copy_with(self,params):\n return self.__class__(self.__origin__,params,name=self._name,inst=self._inst)\n \n def __repr__(self):\n if self._name:\n name='typing.'+self._name\n else :\n name=_type_repr(self.__origin__)\n args=\", \".join([_type_repr(a)for a in self.__args__])\n return f'{name}[{args}]'\n \n def __reduce__(self):\n if self._name:\n origin=globals()[self._name]\n else :\n origin=self.__origin__\n args=tuple(self.__args__)\n if len(args)==1 and not isinstance(args[0],tuple):\n args,=args\n return operator.getitem,(origin,args)\n \n def __mro_entries__(self,bases):\n if self._name:\n return super().__mro_entries__(bases)\n if self.__origin__ is Generic:\n if Protocol in bases:\n return ()\n i=bases.index(self)\n for b in bases[i+1:]:\n if isinstance(b,_BaseGenericAlias)and b is not self:\n return ()\n return (self.__origin__,)\n \n \n \n \n \n \nclass _SpecialGenericAlias(_BaseGenericAlias,_root=True ):\n def __init__(self,origin,nparams,*,inst=True ,name=None ):\n if name is None :\n name=origin.__name__\n super().__init__(origin,inst=inst,name=name)\n self._nparams=nparams\n if origin.__module__ =='builtins':\n self.__doc__=f'A generic version of {origin.__qualname__}.'\n else :\n self.__doc__=f'A generic version of {origin.__module__}.{origin.__qualname__}.'\n \n @_tp_cache\n def __getitem__(self,params):\n if not isinstance(params,tuple):\n params=(params,)\n msg=\"Parameters to generic types must be types.\"\n params=tuple(_type_check(p,msg)for p in params)\n _check_generic(self,params,self._nparams)\n return self.copy_with(params)\n \n def copy_with(self,params):\n return _GenericAlias(self.__origin__,params,\n name=self._name,inst=self._inst)\n \n def __repr__(self):\n return 'typing.'+self._name\n \n def __subclasscheck__(self,cls):\n if isinstance(cls,_SpecialGenericAlias):\n return issubclass(cls.__origin__,self.__origin__)\n if not 
isinstance(cls,_GenericAlias):\n return issubclass(cls,self.__origin__)\n return super().__subclasscheck__(cls)\n \n def __reduce__(self):\n return self._name\n \n \nclass _CallableGenericAlias(_GenericAlias,_root=True ):\n def __repr__(self):\n assert self._name =='Callable'\n if len(self.__args__)==2 and self.__args__[0]is Ellipsis:\n return super().__repr__()\n return (f'typing.Callable'\n f'[[{\", \".join([_type_repr(a) for a in self.__args__[:-1]])}], '\n f'{_type_repr(self.__args__[-1])}]')\n \n def __reduce__(self):\n args=self.__args__\n if not (len(args)==2 and args[0]is ...):\n args=list(args[:-1]),args[-1]\n return operator.getitem,(Callable,args)\n \n \nclass _CallableType(_SpecialGenericAlias,_root=True ):\n def copy_with(self,params):\n return _CallableGenericAlias(self.__origin__,params,\n name=self._name,inst=self._inst)\n \n def __getitem__(self,params):\n if not isinstance(params,tuple)or len(params)!=2:\n raise TypeError(\"Callable must be used as \"\n \"Callable[[arg, ...], result].\")\n args,result=params\n if args is Ellipsis:\n params=(Ellipsis,result)\n else :\n if not isinstance(args,list):\n raise TypeError(f\"Callable[args, result]: args must be a list.\"\n f\" Got {args}\")\n params=(tuple(args),result)\n return self.__getitem_inner__(params)\n \n @_tp_cache\n def __getitem_inner__(self,params):\n args,result=params\n msg=\"Callable[args, result]: result must be a type.\"\n result=_type_check(result,msg)\n if args is Ellipsis:\n return self.copy_with((_TypingEllipsis,result))\n msg=\"Callable[[arg, ...], result]: each arg must be a type.\"\n args=tuple(_type_check(arg,msg)for arg in args)\n params=args+(result,)\n return self.copy_with(params)\n \n \nclass _TupleType(_SpecialGenericAlias,_root=True ):\n @_tp_cache\n def __getitem__(self,params):\n if params ==():\n return self.copy_with((_TypingEmpty,))\n if not isinstance(params,tuple):\n params=(params,)\n if len(params)==2 and params[1]is ...:\n msg=\"Tuple[t, ...]: t must be a type.\"\n p=_type_check(params[0],msg)\n return self.copy_with((p,_TypingEllipsis))\n msg=\"Tuple[t0, t1, ...]: each t must be a type.\"\n params=tuple(_type_check(p,msg)for p in params)\n return self.copy_with(params)\n \n \nclass _UnionGenericAlias(_GenericAlias,_root=True ):\n def copy_with(self,params):\n return Union[params]\n \n def __eq__(self,other):\n if not isinstance(other,_UnionGenericAlias):\n return NotImplemented\n return set(self.__args__)==set(other.__args__)\n \n def __hash__(self):\n return hash(frozenset(self.__args__))\n \n def __repr__(self):\n args=self.__args__\n if len(args)==2:\n if args[0]is type(None ):\n return f'typing.Optional[{_type_repr(args[1])}]'\n elif args[1]is type(None ):\n return f'typing.Optional[{_type_repr(args[0])}]'\n return super().__repr__()\n \n \nclass Generic:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n __slots__=()\n _is_protocol=False\n \n @_tp_cache\n def __class_getitem__(cls,params):\n if not isinstance(params,tuple):\n params=(params,)\n if not params and cls is not Tuple:\n raise TypeError(\n f\"Parameter list to {cls.__qualname__}[...] cannot be empty\")\n msg=\"Parameters to generic types must be types.\"\n params=tuple(_type_check(p,msg)for p in params)\n if cls in (Generic,Protocol):\n \n if not all(isinstance(p,TypeVar)for p in params):\n raise TypeError(\n f\"Parameters to {cls.__name__}[...] must all be type variables\")\n if len(set(params))!=len(params):\n raise TypeError(\n f\"Parameters to {cls.__name__}[...] 
must all be unique\")\n else :\n \n _check_generic(cls,params,len(cls.__parameters__))\n return _GenericAlias(cls,params)\n \n def __init_subclass__(cls,*args,**kwargs):\n super().__init_subclass__(*args,**kwargs)\n tvars=[]\n if '__orig_bases__'in cls.__dict__:\n error=Generic in cls.__orig_bases__\n else :\n error=Generic in cls.__bases__ and cls.__name__ !='Protocol'\n if error:\n raise TypeError(\"Cannot inherit from plain Generic\")\n if '__orig_bases__'in cls.__dict__:\n tvars=_collect_type_vars(cls.__orig_bases__)\n \n \n \n \n \n gvars=None\n for base in cls.__orig_bases__:\n if (isinstance(base,_GenericAlias)and\n base.__origin__ is Generic):\n if gvars is not None :\n raise TypeError(\n \"Cannot inherit from Generic[...] multiple types.\")\n gvars=base.__parameters__\n if gvars is not None :\n tvarset=set(tvars)\n gvarset=set(gvars)\n if not tvarset <=gvarset:\n s_vars=', '.join(str(t)for t in tvars if t not in gvarset)\n s_args=', '.join(str(g)for g in gvars)\n raise TypeError(f\"Some type variables ({s_vars}) are\"\n f\" not listed in Generic[{s_args}]\")\n tvars=gvars\n cls.__parameters__=tuple(tvars)\n \n \nclass _TypingEmpty:\n ''\n\n\n \n \n \nclass _TypingEllipsis:\n ''\n \n \n_TYPING_INTERNALS=['__parameters__','__orig_bases__','__orig_class__',\n'_is_protocol','_is_runtime_protocol']\n\n_SPECIAL_NAMES=['__abstractmethods__','__annotations__','__dict__','__doc__',\n'__init__','__module__','__new__','__slots__',\n'__subclasshook__','__weakref__','__class_getitem__']\n\n\nEXCLUDED_ATTRIBUTES=_TYPING_INTERNALS+_SPECIAL_NAMES+['_MutableMapping__marker']\n\n\ndef _get_protocol_attrs(cls):\n ''\n\n\n\n \n attrs=set()\n for base in cls.__mro__[:-1]:\n if base.__name__ in ('Protocol','Generic'):\n continue\n annotations=getattr(base,'__annotations__',{})\n for attr in list(base.__dict__.keys())+list(annotations.keys()):\n if not attr.startswith('_abc_')and attr not in EXCLUDED_ATTRIBUTES:\n attrs.add(attr)\n return attrs\n \n \ndef _is_callable_members_only(cls):\n\n return all(callable(getattr(cls,attr,None ))for attr in _get_protocol_attrs(cls))\n \n \ndef _no_init(self,*args,**kwargs):\n if type(self)._is_protocol:\n raise TypeError('Protocols cannot be instantiated')\n \n \ndef _allow_reckless_class_cheks():\n ''\n\n\n\n \n try :\n return sys._getframe(3).f_globals['__name__']in ['abc','functools']\n except (AttributeError,ValueError):\n return True\n \n \n_PROTO_WHITELIST={\n'collections.abc':[\n'Callable','Awaitable','Iterable','Iterator','AsyncIterable',\n'Hashable','Sized','Container','Collection','Reversible',\n],\n'contextlib':['AbstractContextManager','AbstractAsyncContextManager'],\n}\n\n\nclass _ProtocolMeta(ABCMeta):\n\n\n def __instancecheck__(cls,instance):\n \n \n if ((not getattr(cls,'_is_protocol',False )or\n _is_callable_members_only(cls))and\n issubclass(instance.__class__,cls)):\n return True\n if cls._is_protocol:\n if all(hasattr(instance,attr)and\n \n (not callable(getattr(cls,attr,None ))or\n getattr(instance,attr)is not None )\n for attr in _get_protocol_attrs(cls)):\n return True\n return super().__instancecheck__(instance)\n \n \nclass Protocol(Generic,metaclass=_ProtocolMeta):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n __slots__=()\n _is_protocol=True\n _is_runtime_protocol=False\n \n def __init_subclass__(cls,*args,**kwargs):\n super().__init_subclass__(*args,**kwargs)\n \n \n if not cls.__dict__.get('_is_protocol',False ):\n cls._is_protocol=any(b is Protocol for b in cls.__bases__)\n \n \n def _proto_hook(other):\n if not 
cls.__dict__.get('_is_protocol',False ):\n return NotImplemented\n \n \n if not getattr(cls,'_is_runtime_protocol',False ):\n if _allow_reckless_class_cheks():\n return NotImplemented\n raise TypeError(\"Instance and class checks can only be used with\"\n \" @runtime_checkable protocols\")\n if not _is_callable_members_only(cls):\n if _allow_reckless_class_cheks():\n return NotImplemented\n raise TypeError(\"Protocols with non-method members\"\n \" don't support issubclass()\")\n if not isinstance(other,type):\n \n raise TypeError('issubclass() arg 1 must be a class')\n \n \n for attr in _get_protocol_attrs(cls):\n for base in other.__mro__:\n \n if attr in base.__dict__:\n if base.__dict__[attr]is None :\n return NotImplemented\n break\n \n \n annotations=getattr(base,'__annotations__',{})\n if (isinstance(annotations,collections.abc.Mapping)and\n attr in annotations and\n issubclass(other,Generic)and other._is_protocol):\n break\n else :\n return NotImplemented\n return True\n \n if '__subclasshook__'not in cls.__dict__:\n cls.__subclasshook__=_proto_hook\n \n \n if not cls._is_protocol:\n return\n \n \n for base in cls.__bases__:\n if not (base in (object,Generic)or\n base.__module__ in _PROTO_WHITELIST and\n base.__name__ in _PROTO_WHITELIST[base.__module__]or\n issubclass(base,Generic)and base._is_protocol):\n raise TypeError('Protocols can only inherit from other'\n ' protocols, got %r'%base)\n cls.__init__=_no_init\n \n \nclass _AnnotatedAlias(_GenericAlias,_root=True ):\n ''\n\n\n\n\n\n \n def __init__(self,origin,metadata):\n if isinstance(origin,_AnnotatedAlias):\n metadata=origin.__metadata__+metadata\n origin=origin.__origin__\n super().__init__(origin,origin)\n self.__metadata__=metadata\n \n def copy_with(self,params):\n assert len(params)==1\n new_type=params[0]\n return _AnnotatedAlias(new_type,self.__metadata__)\n \n def __repr__(self):\n return \"typing.Annotated[{}, {}]\".format(\n _type_repr(self.__origin__),\n \", \".join(repr(a)for a in self.__metadata__)\n )\n \n def __reduce__(self):\n return operator.getitem,(\n Annotated,(self.__origin__,)+self.__metadata__\n )\n \n def __eq__(self,other):\n if not isinstance(other,_AnnotatedAlias):\n return NotImplemented\n return (self.__origin__ ==other.__origin__\n and self.__metadata__ ==other.__metadata__)\n \n def __hash__(self):\n return hash((self.__origin__,self.__metadata__))\n \n \nclass Annotated:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n __slots__=()\n \n def __new__(cls,*args,**kwargs):\n raise TypeError(\"Type Annotated cannot be instantiated.\")\n \n @_tp_cache\n def __class_getitem__(cls,params):\n if not isinstance(params,tuple)or len(params)<2:\n raise TypeError(\"Annotated[...] 
should be used \"\n \"with at least two arguments (a type and an \"\n \"annotation).\")\n msg=\"Annotated[t, ...]: t must be a type.\"\n origin=_type_check(params[0],msg)\n metadata=tuple(params[1:])\n return _AnnotatedAlias(origin,metadata)\n \n def __init_subclass__(cls,*args,**kwargs):\n raise TypeError(\n \"Cannot subclass {}.Annotated\".format(cls.__module__)\n )\n \n \ndef runtime_checkable(cls):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if not issubclass(cls,Generic)or not cls._is_protocol:\n raise TypeError('@runtime_checkable can be only applied to protocol classes,'\n ' got %r'%cls)\n cls._is_runtime_protocol=True\n return cls\n \n \ndef cast(typ,val):\n ''\n\n\n\n\n\n \n return val\n \n \ndef _get_defaults(func):\n ''\n try :\n code=func.__code__\n except AttributeError:\n \n return {}\n pos_count=code.co_argcount\n arg_names=code.co_varnames\n arg_names=arg_names[:pos_count]\n defaults=func.__defaults__ or ()\n kwdefaults=func.__kwdefaults__\n res=dict(kwdefaults)if kwdefaults else {}\n pos_offset=pos_count -len(defaults)\n for name,value in zip(arg_names[pos_offset:],defaults):\n assert name not in res\n res[name]=value\n return res\n \n \n_allowed_types=(types.FunctionType,types.BuiltinFunctionType,\ntypes.MethodType,types.ModuleType,\nWrapperDescriptorType,MethodWrapperType,MethodDescriptorType)\n\n\ndef get_type_hints(obj,globalns=None ,localns=None ,include_extras=False ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n if getattr(obj,'__no_type_check__',None ):\n return {}\n \n if isinstance(obj,type):\n hints={}\n for base in reversed(obj.__mro__):\n if globalns is None :\n base_globals=sys.modules[base.__module__].__dict__\n else :\n base_globals=globalns\n ann=base.__dict__.get('__annotations__',{})\n for name,value in ann.items():\n if value is None :\n value=type(None )\n if isinstance(value,str):\n value=ForwardRef(value,is_argument=False )\n value=_eval_type(value,base_globals,localns)\n hints[name]=value\n return hints if include_extras else {k:_strip_annotations(t)for k,t in hints.items()}\n \n if globalns is None :\n if isinstance(obj,types.ModuleType):\n globalns=obj.__dict__\n else :\n nsobj=obj\n \n while hasattr(nsobj,'__wrapped__'):\n nsobj=nsobj.__wrapped__\n globalns=getattr(nsobj,'__globals__',{})\n if localns is None :\n localns=globalns\n elif localns is None :\n localns=globalns\n hints=getattr(obj,'__annotations__',None )\n if hints is None :\n \n if isinstance(obj,_allowed_types):\n return {}\n else :\n raise TypeError('{!r} is not a module, class, method, '\n 'or function.'.format(obj))\n defaults=_get_defaults(obj)\n hints=dict(hints)\n for name,value in hints.items():\n if value is None :\n value=type(None )\n if isinstance(value,str):\n value=ForwardRef(value)\n value=_eval_type(value,globalns,localns)\n if name in defaults and defaults[name]is None :\n value=Optional[value]\n hints[name]=value\n return hints if include_extras else {k:_strip_annotations(t)for k,t in hints.items()}\n \n \ndef _strip_annotations(t):\n ''\n \n if isinstance(t,_AnnotatedAlias):\n return _strip_annotations(t.__origin__)\n if isinstance(t,_GenericAlias):\n stripped_args=tuple(_strip_annotations(a)for a in t.__args__)\n if stripped_args ==t.__args__:\n return t\n return t.copy_with(stripped_args)\n if isinstance(t,GenericAlias):\n stripped_args=tuple(_strip_annotations(a)for a in t.__args__)\n if stripped_args ==t.__args__:\n return t\n return GenericAlias(t.__origin__,stripped_args)\n return t\n \n \ndef get_origin(tp):\n ''\n\n\n\n\n\n\n\n\n\n\n\n 
\n if isinstance(tp,_AnnotatedAlias):\n return Annotated\n if isinstance(tp,(_BaseGenericAlias,GenericAlias)):\n return tp.__origin__\n if tp is Generic:\n return Generic\n return None\n \n \ndef get_args(tp):\n ''\n\n\n\n\n\n\n\n\n \n if isinstance(tp,_AnnotatedAlias):\n return (tp.__origin__,)+tp.__metadata__\n if isinstance(tp,_GenericAlias):\n res=tp.__args__\n if tp.__origin__ is collections.abc.Callable and res[0]is not Ellipsis:\n res=(list(res[:-1]),res[-1])\n return res\n if isinstance(tp,GenericAlias):\n return tp.__args__\n return ()\n \n \ndef no_type_check(arg):\n ''\n\n\n\n\n\n\n \n if isinstance(arg,type):\n arg_attrs=arg.__dict__.copy()\n for attr,val in arg.__dict__.items():\n if val in arg.__bases__+(arg,):\n arg_attrs.pop(attr)\n for obj in arg_attrs.values():\n if isinstance(obj,types.FunctionType):\n obj.__no_type_check__=True\n if isinstance(obj,type):\n no_type_check(obj)\n try :\n arg.__no_type_check__=True\n except TypeError:\n pass\n return arg\n \n \ndef no_type_check_decorator(decorator):\n ''\n\n\n\n \n \n @functools.wraps(decorator)\n def wrapped_decorator(*args,**kwds):\n func=decorator(*args,**kwds)\n func=no_type_check(func)\n return func\n \n return wrapped_decorator\n \n \ndef _overload_dummy(*args,**kwds):\n ''\n raise NotImplementedError(\n \"You should not call an overloaded function. \"\n \"A series of @overload-decorated functions \"\n \"outside a stub module should always be followed \"\n \"by an implementation that is not @overload-ed.\")\n \n \ndef overload(func):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n return _overload_dummy\n \n \ndef final(f):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n return f\n \n \n \n \nT=TypeVar('T')\nKT=TypeVar('KT')\nVT=TypeVar('VT')\nT_co=TypeVar('T_co',covariant=True )\nV_co=TypeVar('V_co',covariant=True )\nVT_co=TypeVar('VT_co',covariant=True )\nT_contra=TypeVar('T_contra',contravariant=True )\n\nCT_co=TypeVar('CT_co',covariant=True ,bound=type)\n\n\n\nAnyStr=TypeVar('AnyStr',bytes,str)\n\n\n\n_alias=_SpecialGenericAlias\n\nHashable=_alias(collections.abc.Hashable,0)\nAwaitable=_alias(collections.abc.Awaitable,1)\nCoroutine=_alias(collections.abc.Coroutine,3)\nAsyncIterable=_alias(collections.abc.AsyncIterable,1)\nAsyncIterator=_alias(collections.abc.AsyncIterator,1)\nIterable=_alias(collections.abc.Iterable,1)\nIterator=_alias(collections.abc.Iterator,1)\nReversible=_alias(collections.abc.Reversible,1)\nSized=_alias(collections.abc.Sized,0)\nContainer=_alias(collections.abc.Container,1)\nCollection=_alias(collections.abc.Collection,1)\nCallable=_CallableType(collections.abc.Callable,2)\nCallable.__doc__=\\\n\"\"\"Callable type; Callable[[int], str] is a function of (int) -> str.\n\n The subscription syntax must always be used with exactly two\n values: the argument list and the return type. 
The argument list\n must be a list of types or ellipsis; the return type must be a single type.\n\n There is no syntax to indicate optional or keyword arguments,\n such function types are rarely used as callback types.\n \"\"\"\nAbstractSet=_alias(collections.abc.Set,1,name='AbstractSet')\nMutableSet=_alias(collections.abc.MutableSet,1)\n\nMapping=_alias(collections.abc.Mapping,2)\nMutableMapping=_alias(collections.abc.MutableMapping,2)\nSequence=_alias(collections.abc.Sequence,1)\nMutableSequence=_alias(collections.abc.MutableSequence,1)\nByteString=_alias(collections.abc.ByteString,0)\n\nTuple=_TupleType(tuple,-1,inst=False ,name='Tuple')\nTuple.__doc__=\\\n\"\"\"Tuple type; Tuple[X, Y] is the cross-product type of X and Y.\n\n Example: Tuple[T1, T2] is a tuple of two elements corresponding\n to type variables T1 and T2. Tuple[int, float, str] is a tuple\n of an int, a float and a string.\n\n To specify a variable-length tuple of homogeneous type, use Tuple[T, ...].\n \"\"\"\nList=_alias(list,1,inst=False ,name='List')\nDeque=_alias(collections.deque,1,name='Deque')\nSet=_alias(set,1,inst=False ,name='Set')\nFrozenSet=_alias(frozenset,1,inst=False ,name='FrozenSet')\nMappingView=_alias(collections.abc.MappingView,1)\nKeysView=_alias(collections.abc.KeysView,1)\nItemsView=_alias(collections.abc.ItemsView,2)\nValuesView=_alias(collections.abc.ValuesView,1)\nContextManager=_alias(contextlib.AbstractContextManager,1,name='ContextManager')\nAsyncContextManager=_alias(contextlib.AbstractAsyncContextManager,1,name='AsyncContextManager')\nDict=_alias(dict,2,inst=False ,name='Dict')\nDefaultDict=_alias(collections.defaultdict,2,name='DefaultDict')\nOrderedDict=_alias(collections.OrderedDict,2)\nCounter=_alias(collections.Counter,1)\nChainMap=_alias(collections.ChainMap,2)\nGenerator=_alias(collections.abc.Generator,3)\nAsyncGenerator=_alias(collections.abc.AsyncGenerator,2)\nType=_alias(type,1,inst=False ,name='Type')\nType.__doc__=\\\n\"\"\"A special construct usable to annotate class objects.\n\n For example, suppose we have the following classes::\n\n class User: ... 
# Abstract base for User classes\n class BasicUser(User): ...\n class ProUser(User): ...\n class TeamUser(User): ...\n\n And a function that takes a class argument that's a subclass of\n User and returns an instance of the corresponding class::\n\n U = TypeVar('U', bound=User)\n def new_user(user_class: Type[U]) -> U:\n user = user_class()\n # (Here we could write the user object to a database)\n return user\n\n joe = new_user(BasicUser)\n\n At this point the type checker knows that joe has type BasicUser.\n \"\"\"\n\n\n@runtime_checkable\nclass SupportsInt(Protocol):\n ''\n __slots__=()\n \n @abstractmethod\n def __int__(self)->int:\n pass\n \n \n@runtime_checkable\nclass SupportsFloat(Protocol):\n ''\n __slots__=()\n \n @abstractmethod\n def __float__(self)->float:\n pass\n \n \n@runtime_checkable\nclass SupportsComplex(Protocol):\n ''\n __slots__=()\n \n @abstractmethod\n def __complex__(self)->complex:\n pass\n \n \n@runtime_checkable\nclass SupportsBytes(Protocol):\n ''\n __slots__=()\n \n @abstractmethod\n def __bytes__(self)->bytes:\n pass\n \n \n@runtime_checkable\nclass SupportsIndex(Protocol):\n ''\n __slots__=()\n \n @abstractmethod\n def __index__(self)->int:\n pass\n \n \n@runtime_checkable\nclass SupportsAbs(Protocol[T_co]):\n ''\n __slots__=()\n \n @abstractmethod\n def __abs__(self)->T_co:\n pass\n \n \n@runtime_checkable\nclass SupportsRound(Protocol[T_co]):\n ''\n __slots__=()\n \n @abstractmethod\n def __round__(self,ndigits:int=0)->T_co:\n pass\n \n \ndef _make_nmtuple(name,types,module,defaults=()):\n fields=[n for n,t in types]\n types={n:_type_check(t,f\"field {n} annotation must be a type\")\n for n,t in types}\n nm_tpl=collections.namedtuple(name,fields,\n defaults=defaults,module=module)\n nm_tpl.__annotations__=nm_tpl.__new__.__annotations__=types\n return nm_tpl\n \n \n \n_prohibited=frozenset({'__new__','__init__','__slots__','__getnewargs__',\n'_fields','_field_defaults',\n'_make','_replace','_asdict','_source'})\n\n_special=frozenset({'__module__','__name__','__annotations__'})\n\n\nclass NamedTupleMeta(type):\n\n def __new__(cls,typename,bases,ns):\n assert bases[0]is _NamedTuple\n types=ns.get('__annotations__',{})\n default_names=[]\n for field_name in types:\n if field_name in ns:\n default_names.append(field_name)\n elif default_names:\n raise TypeError(f\"Non-default namedtuple field {field_name} \"\n f\"cannot follow default field\"\n f\"{'s' if len(default_names) > 1 else ''} \"\n f\"{', '.join(default_names)}\")\n nm_tpl=_make_nmtuple(typename,types.items(),\n defaults=[ns[n]for n in default_names],\n module=ns['__module__'])\n \n for key in ns:\n if key in _prohibited:\n raise AttributeError(\"Cannot overwrite NamedTuple attribute \"+key)\n elif key not in _special and key not in nm_tpl._fields:\n setattr(nm_tpl,key,ns[key])\n return nm_tpl\n \n \ndef NamedTuple(typename,fields=None ,/,**kwargs):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if fields is None :\n fields=kwargs.items()\n elif kwargs:\n raise TypeError(\"Either list of fields or keywords\"\n \" can be provided to NamedTuple, not both\")\n try :\n module=sys._getframe(1).f_globals.get('__name__','__main__')\n except (AttributeError,ValueError):\n module=None\n return _make_nmtuple(typename,fields,module=module)\n \n_NamedTuple=type.__new__(NamedTupleMeta,'NamedTuple',(),{})\n\ndef _namedtuple_mro_entries(bases):\n if len(bases)>1:\n raise TypeError(\"Multiple inheritance with NamedTuple is not supported\")\n assert bases[0]is NamedTuple\n return (_NamedTuple,)\n 
\nNamedTuple.__mro_entries__=_namedtuple_mro_entries\n\n\nclass _TypedDictMeta(type):\n def __new__(cls,name,bases,ns,total=True ):\n ''\n\n\n\n\n\n \n for base in bases:\n if type(base)is not _TypedDictMeta:\n raise TypeError('cannot inherit from both a TypedDict type '\n 'and a non-TypedDict base class')\n tp_dict=type.__new__(_TypedDictMeta,name,(dict,),ns)\n \n annotations={}\n own_annotations=ns.get('__annotations__',{})\n own_annotation_keys=set(own_annotations.keys())\n msg=\"TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type\"\n own_annotations={\n n:_type_check(tp,msg)for n,tp in own_annotations.items()\n }\n required_keys=set()\n optional_keys=set()\n \n for base in bases:\n annotations.update(base.__dict__.get('__annotations__',{}))\n required_keys.update(base.__dict__.get('__required_keys__',()))\n optional_keys.update(base.__dict__.get('__optional_keys__',()))\n \n annotations.update(own_annotations)\n if total:\n required_keys.update(own_annotation_keys)\n else :\n optional_keys.update(own_annotation_keys)\n \n tp_dict.__annotations__=annotations\n tp_dict.__required_keys__=frozenset(required_keys)\n tp_dict.__optional_keys__=frozenset(optional_keys)\n if not hasattr(tp_dict,'__total__'):\n tp_dict.__total__=total\n return tp_dict\n \n __call__=dict\n \n def __subclasscheck__(cls,other):\n \n raise TypeError('TypedDict does not support instance and class checks')\n \n __instancecheck__=__subclasscheck__\n \n \ndef TypedDict(typename,fields=None ,/,*,total=True ,**kwargs):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if fields is None :\n fields=kwargs\n elif kwargs:\n raise TypeError(\"TypedDict takes either a dict or keyword arguments,\"\n \" but not both\")\n \n ns={'__annotations__':dict(fields),'__total__':total}\n try :\n \n ns['__module__']=sys._getframe(1).f_globals.get('__name__','__main__')\n except (AttributeError,ValueError):\n pass\n \n return _TypedDictMeta(typename,(),ns)\n \n_TypedDict=type.__new__(_TypedDictMeta,'TypedDict',(),{})\nTypedDict.__mro_entries__=lambda bases:(_TypedDict,)\n\n\ndef NewType(name,tp):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def new_type(x):\n return x\n \n new_type.__name__=name\n new_type.__supertype__=tp\n return new_type\n \n \n \nText=str\n\n\n\nTYPE_CHECKING=False\n\n\nclass IO(Generic[AnyStr]):\n ''\n\n\n\n\n\n\n\n\n\n \n \n __slots__=()\n \n @property\n @abstractmethod\n def mode(self)->str:\n pass\n \n @property\n @abstractmethod\n def name(self)->str:\n pass\n \n @abstractmethod\n def close(self)->None :\n pass\n \n @property\n @abstractmethod\n def closed(self)->bool:\n pass\n \n @abstractmethod\n def fileno(self)->int:\n pass\n \n @abstractmethod\n def flush(self)->None :\n pass\n \n @abstractmethod\n def isatty(self)->bool:\n pass\n \n @abstractmethod\n def read(self,n:int=-1)->AnyStr:\n pass\n \n @abstractmethod\n def readable(self)->bool:\n pass\n \n @abstractmethod\n def readline(self,limit:int=-1)->AnyStr:\n pass\n \n @abstractmethod\n def readlines(self,hint:int=-1)->List[AnyStr]:\n pass\n \n @abstractmethod\n def seek(self,offset:int,whence:int=0)->int:\n pass\n \n @abstractmethod\n def seekable(self)->bool:\n pass\n \n @abstractmethod\n def tell(self)->int:\n pass\n \n @abstractmethod\n def truncate(self,size:int=None )->int:\n pass\n \n @abstractmethod\n def writable(self)->bool:\n pass\n \n @abstractmethod\n def write(self,s:AnyStr)->int:\n pass\n \n @abstractmethod\n def writelines(self,lines:List[AnyStr])->None :\n pass\n \n @abstractmethod\n def 
__enter__(self)->'IO[AnyStr]':\n pass\n \n @abstractmethod\n def __exit__(self,type,value,traceback)->None :\n pass\n \n \nclass BinaryIO(IO[bytes]):\n ''\n \n __slots__=()\n \n @abstractmethod\n def write(self,s:Union[bytes,bytearray])->int:\n pass\n \n @abstractmethod\n def __enter__(self)->'BinaryIO':\n pass\n \n \nclass TextIO(IO[str]):\n ''\n \n __slots__=()\n \n @property\n @abstractmethod\n def buffer(self)->BinaryIO:\n pass\n \n @property\n @abstractmethod\n def encoding(self)->str:\n pass\n \n @property\n @abstractmethod\n def errors(self)->Optional[str]:\n pass\n \n @property\n @abstractmethod\n def line_buffering(self)->bool:\n pass\n \n @property\n @abstractmethod\n def newlines(self)->Any:\n pass\n \n @abstractmethod\n def __enter__(self)->'TextIO':\n pass\n \n \nclass io:\n ''\n \n __all__=['IO','TextIO','BinaryIO']\n IO=IO\n TextIO=TextIO\n BinaryIO=BinaryIO\n \n \nio.__name__=__name__+'.io'\nsys.modules[io.__name__]=io\n\nPattern=_alias(stdlib_re.Pattern,1)\nMatch=_alias(stdlib_re.Match,1)\n\nclass re:\n ''\n \n __all__=['Pattern','Match']\n Pattern=Pattern\n Match=Match\n \n \nre.__name__=__name__+'.re'\nsys.modules[re.__name__]=re\n", ["abc", "collections", "collections.abc", "contextlib", "functools", "operator", "re", "sys", "types"]], "uu": [".py", "#! /usr/bin/env python3\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\"\"\"Implementation of the UUencode and UUdecode functions.\n\nencode(in_file, out_file [,name, mode], *, backtick=False)\ndecode(in_file [, out_file, mode, quiet])\n\"\"\"\n\nimport binascii\nimport os\nimport sys\n\n__all__=[\"Error\",\"encode\",\"decode\"]\n\nclass Error(Exception):\n pass\n \ndef encode(in_file,out_file,name=None ,mode=None ,*,backtick=False ):\n ''\n \n \n \n opened_files=[]\n try :\n if in_file =='-':\n in_file=sys.stdin.buffer\n elif isinstance(in_file,str):\n if name is None :\n name=os.path.basename(in_file)\n if mode is None :\n try :\n mode=os.stat(in_file).st_mode\n except AttributeError:\n pass\n in_file=open(in_file,'rb')\n opened_files.append(in_file)\n \n \n \n if out_file =='-':\n out_file=sys.stdout.buffer\n elif isinstance(out_file,str):\n out_file=open(out_file,'wb')\n opened_files.append(out_file)\n \n \n \n if name is None :\n name='-'\n if mode is None :\n mode=0o666\n \n \n \n \n name=name.replace('\\n','\\\\n')\n name=name.replace('\\r','\\\\r')\n \n \n \n \n out_file.write(('begin %o %s\\n'%((mode&0o777),name)).encode(\"ascii\"))\n data=in_file.read(45)\n while len(data)>0:\n out_file.write(binascii.b2a_uu(data,backtick=backtick))\n data=in_file.read(45)\n if backtick:\n out_file.write(b'`\\nend\\n')\n else :\n out_file.write(b' \\nend\\n')\n finally :\n for f in opened_files:\n f.close()\n \n \ndef decode(in_file,out_file=None ,mode=None ,quiet=False ):\n ''\n \n \n \n opened_files=[]\n if in_file =='-':\n in_file=sys.stdin.buffer\n elif isinstance(in_file,str):\n in_file=open(in_file,'rb')\n opened_files.append(in_file)\n \n try :\n \n \n \n while True :\n hdr=in_file.readline()\n if not hdr:\n raise Error('No valid begin line found in input file')\n if not hdr.startswith(b'begin'):\n continue\n hdrfields=hdr.split(b' ',2)\n if len(hdrfields)==3 and hdrfields[0]==b'begin':\n try :\n int(hdrfields[1],8)\n break\n except ValueError:\n pass\n if out_file is None :\n \n out_file=hdrfields[2].rstrip(b' \\t\\r\\n\\f').decode(\"ascii\")\n if os.path.exists(out_file):\n raise Error('Cannot overwrite existing file: %s'%out_file)\n if mode is None :\n mode=int(hdrfields[1],8)\n \n \n \n if out_file =='-':\n 
out_file=sys.stdout.buffer\n elif isinstance(out_file,str):\n fp=open(out_file,'wb')\n os.chmod(out_file,mode)\n out_file=fp\n opened_files.append(out_file)\n \n \n \n s=in_file.readline()\n while s and s.strip(b' \\t\\r\\n\\f')!=b'end':\n try :\n data=binascii.a2b_uu(s)\n except binascii.Error as v:\n \n nbytes=(((s[0]-32)&63)*4+5)//3\n data=binascii.a2b_uu(s[:nbytes])\n if not quiet:\n sys.stderr.write(\"Warning: %s\\n\"%v)\n out_file.write(data)\n s=in_file.readline()\n if not s:\n raise Error('Truncated input file')\n finally :\n for f in opened_files:\n f.close()\n \ndef test():\n ''\n \n import optparse\n parser=optparse.OptionParser(usage='usage: %prog [-d] [-t] [input [output]]')\n parser.add_option('-d','--decode',dest='decode',help='Decode (instead of encode)?',default=False ,action='store_true')\n parser.add_option('-t','--text',dest='text',help='data is text, encoded format unix-compatible text?',default=False ,action='store_true')\n \n (options,args)=parser.parse_args()\n if len(args)>2:\n parser.error('incorrect number of arguments')\n sys.exit(1)\n \n \n input=sys.stdin.buffer\n output=sys.stdout.buffer\n if len(args)>0:\n input=args[0]\n if len(args)>1:\n output=args[1]\n \n if options.decode:\n if options.text:\n if isinstance(output,str):\n output=open(output,'wb')\n else :\n print(sys.argv[0],': cannot do -t to stdout')\n sys.exit(1)\n decode(input,output)\n else :\n if options.text:\n if isinstance(input,str):\n input=open(input,'rb')\n else :\n print(sys.argv[0],': cannot do -t from stdin')\n sys.exit(1)\n encode(input,output)\n \nif __name__ =='__main__':\n test()\n", ["binascii", "optparse", "os", "sys"]], "uuid": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nimport os\nimport sys\n\nfrom enum import Enum\n\n\n__author__='Ka-Ping Yee '\n\n\nif sys.platform in ('win32','darwin'):\n _AIX=_LINUX=False\nelse :\n import platform\n _platform_system=platform.system()\n _AIX=_platform_system =='AIX'\n _LINUX=_platform_system =='Linux'\n \n_MAC_DELIM=b':'\n_MAC_OMITS_LEADING_ZEROES=False\nif _AIX:\n _MAC_DELIM=b'.'\n _MAC_OMITS_LEADING_ZEROES=True\n \nRESERVED_NCS,RFC_4122,RESERVED_MICROSOFT,RESERVED_FUTURE=[\n'reserved for NCS compatibility','specified in RFC 4122',\n'reserved for Microsoft compatibility','reserved for future definition']\n\nint_=int\nbytes_=bytes\n\n\nclass SafeUUID(Enum):\n safe=0\n unsafe=-1\n unknown=None\n \n \nclass UUID:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n __slots__=('int','is_safe','__weakref__')\n \n def __init__(self,hex=None ,bytes=None ,bytes_le=None ,fields=None ,\n int=None ,version=None ,\n *,is_safe=SafeUUID.unknown):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n if [hex,bytes,bytes_le,fields,int].count(None )!=4:\n raise TypeError('one of the hex, bytes, bytes_le, fields, '\n 'or int arguments must be given')\n if hex is not None :\n hex=hex.replace('urn:','').replace('uuid:','')\n hex=hex.strip('{}').replace('-','')\n if len(hex)!=32:\n raise ValueError('badly formed hexadecimal UUID string')\n int=int_(hex,16)\n if bytes_le is not None :\n if len(bytes_le)!=16:\n raise ValueError('bytes_le is not a 16-char string')\n bytes=(bytes_le[4 -1::-1]+bytes_le[6 -1:4 -1:-1]+\n bytes_le[8 -1:6 -1:-1]+bytes_le[8:])\n if bytes is not None :\n if len(bytes)!=16:\n raise ValueError('bytes is not a 16-char string')\n assert isinstance(bytes,bytes_),repr(bytes)\n int=int_.from_bytes(bytes,byteorder='big')\n 
if fields is not None :\n if len(fields)!=6:\n raise ValueError('fields is not a 6-tuple')\n (time_low,time_mid,time_hi_version,\n clock_seq_hi_variant,clock_seq_low,node)=fields\n if not 0 <=time_low <1 <<32:\n raise ValueError('field 1 out of range (need a 32-bit value)')\n if not 0 <=time_mid <1 <<16:\n raise ValueError('field 2 out of range (need a 16-bit value)')\n if not 0 <=time_hi_version <1 <<16:\n raise ValueError('field 3 out of range (need a 16-bit value)')\n if not 0 <=clock_seq_hi_variant <1 <<8:\n raise ValueError('field 4 out of range (need an 8-bit value)')\n if not 0 <=clock_seq_low <1 <<8:\n raise ValueError('field 5 out of range (need an 8-bit value)')\n if not 0 <=node <1 <<48:\n raise ValueError('field 6 out of range (need a 48-bit value)')\n clock_seq=(clock_seq_hi_variant <<8)|clock_seq_low\n int=((time_low <<96)|(time_mid <<80)|\n (time_hi_version <<64)|(clock_seq <<48)|node)\n if int is not None :\n if not 0 <=int <1 <<128:\n raise ValueError('int is out of range (need a 128-bit value)')\n if version is not None :\n if not 1 <=version <=5:\n raise ValueError('illegal version number')\n \n int &=~(0xc000 <<48)\n int |=0x8000 <<48\n \n int &=~(0xf000 <<64)\n int |=version <<76\n object.__setattr__(self,'int',int)\n object.__setattr__(self,'is_safe',is_safe)\n \n def __getstate__(self):\n d={'int':self.int}\n if self.is_safe !=SafeUUID.unknown:\n \n \n d['is_safe']=self.is_safe.value\n return d\n \n def __setstate__(self,state):\n object.__setattr__(self,'int',state['int'])\n \n object.__setattr__(self,'is_safe',\n SafeUUID(state['is_safe'])\n if 'is_safe'in state else SafeUUID.unknown)\n \n def __eq__(self,other):\n if isinstance(other,UUID):\n return self.int ==other.int\n return NotImplemented\n \n \n \n \n def __lt__(self,other):\n if isinstance(other,UUID):\n return self.int other.int\n return NotImplemented\n \n def __le__(self,other):\n if isinstance(other,UUID):\n return self.int <=other.int\n return NotImplemented\n \n def __ge__(self,other):\n if isinstance(other,UUID):\n return self.int >=other.int\n return NotImplemented\n \n def __hash__(self):\n return hash(self.int)\n \n def __int__(self):\n return self.int\n \n def __repr__(self):\n return '%s(%r)'%(self.__class__.__name__,str(self))\n \n def __setattr__(self,name,value):\n raise TypeError('UUID objects are immutable')\n \n def __str__(self):\n hex='%032x'%self.int\n return '%s-%s-%s-%s-%s'%(\n hex[:8],hex[8:12],hex[12:16],hex[16:20],hex[20:])\n \n @property\n def bytes(self):\n return self.int.to_bytes(16,'big')\n \n @property\n def bytes_le(self):\n bytes=self.bytes\n return (bytes[4 -1::-1]+bytes[6 -1:4 -1:-1]+bytes[8 -1:6 -1:-1]+\n bytes[8:])\n \n @property\n def fields(self):\n return (self.time_low,self.time_mid,self.time_hi_version,\n self.clock_seq_hi_variant,self.clock_seq_low,self.node)\n \n @property\n def time_low(self):\n return self.int >>96\n \n @property\n def time_mid(self):\n return (self.int >>80)&0xffff\n \n @property\n def time_hi_version(self):\n return (self.int >>64)&0xffff\n \n @property\n def clock_seq_hi_variant(self):\n return (self.int >>56)&0xff\n \n @property\n def clock_seq_low(self):\n return (self.int >>48)&0xff\n \n @property\n def time(self):\n return (((self.time_hi_version&0x0fff)<<48)|\n (self.time_mid <<32)|self.time_low)\n \n @property\n def clock_seq(self):\n return (((self.clock_seq_hi_variant&0x3f)<<8)|\n self.clock_seq_low)\n \n @property\n def node(self):\n return self.int&0xffffffffffff\n \n @property\n def hex(self):\n return '%032x'%self.int\n \n 
@property\n def urn(self):\n return 'urn:uuid:'+str(self)\n \n @property\n def variant(self):\n if not self.int&(0x8000 <<48):\n return RESERVED_NCS\n elif not self.int&(0x4000 <<48):\n return RFC_4122\n elif not self.int&(0x2000 <<48):\n return RESERVED_MICROSOFT\n else :\n return RESERVED_FUTURE\n \n @property\n def version(self):\n \n if self.variant ==RFC_4122:\n return int((self.int >>76)&0xf)\n \n \ndef _get_command_stdout(command,*args):\n import io,os,shutil,subprocess\n \n try :\n path_dirs=os.environ.get('PATH',os.defpath).split(os.pathsep)\n path_dirs.extend(['/sbin','/usr/sbin'])\n executable=shutil.which(command,path=os.pathsep.join(path_dirs))\n if executable is None :\n return None\n \n \n \n env=dict(os.environ)\n env['LC_ALL']='C'\n proc=subprocess.Popen((executable,)+args,\n stdout=subprocess.PIPE,\n stderr=subprocess.DEVNULL,\n env=env)\n if not proc:\n return None\n stdout,stderr=proc.communicate()\n return io.BytesIO(stdout)\n except (OSError,subprocess.SubprocessError):\n return None\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \ndef _is_universal(mac):\n return not (mac&(1 <<41))\n \n \ndef _find_mac_near_keyword(command,args,keywords,get_word_index):\n ''\n\n\n\n\n\n\n \n stdout=_get_command_stdout(command,args)\n if stdout is None :\n return None\n \n first_local_mac=None\n for line in stdout:\n words=line.lower().rstrip().split()\n for i in range(len(words)):\n if words[i]in keywords:\n try :\n word=words[get_word_index(i)]\n mac=int(word.replace(_MAC_DELIM,b''),16)\n except (ValueError,IndexError):\n \n \n \n \n \n pass\n else :\n if _is_universal(mac):\n return mac\n first_local_mac=first_local_mac or mac\n return first_local_mac or None\n \n \ndef _parse_mac(word):\n\n\n\n\n\n\n parts=word.split(_MAC_DELIM)\n if len(parts)!=6:\n return\n if _MAC_OMITS_LEADING_ZEROES:\n \n \n \n \n if not all(1 <=len(part)<=2 for part in parts):\n return\n hexstr=b''.join(part.rjust(2,b'0')for part in parts)\n else :\n if not all(len(part)==2 for part in parts):\n return\n hexstr=b''.join(parts)\n try :\n return int(hexstr,16)\n except ValueError:\n return\n \n \ndef _find_mac_under_heading(command,args,heading):\n ''\n\n\n\n\n \n stdout=_get_command_stdout(command,args)\n if stdout is None :\n return None\n \n keywords=stdout.readline().rstrip().split()\n try :\n column_index=keywords.index(heading)\n except ValueError:\n return None\n \n first_local_mac=None\n for line in stdout:\n words=line.rstrip().split()\n try :\n word=words[column_index]\n except IndexError:\n continue\n \n mac=_parse_mac(word)\n if mac is None :\n continue\n if _is_universal(mac):\n return mac\n if first_local_mac is None :\n first_local_mac=mac\n \n return first_local_mac\n \n \n \n \ndef _ifconfig_getnode():\n ''\n \n keywords=(b'hwaddr',b'ether',b'address:',b'lladdr')\n for args in ('','-a','-av'):\n mac=_find_mac_near_keyword('ifconfig',args,keywords,lambda i:i+1)\n if mac:\n return mac\n return None\n \ndef _ip_getnode():\n ''\n \n mac=_find_mac_near_keyword('ip','link',[b'link/ether'],lambda i:i+1)\n if mac:\n return mac\n return None\n \ndef _arp_getnode():\n ''\n import os,socket\n try :\n ip_addr=socket.gethostbyname(socket.gethostname())\n except OSError:\n return None\n \n \n mac=_find_mac_near_keyword('arp','-an',[os.fsencode(ip_addr)],lambda i:-1)\n if mac:\n return mac\n \n \n mac=_find_mac_near_keyword('arp','-an',[os.fsencode(ip_addr)],lambda i:i+1)\n if mac:\n return mac\n \n \n mac=_find_mac_near_keyword('arp','-an',[os.fsencode('(%s)'%ip_addr)],\n lambda i:i+2)\n \n if mac:\n 
return mac\n return None\n \ndef _lanscan_getnode():\n ''\n \n return _find_mac_near_keyword('lanscan','-ai',[b'lan0'],lambda i:0)\n \ndef _netstat_getnode():\n ''\n \n return _find_mac_under_heading('netstat','-ian',b'Address')\n \ndef _ipconfig_getnode():\n ''\n \n return _windll_getnode()\n \ndef _netbios_getnode():\n ''\n \n return _windll_getnode()\n \n \n \ntry :\n import _uuid\n _generate_time_safe=getattr(_uuid,\"generate_time_safe\",None )\n _UuidCreate=getattr(_uuid,\"UuidCreate\",None )\n _has_uuid_generate_time_safe=_uuid.has_uuid_generate_time_safe\nexcept ImportError:\n _uuid=None\n _generate_time_safe=None\n _UuidCreate=None\n _has_uuid_generate_time_safe=None\n \n \ndef _load_system_functions():\n ''\n \n \ndef _unix_getnode():\n ''\n if _generate_time_safe:\n uuid_time,_=_generate_time_safe()\n return UUID(bytes=uuid_time).node\n \ndef _windll_getnode():\n ''\n if _UuidCreate:\n uuid_bytes=_UuidCreate()\n return UUID(bytes_le=uuid_bytes).node\n \ndef _random_getnode():\n ''\n \n \n \n \n \n \n \n \n \n \n import random\n return random.getrandbits(48)|(1 <<40)\n \n \n \n \n \n \n \n \nif _LINUX:\n _OS_GETTERS=[_ip_getnode,_ifconfig_getnode]\nelif sys.platform =='darwin':\n _OS_GETTERS=[_ifconfig_getnode,_arp_getnode,_netstat_getnode]\nelif sys.platform =='win32':\n\n _OS_GETTERS=[]\nelif _AIX:\n _OS_GETTERS=[_netstat_getnode]\nelse :\n _OS_GETTERS=[_ifconfig_getnode,_ip_getnode,_arp_getnode,\n _netstat_getnode,_lanscan_getnode]\nif os.name =='posix':\n _GETTERS=[_unix_getnode]+_OS_GETTERS\nelif os.name =='nt':\n _GETTERS=[_windll_getnode]+_OS_GETTERS\nelse :\n _GETTERS=_OS_GETTERS\n \n_node=None\n\ndef getnode():\n ''\n\n\n\n\n\n \n global _node\n if _node is not None :\n return _node\n \n for getter in _GETTERS+[_random_getnode]:\n try :\n _node=getter()\n except :\n continue\n if (_node is not None )and (0 <=_node <(1 <<48)):\n return _node\n assert False ,'_random_getnode() returned invalid value: {}'.format(_node)\n \n \n_last_timestamp=None\n\ndef uuid1(node=None ,clock_seq=None ):\n ''\n\n\n \n \n \n \n if _generate_time_safe is not None and node is clock_seq is None :\n uuid_time,safely_generated=_generate_time_safe()\n try :\n is_safe=SafeUUID(safely_generated)\n except ValueError:\n is_safe=SafeUUID.unknown\n return UUID(bytes=uuid_time,is_safe=is_safe)\n \n global _last_timestamp\n import time\n nanoseconds=time.time_ns()\n \n \n timestamp=nanoseconds //100+0x01b21dd213814000\n if _last_timestamp is not None and timestamp <=_last_timestamp:\n timestamp=_last_timestamp+1\n _last_timestamp=timestamp\n if clock_seq is None :\n import random\n clock_seq=random.getrandbits(14)\n time_low=timestamp&0xffffffff\n time_mid=(timestamp >>32)&0xffff\n time_hi_version=(timestamp >>48)&0x0fff\n clock_seq_low=clock_seq&0xff\n clock_seq_hi_variant=(clock_seq >>8)&0x3f\n if node is None :\n node=getnode()\n return UUID(fields=(time_low,time_mid,time_hi_version,\n clock_seq_hi_variant,clock_seq_low,node),version=1)\n \ndef uuid3(namespace,name):\n ''\n from hashlib import md5\n digest=md5(\n namespace.bytes+bytes(name,\"utf-8\"),\n usedforsecurity=False\n ).digest()\n return UUID(bytes=digest[:16],version=3)\n \ndef uuid4():\n ''\n return UUID(bytes=os.urandom(16),version=4)\n \ndef uuid5(namespace,name):\n ''\n from hashlib import sha1\n hash=sha1(namespace.bytes+bytes(name,\"utf-8\")).digest()\n return UUID(bytes=hash[:16],version=5)\n \n \n 
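\n# The predefined namespace UUIDs below come from RFC 4122 and are intended as\n# the first argument of uuid3()/uuid5(); for example,\n# uuid5(NAMESPACE_DNS, 'python.org') always derives the same name-based UUID.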
\nNAMESPACE_DNS=UUID('6ba7b810-9dad-11d1-80b4-00c04fd430c8')\nNAMESPACE_URL=UUID('6ba7b811-9dad-11d1-80b4-00c04fd430c8')\nNAMESPACE_OID=UUID('6ba7b812-9dad-11d1-80b4-00c04fd430c8')\nNAMESPACE_X500=UUID('6ba7b814-9dad-11d1-80b4-00c04fd430c8')\n", ["_uuid", "enum", "hashlib", "io", "os", "platform", "random", "shutil", "socket", "subprocess", "sys", "time"]], "VFS_import": [".py", "import os\nimport sys\nfrom browser import doc\n\n\n\n\n\n\nVFS=dict(JSObject(__BRYTHON__.py_VFS))\nclass VFSModuleFinder:\n def __init__(self,path_entry):\n print(\"in VFSModuleFinder\")\n if path_entry.startswith('/libs')or path_entry.startswith('/Lib'):\n self.path_entry=path_entry\n else :\n raise ImportError()\n \n def __str__(self):\n return '<%s for \"%s\">'%(self.__class__.__name__,self.path_entry)\n \n def find_module(self,fullname,path=None ):\n path=path or self.path_entry\n \n for _ext in ['js','pyj','py']:\n _filepath=os.path.join(self.path_entry,'%s.%s'%(fullname,_ext))\n if _filepath in VFS:\n print(\"module found at %s:%s\"%(_filepath,fullname))\n return VFSModuleLoader(_filepath,fullname)\n \n print('module %s not found'%fullname)\n raise ImportError()\n return None\n \nclass VFSModuleLoader:\n ''\n \n def __init__(self,filepath,name):\n self._filepath=filepath\n self._name=name\n \n def get_source(self):\n if self._filepath in VFS:\n return JSObject(readFromVFS(self._filepath))\n \n raise ImportError('could not find source for %s'%fullname)\n \n def is_package(self):\n return '.'in self._name\n \n def load_module(self):\n if self._name in sys.modules:\n \n mod=sys.modules[self._name]\n return mod\n \n _src=self.get_source()\n if self._filepath.endswith('.js'):\n mod=JSObject(import_js_module(_src,self._filepath,self._name))\n elif self._filepath.endswith('.py'):\n mod=JSObject(import_py_module(_src,self._filepath,self._name))\n elif self._filepath.endswith('.pyj'):\n mod=JSObject(import_pyj_module(_src,self._filepath,self._name))\n else :\n raise ImportError('Invalid Module: %s'%self._filepath)\n \n \n mod.__file__=self._filepath\n mod.__name__=self._name\n mod.__path__=os.path.abspath(self._filepath)\n mod.__loader__=self\n mod.__package__='.'.join(self._name.split('.')[:-1])\n \n if self.is_package():\n print('adding path for package')\n \n \n mod.__path__=[self.path_entry]\n else :\n print('imported as regular module')\n \n print('creating a new module object for \"%s\"'%self._name)\n sys.modules.setdefault(self._name,mod)\n JSObject(__BRYTHON__.imported)[self._name]=mod\n \n return mod\n \nJSObject(__BRYTHON__.path_hooks.insert(0,VFSModuleFinder))\n", ["browser", "os", "sys"]], "warnings": [".py", "''\n\nimport sys\n\n\n__all__=[\"warn\",\"warn_explicit\",\"showwarning\",\n\"formatwarning\",\"filterwarnings\",\"simplefilter\",\n\"resetwarnings\",\"catch_warnings\"]\n\ndef showwarning(message,category,filename,lineno,file=None ,line=None ):\n ''\n msg=WarningMessage(message,category,filename,lineno,file,line)\n _showwarnmsg_impl(msg)\n \ndef formatwarning(message,category,filename,lineno,line=None ):\n ''\n msg=WarningMessage(message,category,filename,lineno,None ,line)\n return _formatwarnmsg_impl(msg)\n \ndef _showwarnmsg_impl(msg):\n file=msg.file\n if file is None :\n file=sys.stderr\n if file is None :\n \n \n return\n text=_formatwarnmsg(msg)\n try :\n file.write(text)\n except OSError:\n \n pass\n \ndef _formatwarnmsg_impl(msg):\n category=msg.category.__name__\n s=f\"{msg.filename}:{msg.lineno}: {category}: {msg.message}\\n\"\n \n if msg.line is None :\n try :\n import linecache\n 
line=linecache.getline(msg.filename,msg.lineno)\n except Exception:\n \n \n line=None\n linecache=None\n else :\n line=msg.line\n if line:\n line=line.strip()\n s +=\" %s\\n\"%line\n \n if msg.source is not None :\n try :\n import tracemalloc\n \n \n except Exception:\n \n tracing=True\n tb=None\n else :\n tracing=tracemalloc.is_tracing()\n try :\n tb=tracemalloc.get_object_traceback(msg.source)\n except Exception:\n \n \n tb=None\n \n if tb is not None :\n s +='Object allocated at (most recent call last):\\n'\n for frame in tb:\n s +=(' File \"%s\", lineno %s\\n'\n %(frame.filename,frame.lineno))\n \n try :\n if linecache is not None :\n line=linecache.getline(frame.filename,frame.lineno)\n else :\n line=None\n except Exception:\n line=None\n if line:\n line=line.strip()\n s +=' %s\\n'%line\n elif not tracing:\n s +=(f'{category}: Enable tracemalloc to get the object '\n f'allocation traceback\\n')\n return s\n \n \n_showwarning_orig=showwarning\n\ndef _showwarnmsg(msg):\n ''\n try :\n sw=showwarning\n except NameError:\n pass\n else :\n if sw is not _showwarning_orig:\n \n if not callable(sw):\n raise TypeError(\"warnings.showwarning() must be set to a \"\n \"function or method\")\n \n sw(msg.message,msg.category,msg.filename,msg.lineno,\n msg.file,msg.line)\n return\n _showwarnmsg_impl(msg)\n \n \n_formatwarning_orig=formatwarning\n\ndef _formatwarnmsg(msg):\n ''\n try :\n fw=formatwarning\n except NameError:\n pass\n else :\n if fw is not _formatwarning_orig:\n \n return fw(msg.message,msg.category,\n msg.filename,msg.lineno,msg.line)\n return _formatwarnmsg_impl(msg)\n \ndef filterwarnings(action,message=\"\",category=Warning,module=\"\",lineno=0,\nappend=False ):\n ''\n\n\n\n\n\n\n\n\n \n assert action in (\"error\",\"ignore\",\"always\",\"default\",\"module\",\n \"once\"),\"invalid action: %r\"%(action,)\n assert isinstance(message,str),\"message must be a string\"\n assert isinstance(category,type),\"category must be a class\"\n assert issubclass(category,Warning),\"category must be a Warning subclass\"\n assert isinstance(module,str),\"module must be a string\"\n assert isinstance(lineno,int)and lineno >=0,\\\n \"lineno must be an int >= 0\"\n \n if message or module:\n import re\n \n if message:\n message=re.compile(message,re.I)\n else :\n message=None\n if module:\n module=re.compile(module)\n else :\n module=None\n \n _add_filter(action,message,category,module,lineno,append=append)\n \ndef simplefilter(action,category=Warning,lineno=0,append=False ):\n ''\n\n\n\n\n\n\n\n \n assert action in (\"error\",\"ignore\",\"always\",\"default\",\"module\",\n \"once\"),\"invalid action: %r\"%(action,)\n assert isinstance(lineno,int)and lineno >=0,\\\n \"lineno must be an int >= 0\"\n _add_filter(action,None ,category,None ,lineno,append=append)\n \ndef _add_filter(*item,append):\n\n\n if not append:\n try :\n filters.remove(item)\n except ValueError:\n pass\n filters.insert(0,item)\n else :\n if item not in filters:\n filters.append(item)\n _filters_mutated()\n \ndef resetwarnings():\n ''\n filters[:]=[]\n _filters_mutated()\n \nclass _OptionError(Exception):\n ''\n pass\n \n \ndef _processoptions(args):\n for arg in args:\n try :\n _setoption(arg)\n except _OptionError as msg:\n print(\"Invalid -W option ignored:\",msg,file=sys.stderr)\n \n \ndef _setoption(arg):\n parts=arg.split(':')\n if len(parts)>5:\n raise _OptionError(\"too many fields (max 5): %r\"%(arg,))\n while len(parts)<5:\n parts.append('')\n action,message,category,module,lineno=[s.strip()\n for s in parts]\n 
action=_getaction(action)\n category=_getcategory(category)\n if message or module:\n import re\n if message:\n message=re.escape(message)\n if module:\n module=re.escape(module)+r'\\Z'\n if lineno:\n try :\n lineno=int(lineno)\n if lineno <0:\n raise ValueError\n except (ValueError,OverflowError):\n raise _OptionError(\"invalid lineno %r\"%(lineno,))from None\n else :\n lineno=0\n filterwarnings(action,message,category,module,lineno)\n \n \ndef _getaction(action):\n if not action:\n return \"default\"\n if action ==\"all\":return \"always\"\n for a in ('default','always','ignore','module','once','error'):\n if a.startswith(action):\n return a\n raise _OptionError(\"invalid action: %r\"%(action,))\n \n \ndef _getcategory(category):\n if not category:\n return Warning\n if '.'not in category:\n import builtins as m\n klass=category\n else :\n module,_,klass=category.rpartition('.')\n try :\n m=__import__(module,None ,None ,[klass])\n except ImportError:\n raise _OptionError(\"invalid module name: %r\"%(module,))from None\n try :\n cat=getattr(m,klass)\n except AttributeError:\n raise _OptionError(\"unknown warning category: %r\"%(category,))from None\n if not issubclass(cat,Warning):\n raise _OptionError(\"invalid warning category: %r\"%(category,))\n return cat\n \n \ndef _is_internal_frame(frame):\n ''\n filename=frame.f_code.co_filename\n return 'importlib'in filename and '_bootstrap'in filename\n \n \ndef _next_external_frame(frame):\n ''\n frame=frame.f_back\n while frame is not None and _is_internal_frame(frame):\n frame=frame.f_back\n return frame\n \n \n \ndef warn(message,category=None ,stacklevel=1,source=None ):\n ''\n \n if isinstance(message,Warning):\n category=message.__class__\n \n if category is None :\n category=UserWarning\n if not (isinstance(category,type)and issubclass(category,Warning)):\n raise TypeError(\"category must be a Warning subclass, \"\n \"not '{:s}'\".format(type(category).__name__))\n \n try :\n if stacklevel <=1 or _is_internal_frame(sys._getframe(1)):\n \n \n frame=sys._getframe(stacklevel)\n else :\n frame=sys._getframe(1)\n \n for x in range(stacklevel -1):\n frame=_next_external_frame(frame)\n if frame is None :\n raise ValueError\n except ValueError:\n globals=sys.__dict__\n filename=\"sys\"\n lineno=1\n else :\n globals=frame.f_globals\n filename=frame.f_code.co_filename\n lineno=frame.f_lineno\n if '__name__'in globals:\n module=globals['__name__']\n else :\n module=\"\"\n registry=globals.setdefault(\"__warningregistry__\",{})\n warn_explicit(message,category,filename,lineno,module,registry,\n globals,source)\n \ndef warn_explicit(message,category,filename,lineno,\nmodule=None ,registry=None ,module_globals=None ,\nsource=None ):\n lineno=int(lineno)\n if module is None :\n module=filename or \"\"\n if module[-3:].lower()==\".py\":\n module=module[:-3]\n if registry is None :\n registry={}\n if registry.get('version',0)!=_filters_version:\n registry.clear()\n registry['version']=_filters_version\n if isinstance(message,Warning):\n text=str(message)\n category=message.__class__\n else :\n text=message\n message=category(message)\n key=(text,category,lineno)\n \n if registry.get(key):\n return\n \n for item in filters:\n action,msg,cat,mod,ln=item\n if ((msg is None or msg.match(text))and\n issubclass(category,cat)and\n (mod is None or mod.match(module))and\n (ln ==0 or lineno ==ln)):\n break\n else :\n action=defaultaction\n \n if action ==\"ignore\":\n return\n \n \n \n import linecache\n linecache.getlines(filename,module_globals)\n \n if action 
==\"error\":\n raise message\n \n if action ==\"once\":\n registry[key]=1\n oncekey=(text,category)\n if onceregistry.get(oncekey):\n return\n onceregistry[oncekey]=1\n elif action ==\"always\":\n pass\n elif action ==\"module\":\n registry[key]=1\n altkey=(text,category,0)\n if registry.get(altkey):\n return\n registry[altkey]=1\n elif action ==\"default\":\n registry[key]=1\n else :\n \n raise RuntimeError(\n \"Unrecognized action (%r) in warnings.filters:\\n %s\"%\n (action,item))\n \n msg=WarningMessage(message,category,filename,lineno,source)\n _showwarnmsg(msg)\n \n \nclass WarningMessage(object):\n\n _WARNING_DETAILS=(\"message\",\"category\",\"filename\",\"lineno\",\"file\",\n \"line\",\"source\")\n \n def __init__(self,message,category,filename,lineno,file=None ,\n line=None ,source=None ):\n self.message=message\n self.category=category\n self.filename=filename\n self.lineno=lineno\n self.file=file\n self.line=line\n self.source=source\n self._category_name=category.__name__ if category else None\n \n def __str__(self):\n return (\"{message : %r, category : %r, filename : %r, lineno : %s, \"\n \"line : %r}\"%(self.message,self._category_name,\n self.filename,self.lineno,self.line))\n \n \nclass catch_warnings(object):\n\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def __init__(self,*,record=False ,module=None ):\n ''\n\n\n\n\n\n \n self._record=record\n self._module=sys.modules['warnings']if module is None else module\n self._entered=False\n \n def __repr__(self):\n args=[]\n if self._record:\n args.append(\"record=True\")\n if self._module is not sys.modules['warnings']:\n args.append(\"module=%r\"%self._module)\n name=type(self).__name__\n return \"%s(%s)\"%(name,\", \".join(args))\n \n def __enter__(self):\n if self._entered:\n raise RuntimeError(\"Cannot enter %r twice\"%self)\n self._entered=True\n self._filters=self._module.filters\n self._module.filters=self._filters[:]\n self._module._filters_mutated()\n self._showwarning=self._module.showwarning\n self._showwarnmsg_impl=self._module._showwarnmsg_impl\n if self._record:\n log=[]\n self._module._showwarnmsg_impl=log.append\n \n \n self._module.showwarning=self._module._showwarning_orig\n return log\n else :\n return None\n \n def __exit__(self,*exc_info):\n if not self._entered:\n raise RuntimeError(\"Cannot exit %r without entering first\"%self)\n self._module.filters=self._filters\n self._module._filters_mutated()\n self._module.showwarning=self._showwarning\n self._module._showwarnmsg_impl=self._showwarnmsg_impl\n \n \n \ndef _warn_unawaited_coroutine(coro):\n msg_lines=[\n f\"coroutine '{coro.__qualname__}' was never awaited\\n\"\n ]\n if coro.cr_origin is not None :\n import linecache,traceback\n def extract():\n for filename,lineno,funcname in reversed(coro.cr_origin):\n line=linecache.getline(filename,lineno)\n yield (filename,lineno,funcname,line)\n msg_lines.append(\"Coroutine created at (most recent call last)\\n\")\n msg_lines +=traceback.format_list(list(extract()))\n msg=\"\".join(msg_lines).rstrip(\"\\n\")\n \n \n \n \n \n \n warn(msg,category=RuntimeWarning,stacklevel=2,source=coro)\n \n \n \n \n \n \n \n \n \n \ntry :\n from _warnings import (filters,_defaultaction,_onceregistry,\n warn,warn_explicit,_filters_mutated)\n defaultaction=_defaultaction\n onceregistry=_onceregistry\n _warnings_defaults=True\nexcept ImportError:\n filters=[]\n defaultaction=\"default\"\n onceregistry={}\n \n _filters_version=1\n \n def _filters_mutated():\n global _filters_version\n _filters_version +=1\n \n _warnings_defaults=False\n \n 
\n \n_processoptions(sys.warnoptions)\nif not _warnings_defaults:\n\n if not hasattr(sys,'gettotalrefcount'):\n filterwarnings(\"default\",category=DeprecationWarning,\n module=\"__main__\",append=1)\n simplefilter(\"ignore\",category=DeprecationWarning,append=1)\n simplefilter(\"ignore\",category=PendingDeprecationWarning,append=1)\n simplefilter(\"ignore\",category=ImportWarning,append=1)\n simplefilter(\"ignore\",category=ResourceWarning,append=1)\n \ndel _warnings_defaults\n", ["_warnings", "builtins", "linecache", "re", "sys", "traceback", "tracemalloc"]], "weakref": [".py", "''\n\n\n\n\n\n\n\n\n\n\nfrom _weakref import (\ngetweakrefcount,\ngetweakrefs,\nref,\nproxy,\nCallableProxyType,\nProxyType,\nReferenceType,\n_remove_dead_weakref)\n\nfrom _weakrefset import WeakSet,_IterationGuard\n\nimport _collections_abc\nimport sys\nimport itertools\n\nProxyTypes=(ProxyType,CallableProxyType)\n\n__all__=[\"ref\",\"proxy\",\"getweakrefcount\",\"getweakrefs\",\n\"WeakKeyDictionary\",\"ReferenceType\",\"ProxyType\",\n\"CallableProxyType\",\"ProxyTypes\",\"WeakValueDictionary\",\n\"WeakSet\",\"WeakMethod\",\"finalize\"]\n\n\n_collections_abc.Set.register(WeakSet)\n_collections_abc.MutableSet.register(WeakSet)\n\nclass WeakMethod(ref):\n ''\n\n\n \n \n __slots__=\"_func_ref\",\"_meth_type\",\"_alive\",\"__weakref__\"\n \n def __new__(cls,meth,callback=None ):\n try :\n obj=meth.__self__\n func=meth.__func__\n except AttributeError:\n raise TypeError(\"argument should be a bound method, not {}\"\n .format(type(meth)))from None\n def _cb(arg):\n \n \n self=self_wr()\n if self._alive:\n self._alive=False\n if callback is not None :\n callback(self)\n self=ref.__new__(cls,obj,_cb)\n self._func_ref=ref(func,_cb)\n self._meth_type=type(meth)\n self._alive=True\n self_wr=ref(self)\n return self\n \n def __call__(self):\n obj=super().__call__()\n func=self._func_ref()\n if obj is None or func is None :\n return None\n return self._meth_type(func,obj)\n \n def __eq__(self,other):\n if isinstance(other,WeakMethod):\n if not self._alive or not other._alive:\n return self is other\n return ref.__eq__(self,other)and self._func_ref ==other._func_ref\n return NotImplemented\n \n def __ne__(self,other):\n if isinstance(other,WeakMethod):\n if not self._alive or not other._alive:\n return self is not other\n return ref.__ne__(self,other)or self._func_ref !=other._func_ref\n return NotImplemented\n \n __hash__=ref.__hash__\n \n \nclass WeakValueDictionary(_collections_abc.MutableMapping):\n ''\n\n\n\n \n \n \n \n \n \n \n def __init__(self,other=(),/,**kw):\n def remove(wr,selfref=ref(self),_atomic_removal=_remove_dead_weakref):\n self=selfref()\n if self is not None :\n if self._iterating:\n self._pending_removals.append(wr.key)\n else :\n \n \n _atomic_removal(self.data,wr.key)\n self._remove=remove\n \n self._pending_removals=[]\n self._iterating=set()\n self.data={}\n self.update(other,**kw)\n \n def _commit_removals(self):\n l=self._pending_removals\n d=self.data\n \n \n while l:\n key=l.pop()\n _remove_dead_weakref(d,key)\n \n def __getitem__(self,key):\n if self._pending_removals:\n self._commit_removals()\n o=self.data[key]()\n if o is None :\n raise KeyError(key)\n else :\n return o\n \n def __delitem__(self,key):\n if self._pending_removals:\n self._commit_removals()\n del self.data[key]\n \n def __len__(self):\n if self._pending_removals:\n self._commit_removals()\n return len(self.data)\n \n def __contains__(self,key):\n if self._pending_removals:\n self._commit_removals()\n try :\n 
o=self.data[key]()\n except KeyError:\n return False\n return o is not None\n \n def __repr__(self):\n return \"<%s at %#x>\"%(self.__class__.__name__,id(self))\n \n def __setitem__(self,key,value):\n if self._pending_removals:\n self._commit_removals()\n self.data[key]=KeyedRef(value,self._remove,key)\n \n def copy(self):\n if self._pending_removals:\n self._commit_removals()\n new=WeakValueDictionary()\n with _IterationGuard(self):\n for key,wr in self.data.items():\n o=wr()\n if o is not None :\n new[key]=o\n return new\n \n __copy__=copy\n \n def __deepcopy__(self,memo):\n from copy import deepcopy\n if self._pending_removals:\n self._commit_removals()\n new=self.__class__()\n with _IterationGuard(self):\n for key,wr in self.data.items():\n o=wr()\n if o is not None :\n new[deepcopy(key,memo)]=o\n return new\n \n def get(self,key,default=None ):\n if self._pending_removals:\n self._commit_removals()\n try :\n wr=self.data[key]\n except KeyError:\n return default\n else :\n o=wr()\n if o is None :\n \n return default\n else :\n return o\n \n def items(self):\n if self._pending_removals:\n self._commit_removals()\n with _IterationGuard(self):\n for k,wr in self.data.items():\n v=wr()\n if v is not None :\n yield k,v\n \n def keys(self):\n if self._pending_removals:\n self._commit_removals()\n with _IterationGuard(self):\n for k,wr in self.data.items():\n if wr()is not None :\n yield k\n \n __iter__=keys\n \n def itervaluerefs(self):\n ''\n\n\n\n\n\n\n\n \n if self._pending_removals:\n self._commit_removals()\n with _IterationGuard(self):\n yield from self.data.values()\n \n def values(self):\n if self._pending_removals:\n self._commit_removals()\n with _IterationGuard(self):\n for wr in self.data.values():\n obj=wr()\n if obj is not None :\n yield obj\n \n def popitem(self):\n if self._pending_removals:\n self._commit_removals()\n while True :\n key,wr=self.data.popitem()\n o=wr()\n if o is not None :\n return key,o\n \n def pop(self,key,*args):\n if self._pending_removals:\n self._commit_removals()\n try :\n o=self.data.pop(key)()\n except KeyError:\n o=None\n if o is None :\n if args:\n return args[0]\n else :\n raise KeyError(key)\n else :\n return o\n \n def setdefault(self,key,default=None ):\n try :\n o=self.data[key]()\n except KeyError:\n o=None\n if o is None :\n if self._pending_removals:\n self._commit_removals()\n self.data[key]=KeyedRef(default,self._remove,key)\n return default\n else :\n return o\n \n def update(self,other=None ,/,**kwargs):\n if self._pending_removals:\n self._commit_removals()\n d=self.data\n if other is not None :\n if not hasattr(other,\"items\"):\n other=dict(other)\n for key,o in other.items():\n d[key]=KeyedRef(o,self._remove,key)\n for key,o in kwargs.items():\n d[key]=KeyedRef(o,self._remove,key)\n \n def valuerefs(self):\n ''\n\n\n\n\n\n\n\n \n if self._pending_removals:\n self._commit_removals()\n return list(self.data.values())\n \n def __ior__(self,other):\n self.update(other)\n return self\n \n def __or__(self,other):\n if isinstance(other,_collections_abc.Mapping):\n c=self.copy()\n c.update(other)\n return c\n return NotImplemented\n \n def __ror__(self,other):\n if isinstance(other,_collections_abc.Mapping):\n c=self.__class__()\n c.update(other)\n c.update(self)\n return c\n return NotImplemented\n \n \nclass KeyedRef(ref):\n ''\n\n\n\n\n\n\n \n \n __slots__=\"key\",\n \n def __new__(type,ob,callback,key):\n self=ref.__new__(type,ob,callback)\n self.key=key\n return self\n \n def __init__(self,ob,callback,key):\n 
super().__init__(ob,callback)\n \n \nclass WeakKeyDictionary(_collections_abc.MutableMapping):\n ''\n\n\n\n\n\n\n\n \n \n def __init__(self,dict=None ):\n self.data={}\n def remove(k,selfref=ref(self)):\n self=selfref()\n if self is not None :\n if self._iterating:\n self._pending_removals.append(k)\n else :\n del self.data[k]\n self._remove=remove\n \n self._pending_removals=[]\n self._iterating=set()\n self._dirty_len=False\n if dict is not None :\n self.update(dict)\n \n def _commit_removals(self):\n \n \n \n \n l=self._pending_removals\n d=self.data\n while l:\n try :\n del d[l.pop()]\n except KeyError:\n pass\n \n def _scrub_removals(self):\n d=self.data\n self._pending_removals=[k for k in self._pending_removals if k in d]\n self._dirty_len=False\n \n def __delitem__(self,key):\n self._dirty_len=True\n del self.data[ref(key)]\n \n def __getitem__(self,key):\n return self.data[ref(key)]\n \n def __len__(self):\n if self._dirty_len and self._pending_removals:\n \n \n self._scrub_removals()\n return len(self.data)-len(self._pending_removals)\n \n def __repr__(self):\n return \"<%s at %#x>\"%(self.__class__.__name__,id(self))\n \n def __setitem__(self,key,value):\n self.data[ref(key,self._remove)]=value\n \n def copy(self):\n new=WeakKeyDictionary()\n with _IterationGuard(self):\n for key,value in self.data.items():\n o=key()\n if o is not None :\n new[o]=value\n return new\n \n __copy__=copy\n \n def __deepcopy__(self,memo):\n from copy import deepcopy\n new=self.__class__()\n with _IterationGuard(self):\n for key,value in self.data.items():\n o=key()\n if o is not None :\n new[o]=deepcopy(value,memo)\n return new\n \n def get(self,key,default=None ):\n return self.data.get(ref(key),default)\n \n def __contains__(self,key):\n try :\n wr=ref(key)\n except TypeError:\n return False\n return wr in self.data\n \n def items(self):\n with _IterationGuard(self):\n for wr,value in self.data.items():\n key=wr()\n if key is not None :\n yield key,value\n \n def keys(self):\n with _IterationGuard(self):\n for wr in self.data:\n obj=wr()\n if obj is not None :\n yield obj\n \n __iter__=keys\n \n def values(self):\n with _IterationGuard(self):\n for wr,value in self.data.items():\n if wr()is not None :\n yield value\n \n def keyrefs(self):\n ''\n\n\n\n\n\n\n\n \n return list(self.data)\n \n def popitem(self):\n self._dirty_len=True\n while True :\n key,value=self.data.popitem()\n o=key()\n if o is not None :\n return o,value\n \n def pop(self,key,*args):\n self._dirty_len=True\n return self.data.pop(ref(key),*args)\n \n def setdefault(self,key,default=None ):\n return self.data.setdefault(ref(key,self._remove),default)\n \n def update(self,dict=None ,/,**kwargs):\n d=self.data\n if dict is not None :\n if not hasattr(dict,\"items\"):\n dict=type({})(dict)\n for key,value in dict.items():\n d[ref(key,self._remove)]=value\n if len(kwargs):\n self.update(kwargs)\n \n def __ior__(self,other):\n self.update(other)\n return self\n \n def __or__(self,other):\n if isinstance(other,_collections_abc.Mapping):\n c=self.copy()\n c.update(other)\n return c\n return NotImplemented\n \n def __ror__(self,other):\n if isinstance(other,_collections_abc.Mapping):\n c=self.__class__()\n c.update(other)\n c.update(self)\n return c\n return NotImplemented\n \n \nclass finalize:\n ''\n\n\n\n\n\n\n\n\n\n\n \n \n \n \n \n \n __slots__=()\n _registry={}\n _shutdown=False\n _index_iter=itertools.count()\n _dirty=False\n _registered_with_atexit=False\n \n class _Info:\n 
__slots__=(\"weakref\",\"func\",\"args\",\"kwargs\",\"atexit\",\"index\")\n \n def __init__(self,obj,func,/,*args,**kwargs):\n if not self._registered_with_atexit:\n \n \n import atexit\n atexit.register(self._exitfunc)\n finalize._registered_with_atexit=True\n info=self._Info()\n info.weakref=ref(obj,self)\n info.func=func\n info.args=args\n info.kwargs=kwargs or None\n info.atexit=True\n info.index=next(self._index_iter)\n self._registry[self]=info\n finalize._dirty=True\n \n def __call__(self,_=None ):\n ''\n \n info=self._registry.pop(self,None )\n if info and not self._shutdown:\n return info.func(*info.args,**(info.kwargs or {}))\n \n def detach(self):\n ''\n \n info=self._registry.get(self)\n obj=info and info.weakref()\n if obj is not None and self._registry.pop(self,None ):\n return (obj,info.func,info.args,info.kwargs or {})\n \n def peek(self):\n ''\n \n info=self._registry.get(self)\n obj=info and info.weakref()\n if obj is not None :\n return (obj,info.func,info.args,info.kwargs or {})\n \n @property\n def alive(self):\n ''\n return self in self._registry\n \n @property\n def atexit(self):\n ''\n info=self._registry.get(self)\n return bool(info)and info.atexit\n \n @atexit.setter\n def atexit(self,value):\n info=self._registry.get(self)\n if info:\n info.atexit=bool(value)\n \n def __repr__(self):\n info=self._registry.get(self)\n obj=info and info.weakref()\n if obj is None :\n return '<%s object at %#x; dead>'%(type(self).__name__,id(self))\n else :\n return '<%s object at %#x; for %r at %#x>'%\\\n (type(self).__name__,id(self),type(obj).__name__,id(obj))\n \n @classmethod\n def _select_for_exit(cls):\n \n L=[(f,i)for (f,i)in cls._registry.items()if i.atexit]\n L.sort(key=lambda item:item[1].index)\n return [f for (f,i)in L]\n \n @classmethod\n def _exitfunc(cls):\n \n \n \n reenable_gc=False\n try :\n if cls._registry:\n import gc\n if gc.isenabled():\n reenable_gc=True\n gc.disable()\n pending=None\n while True :\n if pending is None or finalize._dirty:\n pending=cls._select_for_exit()\n finalize._dirty=False\n if not pending:\n break\n f=pending.pop()\n try :\n \n \n \n \n f()\n except Exception:\n sys.excepthook(*sys.exc_info())\n assert f not in cls._registry\n finally :\n \n finalize._shutdown=True\n if reenable_gc:\n gc.enable()\n", ["_collections_abc", "_weakref", "_weakrefset", "atexit", "copy", "gc", "itertools", "sys"]], "webbrowser": [".py", "\n\nfrom browser import window\n\ndef open(url,new=0,autoraise=True ):\n window.open(url)\n \ndef open_new(url):\n return window.open(url,\"_blank\")\n \ndef open_new_tab(url):\n return open(url)\n \n", ["browser"]], "zipfile": [".py", "''\n\n\n\n\nimport binascii\nimport importlib.util\nimport io\nimport itertools\nimport os\nimport posixpath\nimport shutil\nimport stat\nimport struct\nimport sys\nimport threading\nimport time\nimport contextlib\n\ntry :\n import zlib\n crc32=zlib.crc32\nexcept ImportError:\n zlib=None\n crc32=binascii.crc32\n \ntry :\n import bz2\nexcept ImportError:\n bz2=None\n \ntry :\n import lzma\nexcept ImportError:\n lzma=None\n \n__all__=[\"BadZipFile\",\"BadZipfile\",\"error\",\n\"ZIP_STORED\",\"ZIP_DEFLATED\",\"ZIP_BZIP2\",\"ZIP_LZMA\",\n\"is_zipfile\",\"ZipInfo\",\"ZipFile\",\"PyZipFile\",\"LargeZipFile\",\n\"Path\"]\n\nclass BadZipFile(Exception):\n pass\n \n \nclass LargeZipFile(Exception):\n ''\n\n\n \n \nerror=BadZipfile=BadZipFile\n\n\nZIP64_LIMIT=(1 <<31)-1\nZIP_FILECOUNT_LIMIT=(1 <<16)-1\nZIP_MAX_COMMENT=(1 
<<16)-1\n\n\nZIP_STORED=0\nZIP_DEFLATED=8\nZIP_BZIP2=12\nZIP_LZMA=14\n\n\nDEFAULT_VERSION=20\nZIP64_VERSION=45\nBZIP2_VERSION=46\nLZMA_VERSION=63\n\nMAX_EXTRACT_VERSION=63\n\n\n\n\n\n\n\n\n\nstructEndArchive=b\"<4s4H2LH\"\nstringEndArchive=b\"PK\\005\\006\"\nsizeEndCentDir=struct.calcsize(structEndArchive)\n\n_ECD_SIGNATURE=0\n_ECD_DISK_NUMBER=1\n_ECD_DISK_START=2\n_ECD_ENTRIES_THIS_DISK=3\n_ECD_ENTRIES_TOTAL=4\n_ECD_SIZE=5\n_ECD_OFFSET=6\n_ECD_COMMENT_SIZE=7\n\n\n_ECD_COMMENT=8\n_ECD_LOCATION=9\n\n\n\nstructCentralDir=\"<4s4B4HL2L5H2L\"\nstringCentralDir=b\"PK\\001\\002\"\nsizeCentralDir=struct.calcsize(structCentralDir)\n\n\n_CD_SIGNATURE=0\n_CD_CREATE_VERSION=1\n_CD_CREATE_SYSTEM=2\n_CD_EXTRACT_VERSION=3\n_CD_EXTRACT_SYSTEM=4\n_CD_FLAG_BITS=5\n_CD_COMPRESS_TYPE=6\n_CD_TIME=7\n_CD_DATE=8\n_CD_CRC=9\n_CD_COMPRESSED_SIZE=10\n_CD_UNCOMPRESSED_SIZE=11\n_CD_FILENAME_LENGTH=12\n_CD_EXTRA_FIELD_LENGTH=13\n_CD_COMMENT_LENGTH=14\n_CD_DISK_NUMBER_START=15\n_CD_INTERNAL_FILE_ATTRIBUTES=16\n_CD_EXTERNAL_FILE_ATTRIBUTES=17\n_CD_LOCAL_HEADER_OFFSET=18\n\n\n\nstructFileHeader=\"<4s2B4HL2L2H\"\nstringFileHeader=b\"PK\\003\\004\"\nsizeFileHeader=struct.calcsize(structFileHeader)\n\n_FH_SIGNATURE=0\n_FH_EXTRACT_VERSION=1\n_FH_EXTRACT_SYSTEM=2\n_FH_GENERAL_PURPOSE_FLAG_BITS=3\n_FH_COMPRESSION_METHOD=4\n_FH_LAST_MOD_TIME=5\n_FH_LAST_MOD_DATE=6\n_FH_CRC=7\n_FH_COMPRESSED_SIZE=8\n_FH_UNCOMPRESSED_SIZE=9\n_FH_FILENAME_LENGTH=10\n_FH_EXTRA_FIELD_LENGTH=11\n\n\nstructEndArchive64Locator=\"<4sLQL\"\nstringEndArchive64Locator=b\"PK\\x06\\x07\"\nsizeEndCentDir64Locator=struct.calcsize(structEndArchive64Locator)\n\n\n\nstructEndArchive64=\"<4sQ2H2L4Q\"\nstringEndArchive64=b\"PK\\x06\\x06\"\nsizeEndCentDir64=struct.calcsize(structEndArchive64)\n\n_CD64_SIGNATURE=0\n_CD64_DIRECTORY_RECSIZE=1\n_CD64_CREATE_VERSION=2\n_CD64_EXTRACT_VERSION=3\n_CD64_DISK_NUMBER=4\n_CD64_DISK_NUMBER_START=5\n_CD64_NUMBER_ENTRIES_THIS_DISK=6\n_CD64_NUMBER_ENTRIES_TOTAL=7\n_CD64_DIRECTORY_SIZE=8\n_CD64_OFFSET_START_CENTDIR=9\n\n_DD_SIGNATURE=0x08074b50\n\n_EXTRA_FIELD_STRUCT=struct.Struct('1:\n raise BadZipFile(\"zipfiles that span multiple disks are not supported\")\n \n \n fpin.seek(offset -sizeEndCentDir64Locator -sizeEndCentDir64,2)\n data=fpin.read(sizeEndCentDir64)\n if len(data)!=sizeEndCentDir64:\n return endrec\n sig,sz,create_version,read_version,disk_num,disk_dir,\\\n dircount,dircount2,dirsize,diroffset=\\\n struct.unpack(structEndArchive64,data)\n if sig !=stringEndArchive64:\n return endrec\n \n \n endrec[_ECD_SIGNATURE]=sig\n endrec[_ECD_DISK_NUMBER]=disk_num\n endrec[_ECD_DISK_START]=disk_dir\n endrec[_ECD_ENTRIES_THIS_DISK]=dircount\n endrec[_ECD_ENTRIES_TOTAL]=dircount2\n endrec[_ECD_SIZE]=dirsize\n endrec[_ECD_OFFSET]=diroffset\n return endrec\n \n \ndef _EndRecData(fpin):\n ''\n\n\n \n \n \n fpin.seek(0,2)\n filesize=fpin.tell()\n \n \n \n \n try :\n fpin.seek(-sizeEndCentDir,2)\n except OSError:\n return None\n data=fpin.read()\n if (len(data)==sizeEndCentDir and\n data[0:4]==stringEndArchive and\n data[-2:]==b\"\\000\\000\"):\n \n endrec=struct.unpack(structEndArchive,data)\n endrec=list(endrec)\n \n \n endrec.append(b\"\")\n endrec.append(filesize -sizeEndCentDir)\n \n \n return _EndRecData64(fpin,-sizeEndCentDir,endrec)\n \n \n \n \n \n \n maxCommentStart=max(filesize -(1 <<16)-sizeEndCentDir,0)\n fpin.seek(maxCommentStart,0)\n data=fpin.read()\n start=data.rfind(stringEndArchive)\n if start >=0:\n \n recData=data[start:start+sizeEndCentDir]\n if len(recData)!=sizeEndCentDir:\n \n return None\n 
endrec=list(struct.unpack(structEndArchive,recData))\n commentSize=endrec[_ECD_COMMENT_SIZE]\n comment=data[start+sizeEndCentDir:start+sizeEndCentDir+commentSize]\n endrec.append(comment)\n endrec.append(maxCommentStart+start)\n \n \n return _EndRecData64(fpin,maxCommentStart+start -filesize,\n endrec)\n \n \n return None\n \n \nclass ZipInfo(object):\n ''\n \n __slots__=(\n 'orig_filename',\n 'filename',\n 'date_time',\n 'compress_type',\n '_compresslevel',\n 'comment',\n 'extra',\n 'create_system',\n 'create_version',\n 'extract_version',\n 'reserved',\n 'flag_bits',\n 'volume',\n 'internal_attr',\n 'external_attr',\n 'header_offset',\n 'CRC',\n 'compress_size',\n 'file_size',\n '_raw_time',\n )\n \n def __init__(self,filename=\"NoName\",date_time=(1980,1,1,0,0,0)):\n self.orig_filename=filename\n \n \n \n null_byte=filename.find(chr(0))\n if null_byte >=0:\n filename=filename[0:null_byte]\n \n \n \n if os.sep !=\"/\"and os.sep in filename:\n filename=filename.replace(os.sep,\"/\")\n \n self.filename=filename\n self.date_time=date_time\n \n if date_time[0]<1980:\n raise ValueError('ZIP does not support timestamps before 1980')\n \n \n self.compress_type=ZIP_STORED\n self._compresslevel=None\n self.comment=b\"\"\n self.extra=b\"\"\n if sys.platform =='win32':\n self.create_system=0\n else :\n \n self.create_system=3\n self.create_version=DEFAULT_VERSION\n self.extract_version=DEFAULT_VERSION\n self.reserved=0\n self.flag_bits=0\n self.volume=0\n self.internal_attr=0\n self.external_attr=0\n self.compress_size=0\n self.file_size=0\n \n \n \n \n def __repr__(self):\n result=['<%s filename=%r'%(self.__class__.__name__,self.filename)]\n if self.compress_type !=ZIP_STORED:\n result.append(' compress_type=%s'%\n compressor_names.get(self.compress_type,\n self.compress_type))\n hi=self.external_attr >>16\n lo=self.external_attr&0xFFFF\n if hi:\n result.append(' filemode=%r'%stat.filemode(hi))\n if lo:\n result.append(' external_attr=%#x'%lo)\n isdir=self.is_dir()\n if not isdir or self.file_size:\n result.append(' file_size=%r'%self.file_size)\n if ((not isdir or self.compress_size)and\n (self.compress_type !=ZIP_STORED or\n self.file_size !=self.compress_size)):\n result.append(' compress_size=%r'%self.compress_size)\n result.append('>')\n return ''.join(result)\n \n def FileHeader(self,zip64=None ):\n ''\n dt=self.date_time\n dosdate=(dt[0]-1980)<<9 |dt[1]<<5 |dt[2]\n dostime=dt[3]<<11 |dt[4]<<5 |(dt[5]//2)\n if self.flag_bits&0x08:\n \n CRC=compress_size=file_size=0\n else :\n CRC=self.CRC\n compress_size=self.compress_size\n file_size=self.file_size\n \n extra=self.extra\n \n min_version=0\n if zip64 is None :\n zip64=file_size >ZIP64_LIMIT or compress_size >ZIP64_LIMIT\n if zip64:\n fmt='ZIP64_LIMIT or compress_size >ZIP64_LIMIT:\n if not zip64:\n raise LargeZipFile(\"Filesize would require ZIP64 extensions\")\n \n \n file_size=0xffffffff\n compress_size=0xffffffff\n min_version=ZIP64_VERSION\n \n if self.compress_type ==ZIP_BZIP2:\n min_version=max(BZIP2_VERSION,min_version)\n elif self.compress_type ==ZIP_LZMA:\n min_version=max(LZMA_VERSION,min_version)\n \n self.extract_version=max(min_version,self.extract_version)\n self.create_version=max(min_version,self.create_version)\n filename,flag_bits=self._encodeFilenameFlags()\n header=struct.pack(structFileHeader,stringFileHeader,\n self.extract_version,self.reserved,flag_bits,\n self.compress_type,dostime,dosdate,CRC,\n compress_size,file_size,\n len(filename),len(extra))\n return header+filename+extra\n \n def _encodeFilenameFlags(self):\n 
try :\n return self.filename.encode('ascii'),self.flag_bits\n except UnicodeEncodeError:\n return self.filename.encode('utf-8'),self.flag_bits |0x800\n \n def _decodeExtra(self):\n \n extra=self.extra\n unpack=struct.unpack\n while len(extra)>=4:\n tp,ln=unpack('len(extra):\n raise BadZipFile(\"Corrupt extra field %04x (size=%d)\"%(tp,ln))\n if tp ==0x0001:\n data=extra[4:ln+4]\n \n try :\n if self.file_size in (0xFFFF_FFFF_FFFF_FFFF,0xFFFF_FFFF):\n field=\"File size\"\n self.file_size,=unpack('2107:\n date_time=(2107,12,31,23,59,59)\n \n if arcname is None :\n arcname=filename\n arcname=os.path.normpath(os.path.splitdrive(arcname)[1])\n while arcname[0]in (os.sep,os.altsep):\n arcname=arcname[1:]\n if isdir:\n arcname +='/'\n zinfo=cls(arcname,date_time)\n zinfo.external_attr=(st.st_mode&0xFFFF)<<16\n if isdir:\n zinfo.file_size=0\n zinfo.external_attr |=0x10\n else :\n zinfo.file_size=st.st_size\n \n return zinfo\n \n def is_dir(self):\n ''\n return self.filename[-1]=='/'\n \n \n \n \n \n \n_crctable=None\ndef _gen_crc(crc):\n for j in range(8):\n if crc&1:\n crc=(crc >>1)^0xEDB88320\n else :\n crc >>=1\n return crc\n \n \n \n \n \n \n \n \n \ndef _ZipDecrypter(pwd):\n key0=305419896\n key1=591751049\n key2=878082192\n \n global _crctable\n if _crctable is None :\n _crctable=list(map(_gen_crc,range(256)))\n crctable=_crctable\n \n def crc32(ch,crc):\n ''\n return (crc >>8)^crctable[(crc ^ch)&0xFF]\n \n def update_keys(c):\n nonlocal key0,key1,key2\n key0=crc32(c,key0)\n key1=(key1+(key0&0xFF))&0xFFFFFFFF\n key1=(key1 *134775813+1)&0xFFFFFFFF\n key2=crc32(key1 >>24,key2)\n \n for p in pwd:\n update_keys(p)\n \n def decrypter(data):\n ''\n result=bytearray()\n append=result.append\n for c in data:\n k=key2 |2\n c ^=((k *(k ^1))>>8)&0xFF\n update_keys(c)\n append(c)\n return bytes(result)\n \n return decrypter\n \n \nclass LZMACompressor:\n\n def __init__(self):\n self._comp=None\n \n def _init(self):\n props=lzma._encode_filter_properties({'id':lzma.FILTER_LZMA1})\n self._comp=lzma.LZMACompressor(lzma.FORMAT_RAW,filters=[\n lzma._decode_filter_properties(lzma.FILTER_LZMA1,props)\n ])\n return struct.pack('>8)&0xff\n else :\n \n check_byte=(zipinfo.CRC >>24)&0xff\n h=self._init_decrypter()\n if h !=check_byte:\n raise RuntimeError(\"Bad password for file %r\"%zipinfo.orig_filename)\n \n \n def _init_decrypter(self):\n self._decrypter=_ZipDecrypter(self._pwd)\n \n \n \n \n \n header=self._fileobj.read(12)\n self._compress_left -=12\n return self._decrypter(header)[11]\n \n def __repr__(self):\n result=['<%s.%s'%(self.__class__.__module__,\n self.__class__.__qualname__)]\n if not self.closed:\n result.append(' name=%r mode=%r'%(self.name,self.mode))\n if self._compress_type !=ZIP_STORED:\n result.append(' compress_type=%s'%\n compressor_names.get(self._compress_type,\n self._compress_type))\n else :\n result.append(' [closed]')\n result.append('>')\n return ''.join(result)\n \n def readline(self,limit=-1):\n ''\n\n\n \n \n if limit <0:\n \n i=self._readbuffer.find(b'\\n',self._offset)+1\n if i >0:\n line=self._readbuffer[self._offset:i]\n self._offset=i\n return line\n \n return io.BufferedIOBase.readline(self,limit)\n \n def peek(self,n=1):\n ''\n if n >len(self._readbuffer)-self._offset:\n chunk=self.read(n)\n if len(chunk)>self._offset:\n self._readbuffer=chunk+self._readbuffer[self._offset:]\n self._offset=0\n else :\n self._offset -=len(chunk)\n \n \n return self._readbuffer[self._offset:self._offset+512]\n \n def readable(self):\n if self.closed:\n raise ValueError(\"I/O operation on 
closed file.\")\n return True\n \n def read(self,n=-1):\n ''\n\n \n if self.closed:\n raise ValueError(\"read from closed file.\")\n if n is None or n <0:\n buf=self._readbuffer[self._offset:]\n self._readbuffer=b''\n self._offset=0\n while not self._eof:\n buf +=self._read1(self.MAX_N)\n return buf\n \n end=n+self._offset\n if end 0 and not self._eof:\n data=self._read1(n)\n if n 0:\n while not self._eof:\n data=self._read1(n)\n if n len(data):\n data +=self._read2(n -len(data))\n else :\n data=self._read2(n)\n \n if self._compress_type ==ZIP_STORED:\n self._eof=self._compress_left <=0\n elif self._compress_type ==ZIP_DEFLATED:\n n=max(n,self.MIN_READ_SIZE)\n data=self._decompressor.decompress(data,n)\n self._eof=(self._decompressor.eof or\n self._compress_left <=0 and\n not self._decompressor.unconsumed_tail)\n if self._eof:\n data +=self._decompressor.flush()\n else :\n data=self._decompressor.decompress(data)\n self._eof=self._decompressor.eof or self._compress_left <=0\n \n data=data[:self._left]\n self._left -=len(data)\n if self._left <=0:\n self._eof=True\n self._update_crc(data)\n return data\n \n def _read2(self,n):\n if self._compress_left <=0:\n return b''\n \n n=max(n,self.MIN_READ_SIZE)\n n=min(n,self._compress_left)\n \n data=self._fileobj.read(n)\n self._compress_left -=len(data)\n if not data:\n raise EOFError\n \n if self._decrypter is not None :\n data=self._decrypter(data)\n return data\n \n def close(self):\n try :\n if self._close_fileobj:\n self._fileobj.close()\n finally :\n super().close()\n \n def seekable(self):\n if self.closed:\n raise ValueError(\"I/O operation on closed file.\")\n return self._seekable\n \n def seek(self,offset,whence=0):\n if self.closed:\n raise ValueError(\"seek on closed file.\")\n if not self._seekable:\n raise io.UnsupportedOperation(\"underlying stream is not seekable\")\n curr_pos=self.tell()\n if whence ==0:\n new_pos=offset\n elif whence ==1:\n new_pos=curr_pos+offset\n elif whence ==2:\n new_pos=self._orig_file_size+offset\n else :\n raise ValueError(\"whence must be os.SEEK_SET (0), \"\n \"os.SEEK_CUR (1), or os.SEEK_END (2)\")\n \n if new_pos >self._orig_file_size:\n new_pos=self._orig_file_size\n \n if new_pos <0:\n new_pos=0\n \n read_offset=new_pos -curr_pos\n buff_offset=read_offset+self._offset\n \n if buff_offset >=0 and buff_offset 0:\n read_len=min(self.MAX_SEEK_READ,read_offset)\n self.read(read_len)\n read_offset -=read_len\n \n return self.tell()\n \n def tell(self):\n if self.closed:\n raise ValueError(\"tell on closed file.\")\n if not self._seekable:\n raise io.UnsupportedOperation(\"underlying stream is not seekable\")\n filepos=self._orig_file_size -self._left -len(self._readbuffer)+self._offset\n return filepos\n \n \nclass _ZipWriteFile(io.BufferedIOBase):\n def __init__(self,zf,zinfo,zip64):\n self._zinfo=zinfo\n self._zip64=zip64\n self._zipfile=zf\n self._compressor=_get_compressor(zinfo.compress_type,\n zinfo._compresslevel)\n self._file_size=0\n self._compress_size=0\n self._crc=0\n \n @property\n def _fileobj(self):\n return self._zipfile.fp\n \n def writable(self):\n return True\n \n def write(self,data):\n if self.closed:\n raise ValueError('I/O operation on closed file.')\n nbytes=len(data)\n self._file_size +=nbytes\n self._crc=crc32(data,self._crc)\n if self._compressor:\n data=self._compressor.compress(data)\n self._compress_size +=len(data)\n self._fileobj.write(data)\n return nbytes\n \n def close(self):\n if self.closed:\n return\n try :\n super().close()\n \n if self._compressor:\n 
buf=self._compressor.flush()\n self._compress_size +=len(buf)\n self._fileobj.write(buf)\n self._zinfo.compress_size=self._compress_size\n else :\n self._zinfo.compress_size=self._file_size\n self._zinfo.CRC=self._crc\n self._zinfo.file_size=self._file_size\n \n \n if self._zinfo.flag_bits&0x08:\n \n fmt='ZIP64_LIMIT:\n raise RuntimeError(\n 'File size unexpectedly exceeded ZIP64 limit')\n if self._compress_size >ZIP64_LIMIT:\n raise RuntimeError(\n 'Compressed size unexpectedly exceeded ZIP64 limit')\n \n \n \n \n self._zipfile.start_dir=self._fileobj.tell()\n self._fileobj.seek(self._zinfo.header_offset)\n self._fileobj.write(self._zinfo.FileHeader(self._zip64))\n self._fileobj.seek(self._zipfile.start_dir)\n \n \n self._zipfile.filelist.append(self._zinfo)\n self._zipfile.NameToInfo[self._zinfo.filename]=self._zinfo\n finally :\n self._zipfile._writing=False\n \n \n \nclass ZipFile:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n fp=None\n _windows_illegal_name_trans_table=None\n \n def __init__(self,file,mode=\"r\",compression=ZIP_STORED,allowZip64=True ,\n compresslevel=None ,*,strict_timestamps=True ):\n ''\n \n if mode not in ('r','w','x','a'):\n raise ValueError(\"ZipFile requires mode 'r', 'w', 'x', or 'a'\")\n \n _check_compression(compression)\n \n self._allowZip64=allowZip64\n self._didModify=False\n self.debug=0\n self.NameToInfo={}\n self.filelist=[]\n self.compression=compression\n self.compresslevel=compresslevel\n self.mode=mode\n self.pwd=None\n self._comment=b''\n self._strict_timestamps=strict_timestamps\n \n \n if isinstance(file,os.PathLike):\n file=os.fspath(file)\n if isinstance(file,str):\n \n self._filePassed=0\n self.filename=file\n modeDict={'r':'rb','w':'w+b','x':'x+b','a':'r+b',\n 'r+b':'w+b','w+b':'wb','x+b':'xb'}\n filemode=modeDict[mode]\n while True :\n try :\n self.fp=io.open(file,filemode)\n except OSError:\n if filemode in modeDict:\n filemode=modeDict[filemode]\n continue\n raise\n break\n else :\n self._filePassed=1\n self.fp=file\n self.filename=getattr(file,'name',None )\n self._fileRefCnt=1\n self._lock=threading.RLock()\n self._seekable=True\n self._writing=False\n \n try :\n if mode =='r':\n self._RealGetContents()\n elif mode in ('w','x'):\n \n \n self._didModify=True\n try :\n self.start_dir=self.fp.tell()\n except (AttributeError,OSError):\n self.fp=_Tellable(self.fp)\n self.start_dir=0\n self._seekable=False\n else :\n \n try :\n self.fp.seek(self.start_dir)\n except (AttributeError,OSError):\n self._seekable=False\n elif mode =='a':\n try :\n \n self._RealGetContents()\n \n self.fp.seek(self.start_dir)\n except BadZipFile:\n \n self.fp.seek(0,2)\n \n \n \n self._didModify=True\n self.start_dir=self.fp.tell()\n else :\n raise ValueError(\"Mode must be 'r', 'w', 'x', or 'a'\")\n except :\n fp=self.fp\n self.fp=None\n self._fpclose(fp)\n raise\n \n def __enter__(self):\n return self\n \n def __exit__(self,type,value,traceback):\n self.close()\n \n def __repr__(self):\n result=['<%s.%s'%(self.__class__.__module__,\n self.__class__.__qualname__)]\n if self.fp is not None :\n if self._filePassed:\n result.append(' file=%r'%self.fp)\n elif self.filename is not None :\n result.append(' filename=%r'%self.filename)\n result.append(' mode=%r'%self.mode)\n else :\n result.append(' [closed]')\n result.append('>')\n return ''.join(result)\n \n def _RealGetContents(self):\n ''\n fp=self.fp\n try :\n endrec=_EndRecData(fp)\n except OSError:\n raise BadZipFile(\"File is not a zip file\")\n if not endrec:\n raise BadZipFile(\"File is not a zip file\")\n if 
self.debug >1:\n print(endrec)\n size_cd=endrec[_ECD_SIZE]\n offset_cd=endrec[_ECD_OFFSET]\n self._comment=endrec[_ECD_COMMENT]\n \n \n concat=endrec[_ECD_LOCATION]-size_cd -offset_cd\n if endrec[_ECD_SIGNATURE]==stringEndArchive64:\n \n concat -=(sizeEndCentDir64+sizeEndCentDir64Locator)\n \n if self.debug >2:\n inferred=concat+offset_cd\n print(\"given, inferred, offset\",offset_cd,inferred,concat)\n \n self.start_dir=offset_cd+concat\n fp.seek(self.start_dir,0)\n data=fp.read(size_cd)\n fp=io.BytesIO(data)\n total=0\n while total 2:\n print(centdir)\n filename=fp.read(centdir[_CD_FILENAME_LENGTH])\n flags=centdir[5]\n if flags&0x800:\n \n filename=filename.decode('utf-8')\n else :\n \n filename=filename.decode('cp437')\n \n x=ZipInfo(filename)\n x.extra=fp.read(centdir[_CD_EXTRA_FIELD_LENGTH])\n x.comment=fp.read(centdir[_CD_COMMENT_LENGTH])\n x.header_offset=centdir[_CD_LOCAL_HEADER_OFFSET]\n (x.create_version,x.create_system,x.extract_version,x.reserved,\n x.flag_bits,x.compress_type,t,d,\n x.CRC,x.compress_size,x.file_size)=centdir[1:12]\n if x.extract_version >MAX_EXTRACT_VERSION:\n raise NotImplementedError(\"zip file version %.1f\"%\n (x.extract_version /10))\n x.volume,x.internal_attr,x.external_attr=centdir[15:18]\n \n x._raw_time=t\n x.date_time=((d >>9)+1980,(d >>5)&0xF,d&0x1F,\n t >>11,(t >>5)&0x3F,(t&0x1F)*2)\n \n x._decodeExtra()\n x.header_offset=x.header_offset+concat\n self.filelist.append(x)\n self.NameToInfo[x.filename]=x\n \n \n total=(total+sizeCentralDir+centdir[_CD_FILENAME_LENGTH]\n +centdir[_CD_EXTRA_FIELD_LENGTH]\n +centdir[_CD_COMMENT_LENGTH])\n \n if self.debug >2:\n print(\"total\",total)\n \n \n def namelist(self):\n ''\n return [data.filename for data in self.filelist]\n \n def infolist(self):\n ''\n \n return self.filelist\n \n def printdir(self,file=None ):\n ''\n print(\"%-46s %19s %12s\"%(\"File Name\",\"Modified \",\"Size\"),\n file=file)\n for zinfo in self.filelist:\n date=\"%d-%02d-%02d %02d:%02d:%02d\"%zinfo.date_time[:6]\n print(\"%-46s %s %12d\"%(zinfo.filename,date,zinfo.file_size),\n file=file)\n \n def testzip(self):\n ''\n chunk_size=2 **20\n for zinfo in self.filelist:\n try :\n \n \n with self.open(zinfo.filename,\"r\")as f:\n while f.read(chunk_size):\n pass\n except BadZipFile:\n return zinfo.filename\n \n def getinfo(self,name):\n ''\n info=self.NameToInfo.get(name)\n if info is None :\n raise KeyError(\n 'There is no item named %r in the archive'%name)\n \n return info\n \n def setpassword(self,pwd):\n ''\n if pwd and not isinstance(pwd,bytes):\n raise TypeError(\"pwd: expected bytes, got %s\"%type(pwd).__name__)\n if pwd:\n self.pwd=pwd\n else :\n self.pwd=None\n \n @property\n def comment(self):\n ''\n return self._comment\n \n @comment.setter\n def comment(self,comment):\n if not isinstance(comment,bytes):\n raise TypeError(\"comment: expected bytes, got %s\"%type(comment).__name__)\n \n if len(comment)>ZIP_MAX_COMMENT:\n import warnings\n warnings.warn('Archive comment is too long; truncating to %d bytes'\n %ZIP_MAX_COMMENT,stacklevel=2)\n comment=comment[:ZIP_MAX_COMMENT]\n self._comment=comment\n self._didModify=True\n \n def read(self,name,pwd=None ):\n ''\n with self.open(name,\"r\",pwd)as fp:\n return fp.read()\n \n def open(self,name,mode=\"r\",pwd=None ,*,force_zip64=False ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if mode not in {\"r\",\"w\"}:\n raise ValueError('open() requires mode \"r\" or \"w\"')\n if pwd and not isinstance(pwd,bytes):\n raise TypeError(\"pwd: expected bytes, got %s\"%type(pwd).__name__)\n if pwd and (mode 
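# Illustrative sketch, not part of the bundled source: _RealGetContents above
# unpacks the packed 16-bit DOS date (d) and time (t) fields into the
# date_time tuple.  This standalone helper applies the same bit layout; the
# sample values below are arbitrary.
def dos_to_tuple(d, t):
    return ((d >> 9) + 1980, (d >> 5) & 0xF, d & 0x1F,
            t >> 11, (t >> 5) & 0x3F, (t & 0x1F) * 2)

# 2021-03-05 12:34:56 packed the DOS way, then decoded again:
d = ((2021 - 1980) << 9) | (3 << 5) | 5
t = (12 << 11) | (34 << 5) | (56 // 2)
assert dos_to_tuple(d, t) == (2021, 3, 5, 12, 34, 56)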
==\"w\"):\n raise ValueError(\"pwd is only supported for reading files\")\n if not self.fp:\n raise ValueError(\n \"Attempt to use ZIP archive that was already closed\")\n \n \n if isinstance(name,ZipInfo):\n \n zinfo=name\n elif mode =='w':\n zinfo=ZipInfo(name)\n zinfo.compress_type=self.compression\n zinfo._compresslevel=self.compresslevel\n else :\n \n zinfo=self.getinfo(name)\n \n if mode =='w':\n return self._open_to_write(zinfo,force_zip64=force_zip64)\n \n if self._writing:\n raise ValueError(\"Can't read from the ZIP file while there \"\n \"is an open writing handle on it. \"\n \"Close the writing handle before trying to read.\")\n \n \n self._fileRefCnt +=1\n zef_file=_SharedFile(self.fp,zinfo.header_offset,\n self._fpclose,self._lock,lambda :self._writing)\n try :\n \n fheader=zef_file.read(sizeFileHeader)\n if len(fheader)!=sizeFileHeader:\n raise BadZipFile(\"Truncated file header\")\n fheader=struct.unpack(structFileHeader,fheader)\n if fheader[_FH_SIGNATURE]!=stringFileHeader:\n raise BadZipFile(\"Bad magic number for file header\")\n \n fname=zef_file.read(fheader[_FH_FILENAME_LENGTH])\n if fheader[_FH_EXTRA_FIELD_LENGTH]:\n zef_file.read(fheader[_FH_EXTRA_FIELD_LENGTH])\n \n if zinfo.flag_bits&0x20:\n \n raise NotImplementedError(\"compressed patched data (flag bit 5)\")\n \n if zinfo.flag_bits&0x40:\n \n raise NotImplementedError(\"strong encryption (flag bit 6)\")\n \n if fheader[_FH_GENERAL_PURPOSE_FLAG_BITS]&0x800:\n \n fname_str=fname.decode(\"utf-8\")\n else :\n fname_str=fname.decode(\"cp437\")\n \n if fname_str !=zinfo.orig_filename:\n raise BadZipFile(\n 'File name in directory %r and header %r differ.'\n %(zinfo.orig_filename,fname))\n \n \n is_encrypted=zinfo.flag_bits&0x1\n if is_encrypted:\n if not pwd:\n pwd=self.pwd\n if not pwd:\n raise RuntimeError(\"File %r is encrypted, password \"\n \"required for extraction\"%name)\n else :\n pwd=None\n \n return ZipExtFile(zef_file,mode,zinfo,pwd,True )\n except :\n zef_file.close()\n raise\n \n def _open_to_write(self,zinfo,force_zip64=False ):\n if force_zip64 and not self._allowZip64:\n raise ValueError(\n \"force_zip64 is True, but allowZip64 was False when opening \"\n \"the ZIP file.\"\n )\n if self._writing:\n raise ValueError(\"Can't write to the ZIP file while there is \"\n \"another write handle open on it. 
\"\n \"Close the first handle before opening another.\")\n \n \n zinfo.compress_size=0\n zinfo.CRC=0\n \n zinfo.flag_bits=0x00\n if zinfo.compress_type ==ZIP_LZMA:\n \n zinfo.flag_bits |=0x02\n if not self._seekable:\n zinfo.flag_bits |=0x08\n \n if not zinfo.external_attr:\n zinfo.external_attr=0o600 <<16\n \n \n zip64=self._allowZip64 and\\\n (force_zip64 or zinfo.file_size *1.05 >ZIP64_LIMIT)\n \n if self._seekable:\n self.fp.seek(self.start_dir)\n zinfo.header_offset=self.fp.tell()\n \n self._writecheck(zinfo)\n self._didModify=True\n \n self.fp.write(zinfo.FileHeader(zip64))\n \n self._writing=True\n return _ZipWriteFile(self,zinfo,zip64)\n \n def extract(self,member,path=None ,pwd=None ):\n ''\n\n\n\n \n if path is None :\n path=os.getcwd()\n else :\n path=os.fspath(path)\n \n return self._extract_member(member,path,pwd)\n \n def extractall(self,path=None ,members=None ,pwd=None ):\n ''\n\n\n\n \n if members is None :\n members=self.namelist()\n \n if path is None :\n path=os.getcwd()\n else :\n path=os.fspath(path)\n \n for zipinfo in members:\n self._extract_member(zipinfo,path,pwd)\n \n @classmethod\n def _sanitize_windows_name(cls,arcname,pathsep):\n ''\n table=cls._windows_illegal_name_trans_table\n if not table:\n illegal=':<>|\"?*'\n table=str.maketrans(illegal,'_'*len(illegal))\n cls._windows_illegal_name_trans_table=table\n arcname=arcname.translate(table)\n \n arcname=(x.rstrip('.')for x in arcname.split(pathsep))\n \n arcname=pathsep.join(x for x in arcname if x)\n return arcname\n \n def _extract_member(self,member,targetpath,pwd):\n ''\n\n \n if not isinstance(member,ZipInfo):\n member=self.getinfo(member)\n \n \n \n arcname=member.filename.replace('/',os.path.sep)\n \n if os.path.altsep:\n arcname=arcname.replace(os.path.altsep,os.path.sep)\n \n \n arcname=os.path.splitdrive(arcname)[1]\n invalid_path_parts=('',os.path.curdir,os.path.pardir)\n arcname=os.path.sep.join(x for x in arcname.split(os.path.sep)\n if x not in invalid_path_parts)\n if os.path.sep =='\\\\':\n \n arcname=self._sanitize_windows_name(arcname,os.path.sep)\n \n targetpath=os.path.join(targetpath,arcname)\n targetpath=os.path.normpath(targetpath)\n \n \n upperdirs=os.path.dirname(targetpath)\n if upperdirs and not os.path.exists(upperdirs):\n os.makedirs(upperdirs)\n \n if member.is_dir():\n if not os.path.isdir(targetpath):\n os.mkdir(targetpath)\n return targetpath\n \n with self.open(member,pwd=pwd)as source,\\\n open(targetpath,\"wb\")as target:\n shutil.copyfileobj(source,target)\n \n return targetpath\n \n def _writecheck(self,zinfo):\n ''\n if zinfo.filename in self.NameToInfo:\n import warnings\n warnings.warn('Duplicate name: %r'%zinfo.filename,stacklevel=3)\n if self.mode not in ('w','x','a'):\n raise ValueError(\"write() requires mode 'w', 'x', or 'a'\")\n if not self.fp:\n raise ValueError(\n \"Attempt to write ZIP archive that was already closed\")\n _check_compression(zinfo.compress_type)\n if not self._allowZip64:\n requires_zip64=None\n if len(self.filelist)>=ZIP_FILECOUNT_LIMIT:\n requires_zip64=\"Files count\"\n elif zinfo.file_size >ZIP64_LIMIT:\n requires_zip64=\"Filesize\"\n elif zinfo.header_offset >ZIP64_LIMIT:\n requires_zip64=\"Zipfile size\"\n if requires_zip64:\n raise LargeZipFile(requires_zip64+\n \" would require ZIP64 extensions\")\n \n def write(self,filename,arcname=None ,\n compress_type=None ,compresslevel=None ):\n ''\n \n if not self.fp:\n raise ValueError(\n \"Attempt to write to ZIP archive that was already closed\")\n if self._writing:\n raise ValueError(\n 
\"Can't write to ZIP archive while an open writing handle exists\"\n )\n \n zinfo=ZipInfo.from_file(filename,arcname,\n strict_timestamps=self._strict_timestamps)\n \n if zinfo.is_dir():\n zinfo.compress_size=0\n zinfo.CRC=0\n else :\n if compress_type is not None :\n zinfo.compress_type=compress_type\n else :\n zinfo.compress_type=self.compression\n \n if compresslevel is not None :\n zinfo._compresslevel=compresslevel\n else :\n zinfo._compresslevel=self.compresslevel\n \n if zinfo.is_dir():\n with self._lock:\n if self._seekable:\n self.fp.seek(self.start_dir)\n zinfo.header_offset=self.fp.tell()\n if zinfo.compress_type ==ZIP_LZMA:\n \n zinfo.flag_bits |=0x02\n \n self._writecheck(zinfo)\n self._didModify=True\n \n self.filelist.append(zinfo)\n self.NameToInfo[zinfo.filename]=zinfo\n self.fp.write(zinfo.FileHeader(False ))\n self.start_dir=self.fp.tell()\n else :\n with open(filename,\"rb\")as src,self.open(zinfo,'w')as dest:\n shutil.copyfileobj(src,dest,1024 *8)\n \n def writestr(self,zinfo_or_arcname,data,\n compress_type=None ,compresslevel=None ):\n ''\n\n\n\n \n if isinstance(data,str):\n data=data.encode(\"utf-8\")\n if not isinstance(zinfo_or_arcname,ZipInfo):\n zinfo=ZipInfo(filename=zinfo_or_arcname,\n date_time=time.localtime(time.time())[:6])\n zinfo.compress_type=self.compression\n zinfo._compresslevel=self.compresslevel\n if zinfo.filename[-1]=='/':\n zinfo.external_attr=0o40775 <<16\n zinfo.external_attr |=0x10\n else :\n zinfo.external_attr=0o600 <<16\n else :\n zinfo=zinfo_or_arcname\n \n if not self.fp:\n raise ValueError(\n \"Attempt to write to ZIP archive that was already closed\")\n if self._writing:\n raise ValueError(\n \"Can't write to ZIP archive while an open writing handle exists.\"\n )\n \n if compress_type is not None :\n zinfo.compress_type=compress_type\n \n if compresslevel is not None :\n zinfo._compresslevel=compresslevel\n \n zinfo.file_size=len(data)\n with self._lock:\n with self.open(zinfo,mode='w')as dest:\n dest.write(data)\n \n def __del__(self):\n ''\n self.close()\n \n def close(self):\n ''\n \n if self.fp is None :\n return\n \n if self._writing:\n raise ValueError(\"Can't close the ZIP file while there is \"\n \"an open writing handle on it. 
\"\n \"Close the writing handle before closing the zip.\")\n \n try :\n if self.mode in ('w','x','a')and self._didModify:\n with self._lock:\n if self._seekable:\n self.fp.seek(self.start_dir)\n self._write_end_record()\n finally :\n fp=self.fp\n self.fp=None\n self._fpclose(fp)\n \n def _write_end_record(self):\n for zinfo in self.filelist:\n dt=zinfo.date_time\n dosdate=(dt[0]-1980)<<9 |dt[1]<<5 |dt[2]\n dostime=dt[3]<<11 |dt[4]<<5 |(dt[5]//2)\n extra=[]\n if zinfo.file_size >ZIP64_LIMIT\\\n or zinfo.compress_size >ZIP64_LIMIT:\n extra.append(zinfo.file_size)\n extra.append(zinfo.compress_size)\n file_size=0xffffffff\n compress_size=0xffffffff\n else :\n file_size=zinfo.file_size\n compress_size=zinfo.compress_size\n \n if zinfo.header_offset >ZIP64_LIMIT:\n extra.append(zinfo.header_offset)\n header_offset=0xffffffff\n else :\n header_offset=zinfo.header_offset\n \n extra_data=zinfo.extra\n min_version=0\n if extra:\n \n extra_data=_strip_extra(extra_data,(1,))\n extra_data=struct.pack(\n 'ZIP_FILECOUNT_LIMIT:\n requires_zip64=\"Files count\"\n elif centDirOffset >ZIP64_LIMIT:\n requires_zip64=\"Central directory offset\"\n elif centDirSize >ZIP64_LIMIT:\n requires_zip64=\"Central directory size\"\n if requires_zip64:\n \n if not self._allowZip64:\n raise LargeZipFile(requires_zip64+\n \" would require ZIP64 extensions\")\n zip64endrec=struct.pack(\n structEndArchive64,stringEndArchive64,\n 44,45,45,0,0,centDirCount,centDirCount,\n centDirSize,centDirOffset)\n self.fp.write(zip64endrec)\n \n zip64locrec=struct.pack(\n structEndArchive64Locator,\n stringEndArchive64Locator,0,pos2,1)\n self.fp.write(zip64locrec)\n centDirCount=min(centDirCount,0xFFFF)\n centDirSize=min(centDirSize,0xFFFFFFFF)\n centDirOffset=min(centDirOffset,0xFFFFFFFF)\n \n endrec=struct.pack(structEndArchive,stringEndArchive,\n 0,0,centDirCount,centDirCount,\n centDirSize,centDirOffset,len(self._comment))\n self.fp.write(endrec)\n self.fp.write(self._comment)\n self.fp.flush()\n \n def _fpclose(self,fp):\n assert self._fileRefCnt >0\n self._fileRefCnt -=1\n if not self._fileRefCnt and not self._filePassed:\n fp.close()\n \n \nclass PyZipFile(ZipFile):\n ''\n \n def __init__(self,file,mode=\"r\",compression=ZIP_STORED,\n allowZip64=True ,optimize=-1):\n ZipFile.__init__(self,file,mode=mode,compression=compression,\n allowZip64=allowZip64)\n self._optimize=optimize\n \n def writepy(self,pathname,basename=\"\",filterfunc=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n pathname=os.fspath(pathname)\n if filterfunc and not filterfunc(pathname):\n if self.debug:\n label='path'if os.path.isdir(pathname)else 'file'\n print('%s %r skipped by filterfunc'%(label,pathname))\n return\n dir,name=os.path.split(pathname)\n if os.path.isdir(pathname):\n initname=os.path.join(pathname,\"__init__.py\")\n if os.path.isfile(initname):\n \n if basename:\n basename=\"%s/%s\"%(basename,name)\n else :\n basename=name\n if self.debug:\n print(\"Adding package in\",pathname,\"as\",basename)\n fname,arcname=self._get_codename(initname[0:-3],basename)\n if self.debug:\n print(\"Adding\",arcname)\n self.write(fname,arcname)\n dirlist=sorted(os.listdir(pathname))\n dirlist.remove(\"__init__.py\")\n \n for filename in dirlist:\n path=os.path.join(pathname,filename)\n root,ext=os.path.splitext(filename)\n if os.path.isdir(path):\n if os.path.isfile(os.path.join(path,\"__init__.py\")):\n \n self.writepy(path,basename,\n filterfunc=filterfunc)\n elif ext ==\".py\":\n if filterfunc and not filterfunc(path):\n if self.debug:\n print('file %r skipped by 
filterfunc'%path)\n continue\n fname,arcname=self._get_codename(path[0:-3],\n basename)\n if self.debug:\n print(\"Adding\",arcname)\n self.write(fname,arcname)\n else :\n \n if self.debug:\n print(\"Adding files from directory\",pathname)\n for filename in sorted(os.listdir(pathname)):\n path=os.path.join(pathname,filename)\n root,ext=os.path.splitext(filename)\n if ext ==\".py\":\n if filterfunc and not filterfunc(path):\n if self.debug:\n print('file %r skipped by filterfunc'%path)\n continue\n fname,arcname=self._get_codename(path[0:-3],\n basename)\n if self.debug:\n print(\"Adding\",arcname)\n self.write(fname,arcname)\n else :\n if pathname[-3:]!=\".py\":\n raise RuntimeError(\n 'Files added with writepy() must end with \".py\"')\n fname,arcname=self._get_codename(pathname[0:-3],basename)\n if self.debug:\n print(\"Adding file\",arcname)\n self.write(fname,arcname)\n \n def _get_codename(self,pathname,basename):\n ''\n\n\n\n\n \n def _compile(file,optimize=-1):\n import py_compile\n if self.debug:\n print(\"Compiling\",file)\n try :\n py_compile.compile(file,doraise=True ,optimize=optimize)\n except py_compile.PyCompileError as err:\n print(err.msg)\n return False\n return True\n \n file_py=pathname+\".py\"\n file_pyc=pathname+\".pyc\"\n pycache_opt0=importlib.util.cache_from_source(file_py,optimization='')\n pycache_opt1=importlib.util.cache_from_source(file_py,optimization=1)\n pycache_opt2=importlib.util.cache_from_source(file_py,optimization=2)\n if self._optimize ==-1:\n \n if (os.path.isfile(file_pyc)and\n os.stat(file_pyc).st_mtime >=os.stat(file_py).st_mtime):\n \n arcname=fname=file_pyc\n elif (os.path.isfile(pycache_opt0)and\n os.stat(pycache_opt0).st_mtime >=os.stat(file_py).st_mtime):\n \n \n fname=pycache_opt0\n arcname=file_pyc\n elif (os.path.isfile(pycache_opt1)and\n os.stat(pycache_opt1).st_mtime >=os.stat(file_py).st_mtime):\n \n \n fname=pycache_opt1\n arcname=file_pyc\n elif (os.path.isfile(pycache_opt2)and\n os.stat(pycache_opt2).st_mtime >=os.stat(file_py).st_mtime):\n \n \n fname=pycache_opt2\n arcname=file_pyc\n else :\n \n if _compile(file_py):\n if sys.flags.optimize ==0:\n fname=pycache_opt0\n elif sys.flags.optimize ==1:\n fname=pycache_opt1\n else :\n fname=pycache_opt2\n arcname=file_pyc\n else :\n fname=arcname=file_py\n else :\n \n if self._optimize ==0:\n fname=pycache_opt0\n arcname=file_pyc\n else :\n arcname=file_pyc\n if self._optimize ==1:\n fname=pycache_opt1\n elif self._optimize ==2:\n fname=pycache_opt2\n else :\n msg=\"invalid value for 'optimize': {!r}\".format(self._optimize)\n raise ValueError(msg)\n if not (os.path.isfile(fname)and\n os.stat(fname).st_mtime >=os.stat(file_py).st_mtime):\n if not _compile(file_py,optimize=self._optimize):\n fname=arcname=file_py\n archivename=os.path.split(arcname)[1]\n if basename:\n archivename=\"%s/%s\"%(basename,archivename)\n return (fname,archivename)\n \n \ndef _parents(path):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n return itertools.islice(_ancestry(path),1,None )\n \n \ndef _ancestry(path):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n path=path.rstrip(posixpath.sep)\n while path and path !=posixpath.sep:\n yield path\n path,tail=posixpath.split(path)\n \n \n_dedupe=dict.fromkeys\n''\n\n\ndef _difference(minuend,subtrahend):\n ''\n\n\n \n return itertools.filterfalse(set(subtrahend).__contains__,minuend)\n \n \nclass CompleteDirs(ZipFile):\n ''\n\n\n \n \n @staticmethod\n def _implied_dirs(names):\n parents=itertools.chain.from_iterable(map(_parents,names))\n as_dirs=(p+posixpath.sep for p in parents)\n return 
_dedupe(_difference(as_dirs,names))\n \n def namelist(self):\n names=super(CompleteDirs,self).namelist()\n return names+list(self._implied_dirs(names))\n \n def _name_set(self):\n return set(self.namelist())\n \n def resolve_dir(self,name):\n ''\n\n\n \n names=self._name_set()\n dirname=name+'/'\n dir_match=name not in names and dirname in names\n return dirname if dir_match else name\n \n @classmethod\n def make(cls,source):\n ''\n\n\n \n if isinstance(source,CompleteDirs):\n return source\n \n if not isinstance(source,ZipFile):\n return cls(source)\n \n \n if 'r'not in source.mode:\n cls=CompleteDirs\n \n res=cls.__new__(cls)\n vars(res).update(vars(source))\n return res\n \n \nclass FastLookup(CompleteDirs):\n ''\n\n\n \n def namelist(self):\n with contextlib.suppress(AttributeError):\n return self.__names\n self.__names=super(FastLookup,self).namelist()\n return self.__names\n \n def _name_set(self):\n with contextlib.suppress(AttributeError):\n return self.__lookup\n self.__lookup=super(FastLookup,self)._name_set()\n return self.__lookup\n \n \nclass Path:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n __repr=\"{self.__class__.__name__}({self.root.filename!r}, {self.at!r})\"\n \n def __init__(self,root,at=\"\"):\n self.root=FastLookup.make(root)\n self.at=at\n \n def open(self,mode='r',*args,**kwargs):\n ''\n\n\n\n \n pwd=kwargs.pop('pwd',None )\n zip_mode=mode[0]\n stream=self.root.open(self.at,zip_mode,pwd=pwd)\n if 'b'in mode:\n if args or kwargs:\n raise ValueError(\"encoding args invalid for binary operation\")\n return stream\n return io.TextIOWrapper(stream,*args,**kwargs)\n \n @property\n def name(self):\n return posixpath.basename(self.at.rstrip(\"/\"))\n \n def read_text(self,*args,**kwargs):\n with self.open('r',*args,**kwargs)as strm:\n return strm.read()\n \n def read_bytes(self):\n with self.open('rb')as strm:\n return strm.read()\n \n def _is_child(self,path):\n return posixpath.dirname(path.at.rstrip(\"/\"))==self.at.rstrip(\"/\")\n \n def _next(self,at):\n return Path(self.root,at)\n \n def is_dir(self):\n return not self.at or self.at.endswith(\"/\")\n \n def is_file(self):\n return not self.is_dir()\n \n def exists(self):\n return self.at in self.root._name_set()\n \n def iterdir(self):\n if not self.is_dir():\n raise ValueError(\"Can't listdir a file\")\n subs=map(self._next,self.root.namelist())\n return filter(self._is_child,subs)\n \n def __str__(self):\n return posixpath.join(self.root.filename,self.at)\n \n def __repr__(self):\n return self.__repr.format(self=self)\n \n def joinpath(self,add):\n next=posixpath.join(self.at,add)\n return self._next(self.root.resolve_dir(next))\n \n __truediv__=joinpath\n \n @property\n def parent(self):\n parent_at=posixpath.dirname(self.at.rstrip('/'))\n if parent_at:\n parent_at +='/'\n return self._next(parent_at)\n \n \ndef main(args=None ):\n import argparse\n \n description='A simple command-line interface for zipfile module.'\n parser=argparse.ArgumentParser(description=description)\n group=parser.add_mutually_exclusive_group(required=True )\n group.add_argument('-l','--list',metavar='',\n help='Show listing of a zipfile')\n group.add_argument('-e','--extract',nargs=2,\n metavar=('',''),\n help='Extract zipfile into target dir')\n group.add_argument('-c','--create',nargs='+',\n metavar=('',''),\n help='Create zipfile from sources')\n group.add_argument('-t','--test',metavar='',\n help='Test if a zipfile is valid')\n 
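# Illustrative sketch, not part of the bundled source: the command-line main()
# in this zipfile module maps -c/-l/-t/-e onto the ZipFile API defined above;
# the same operations can be done directly.  "demo.zip", "notes.txt" and "out"
# are placeholder names.
import zipfile

with zipfile.ZipFile("demo.zip", "w", compression=zipfile.ZIP_DEFLATED) as zf:
    zf.writestr("notes.txt", "hello")          # roughly what -c does per member

with zipfile.ZipFile("demo.zip") as zf:
    zf.printdir()                              # -l
    print("first bad member:", zf.testzip())   # -t; None means every CRC checked out
    zf.extractall("out")                       # -e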
args=parser.parse_args(args)\n \n if args.test is not None :\n src=args.test\n with ZipFile(src,'r')as zf:\n badfile=zf.testzip()\n if badfile:\n print(\"The following enclosed file is corrupted: {!r}\".format(badfile))\n print(\"Done testing\")\n \n elif args.list is not None :\n src=args.list\n with ZipFile(src,'r')as zf:\n zf.printdir()\n \n elif args.extract is not None :\n src,curdir=args.extract\n with ZipFile(src,'r')as zf:\n zf.extractall(curdir)\n \n elif args.create is not None :\n zip_name=args.create.pop(0)\n files=args.create\n \n def addToZip(zf,path,zippath):\n if os.path.isfile(path):\n zf.write(path,zippath,ZIP_DEFLATED)\n elif os.path.isdir(path):\n if zippath:\n zf.write(path,zippath)\n for nm in sorted(os.listdir(path)):\n addToZip(zf,\n os.path.join(path,nm),os.path.join(zippath,nm))\n \n \n with ZipFile(zip_name,'w')as zf:\n for path in files:\n zippath=os.path.basename(path)\n if not zippath:\n zippath=os.path.basename(os.path.dirname(path))\n if zippath in ('',os.curdir,os.pardir):\n zippath=''\n addToZip(zf,path,zippath)\n \n \nif __name__ ==\"__main__\":\n main()\n", ["argparse", "binascii", "bz2", "contextlib", "importlib.util", "io", "itertools", "lzma", "os", "posixpath", "py_compile", "shutil", "stat", "struct", "sys", "threading", "time", "warnings", "zlib"]], "zipimport": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nimport _frozen_importlib_external as _bootstrap_external\nfrom _frozen_importlib_external import _unpack_uint16,_unpack_uint32\nimport _frozen_importlib as _bootstrap\nimport _imp\nimport _io\nimport marshal\nimport sys\nimport time\n\n__all__=['ZipImportError','zipimporter']\n\n\npath_sep=_bootstrap_external.path_sep\nalt_path_sep=_bootstrap_external.path_separators[1:]\n\n\nclass ZipImportError(ImportError):\n pass\n \n \n_zip_directory_cache={}\n\n_module_type=type(sys)\n\nEND_CENTRAL_DIR_SIZE=22\nSTRING_END_ARCHIVE=b'PK\\x05\\x06'\nMAX_COMMENT_LEN=(1 <<16)-1\n\nclass zipimporter:\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n \n \n \n \n def __init__(self,path):\n if not isinstance(path,str):\n import os\n path=os.fsdecode(path)\n if not path:\n raise ZipImportError('archive path is empty',path=path)\n if alt_path_sep:\n path=path.replace(alt_path_sep,path_sep)\n \n prefix=[]\n while True :\n try :\n st=_bootstrap_external._path_stat(path)\n except (OSError,ValueError):\n \n \n dirname,basename=_bootstrap_external._path_split(path)\n if dirname ==path:\n raise ZipImportError('not a Zip file',path=path)\n path=dirname\n prefix.append(basename)\n else :\n \n if (st.st_mode&0o170000)!=0o100000:\n \n raise ZipImportError('not a Zip file',path=path)\n break\n \n try :\n files=_zip_directory_cache[path]\n except KeyError:\n files=_read_directory(path)\n _zip_directory_cache[path]=files\n self._files=files\n self.archive=path\n \n self.prefix=_bootstrap_external._path_join(*prefix[::-1])\n if self.prefix:\n self.prefix +=path_sep\n \n \n \n \n \n \n \n def find_loader(self,fullname,path=None ):\n ''\n\n\n\n\n\n\n\n \n mi=_get_module_info(self,fullname)\n if mi is not None :\n \n return self,[]\n \n \n \n \n \n \n modpath=_get_module_path(self,fullname)\n if _is_dir(self,modpath):\n \n \n \n return None ,[f'{self.archive}{path_sep}{modpath}']\n \n return None ,[]\n \n \n \n \n def find_module(self,fullname,path=None ):\n ''\n\n\n\n\n\n\n \n return self.find_loader(fullname,path)[0]\n \n \n def get_code(self,fullname):\n ''\n\n\n\n \n code,ispackage,modpath=_get_module_code(self,fullname)\n return code\n \n \n def get_data(self,pathname):\n ''\n\n\n\n \n if 
alt_path_sep:\n pathname=pathname.replace(alt_path_sep,path_sep)\n \n key=pathname\n if pathname.startswith(self.archive+path_sep):\n key=pathname[len(self.archive+path_sep):]\n \n try :\n toc_entry=self._files[key]\n except KeyError:\n raise OSError(0,'',key)\n return _get_data(self.archive,toc_entry)\n \n \n \n def get_filename(self,fullname):\n ''\n\n\n \n \n \n code,ispackage,modpath=_get_module_code(self,fullname)\n return modpath\n \n \n def get_source(self,fullname):\n ''\n\n\n\n\n \n mi=_get_module_info(self,fullname)\n if mi is None :\n raise ZipImportError(f\"can't find module {fullname!r}\",name=fullname)\n \n path=_get_module_path(self,fullname)\n if mi:\n fullpath=_bootstrap_external._path_join(path,'__init__.py')\n else :\n fullpath=f'{path}.py'\n \n try :\n toc_entry=self._files[fullpath]\n except KeyError:\n \n return None\n return _get_data(self.archive,toc_entry).decode()\n \n \n \n def is_package(self,fullname):\n ''\n\n\n\n \n mi=_get_module_info(self,fullname)\n if mi is None :\n raise ZipImportError(f\"can't find module {fullname!r}\",name=fullname)\n return mi\n \n \n \n def load_module(self,fullname):\n ''\n\n\n\n\n \n code,ispackage,modpath=_get_module_code(self,fullname)\n mod=sys.modules.get(fullname)\n if mod is None or not isinstance(mod,_module_type):\n mod=_module_type(fullname)\n sys.modules[fullname]=mod\n mod.__loader__=self\n \n try :\n if ispackage:\n \n \n path=_get_module_path(self,fullname)\n fullpath=_bootstrap_external._path_join(self.archive,path)\n mod.__path__=[fullpath]\n \n if not hasattr(mod,'__builtins__'):\n mod.__builtins__=__builtins__\n _bootstrap_external._fix_up_module(mod.__dict__,fullname,modpath)\n exec(code,mod.__dict__)\n except :\n del sys.modules[fullname]\n raise\n \n try :\n mod=sys.modules[fullname]\n except KeyError:\n raise ImportError(f'Loaded module {fullname!r} not found in sys.modules')\n _bootstrap._verbose_message('import {} # loaded from Zip {}',fullname,modpath)\n return mod\n \n \n def get_resource_reader(self,fullname):\n ''\n\n\n\n \n try :\n if not self.is_package(fullname):\n return None\n except ZipImportError:\n return None\n if not _ZipImportResourceReader._registered:\n from importlib.abc import ResourceReader\n ResourceReader.register(_ZipImportResourceReader)\n _ZipImportResourceReader._registered=True\n return _ZipImportResourceReader(self,fullname)\n \n \n def __repr__(self):\n return f''\n \n \n \n \n \n \n \n_zip_searchorder=(\n(path_sep+'__init__.pyc',True ,True ),\n(path_sep+'__init__.py',False ,True ),\n('.pyc',True ,False ),\n('.py',False ,False ),\n)\n\n\n\ndef _get_module_path(self,fullname):\n return self.prefix+fullname.rpartition('.')[2]\n \n \ndef _is_dir(self,path):\n\n\n\n dirpath=path+path_sep\n \n return dirpath in self._files\n \n \ndef _get_module_info(self,fullname):\n path=_get_module_path(self,fullname)\n for suffix,isbytecode,ispackage in _zip_searchorder:\n fullpath=path+suffix\n if fullpath in self._files:\n return ispackage\n return None\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \ndef _read_directory(archive):\n try :\n fp=_io.open_code(archive)\n except OSError:\n raise ZipImportError(f\"can't open Zip file: {archive!r}\",path=archive)\n \n with fp:\n try :\n fp.seek(-END_CENTRAL_DIR_SIZE,2)\n header_position=fp.tell()\n buffer=fp.read(END_CENTRAL_DIR_SIZE)\n except OSError:\n raise ZipImportError(f\"can't read Zip file: {archive!r}\",path=archive)\n if len(buffer)!=END_CENTRAL_DIR_SIZE:\n raise ZipImportError(f\"can't read Zip file: 
{archive!r}\",path=archive)\n if buffer[:4]!=STRING_END_ARCHIVE:\n \n \n try :\n fp.seek(0,2)\n file_size=fp.tell()\n except OSError:\n raise ZipImportError(f\"can't read Zip file: {archive!r}\",\n path=archive)\n max_comment_start=max(file_size -MAX_COMMENT_LEN -\n END_CENTRAL_DIR_SIZE,0)\n try :\n fp.seek(max_comment_start)\n data=fp.read()\n except OSError:\n raise ZipImportError(f\"can't read Zip file: {archive!r}\",\n path=archive)\n pos=data.rfind(STRING_END_ARCHIVE)\n if pos <0:\n raise ZipImportError(f'not a Zip file: {archive!r}',\n path=archive)\n buffer=data[pos:pos+END_CENTRAL_DIR_SIZE]\n if len(buffer)!=END_CENTRAL_DIR_SIZE:\n raise ZipImportError(f\"corrupt Zip file: {archive!r}\",\n path=archive)\n header_position=file_size -len(data)+pos\n \n header_size=_unpack_uint32(buffer[12:16])\n header_offset=_unpack_uint32(buffer[16:20])\n if header_position header_offset:\n raise ZipImportError(f'bad local header offset: {archive!r}',path=archive)\n file_offset +=arc_offset\n \n try :\n name=fp.read(name_size)\n except OSError:\n raise ZipImportError(f\"can't read Zip file: {archive!r}\",path=archive)\n if len(name)!=name_size:\n raise ZipImportError(f\"can't read Zip file: {archive!r}\",path=archive)\n \n \n \n try :\n if len(fp.read(header_size -name_size))!=header_size -name_size:\n raise ZipImportError(f\"can't read Zip file: {archive!r}\",path=archive)\n except OSError:\n raise ZipImportError(f\"can't read Zip file: {archive!r}\",path=archive)\n \n if flags&0x800:\n \n name=name.decode()\n else :\n \n try :\n name=name.decode('ascii')\n except UnicodeDecodeError:\n name=name.decode('latin1').translate(cp437_table)\n \n name=name.replace('/',path_sep)\n path=_bootstrap_external._path_join(archive,name)\n t=(path,compress,data_size,file_size,file_offset,time,date,crc)\n files[name]=t\n count +=1\n _bootstrap._verbose_message('zipimport: found {} names in {!r}',count,archive)\n return files\n \n \n \n \n \n \n \ncp437_table=(\n\n'\\x00\\x01\\x02\\x03\\x04\\x05\\x06\\x07\\x08\\t\\n\\x0b\\x0c\\r\\x0e\\x0f'\n'\\x10\\x11\\x12\\x13\\x14\\x15\\x16\\x17\\x18\\x19\\x1a\\x1b\\x1c\\x1d\\x1e\\x1f'\n' !\"#$%&\\'()*+,-./'\n'0123456789:;<=>?'\n'@ABCDEFGHIJKLMNO'\n'PQRSTUVWXYZ[\\\\]^_'\n'`abcdefghijklmno'\n'pqrstuvwxyz{|}~\\x7f'\n\n'\\xc7\\xfc\\xe9\\xe2\\xe4\\xe0\\xe5\\xe7'\n'\\xea\\xeb\\xe8\\xef\\xee\\xec\\xc4\\xc5'\n'\\xc9\\xe6\\xc6\\xf4\\xf6\\xf2\\xfb\\xf9'\n'\\xff\\xd6\\xdc\\xa2\\xa3\\xa5\\u20a7\\u0192'\n'\\xe1\\xed\\xf3\\xfa\\xf1\\xd1\\xaa\\xba'\n'\\xbf\\u2310\\xac\\xbd\\xbc\\xa1\\xab\\xbb'\n'\\u2591\\u2592\\u2593\\u2502\\u2524\\u2561\\u2562\\u2556'\n'\\u2555\\u2563\\u2551\\u2557\\u255d\\u255c\\u255b\\u2510'\n'\\u2514\\u2534\\u252c\\u251c\\u2500\\u253c\\u255e\\u255f'\n'\\u255a\\u2554\\u2569\\u2566\\u2560\\u2550\\u256c\\u2567'\n'\\u2568\\u2564\\u2565\\u2559\\u2558\\u2552\\u2553\\u256b'\n'\\u256a\\u2518\\u250c\\u2588\\u2584\\u258c\\u2590\\u2580'\n'\\u03b1\\xdf\\u0393\\u03c0\\u03a3\\u03c3\\xb5\\u03c4'\n'\\u03a6\\u0398\\u03a9\\u03b4\\u221e\\u03c6\\u03b5\\u2229'\n'\\u2261\\xb1\\u2265\\u2264\\u2320\\u2321\\xf7\\u2248'\n'\\xb0\\u2219\\xb7\\u221a\\u207f\\xb2\\u25a0\\xa0'\n)\n\n_importing_zlib=False\n\n\n\n\ndef _get_decompress_func():\n global _importing_zlib\n if _importing_zlib:\n \n \n _bootstrap._verbose_message('zipimport: zlib UNAVAILABLE')\n raise ZipImportError(\"can't decompress data; zlib not available\")\n \n _importing_zlib=True\n try :\n from zlib import decompress\n except Exception:\n _bootstrap._verbose_message('zipimport: zlib UNAVAILABLE')\n raise ZipImportError(\"can't 
decompress data; zlib not available\")\n finally :\n _importing_zlib=False\n \n _bootstrap._verbose_message('zipimport: zlib available')\n return decompress\n \n \ndef _get_data(archive,toc_entry):\n datapath,compress,data_size,file_size,file_offset,time,date,crc=toc_entry\n if data_size <0:\n raise ZipImportError('negative data size')\n \n with _io.open_code(archive)as fp:\n \n try :\n fp.seek(file_offset)\n except OSError:\n raise ZipImportError(f\"can't read Zip file: {archive!r}\",path=archive)\n buffer=fp.read(30)\n if len(buffer)!=30:\n raise EOFError('EOF read where not expected')\n \n if buffer[:4]!=b'PK\\x03\\x04':\n \n raise ZipImportError(f'bad local file header: {archive!r}',path=archive)\n \n name_size=_unpack_uint16(buffer[26:28])\n extra_size=_unpack_uint16(buffer[28:30])\n header_size=30+name_size+extra_size\n file_offset +=header_size\n try :\n fp.seek(file_offset)\n except OSError:\n raise ZipImportError(f\"can't read Zip file: {archive!r}\",path=archive)\n raw_data=fp.read(data_size)\n if len(raw_data)!=data_size:\n raise OSError(\"zipimport: can't read data\")\n \n if compress ==0:\n \n return raw_data\n \n \n try :\n decompress=_get_decompress_func()\n except Exception:\n raise ZipImportError(\"can't decompress data; zlib not available\")\n return decompress(raw_data,-15)\n \n \n \n \n \ndef _eq_mtime(t1,t2):\n\n return abs(t1 -t2)<=1\n \n \n \n \n \n \n \ndef _unmarshal_code(self,pathname,fullpath,fullname,data):\n exc_details={\n 'name':fullname,\n 'path':fullpath,\n }\n \n try :\n flags=_bootstrap_external._classify_pyc(data,fullname,exc_details)\n except ImportError:\n return None\n \n hash_based=flags&0b1 !=0\n if hash_based:\n check_source=flags&0b10 !=0\n if (_imp.check_hash_based_pycs !='never'and\n (check_source or _imp.check_hash_based_pycs =='always')):\n source_bytes=_get_pyc_source(self,fullpath)\n if source_bytes is not None :\n source_hash=_imp.source_hash(\n _bootstrap_external._RAW_MAGIC_NUMBER,\n source_bytes,\n )\n \n try :\n _bootstrap_external._validate_hash_pyc(\n data,source_hash,fullname,exc_details)\n except ImportError:\n return None\n else :\n source_mtime,source_size=\\\n _get_mtime_and_size_of_source(self,fullpath)\n \n if source_mtime:\n \n \n if (not _eq_mtime(_unpack_uint32(data[8:12]),source_mtime)or\n _unpack_uint32(data[12:16])!=source_size):\n _bootstrap._verbose_message(\n f'bytecode is stale for {fullname!r}')\n return None\n \n code=marshal.loads(data[16:])\n if not isinstance(code,_code_type):\n raise TypeError(f'compiled module {pathname!r} is not a code object')\n return code\n \n_code_type=type(_unmarshal_code.__code__)\n\n\n\n\ndef _normalize_line_endings(source):\n source=source.replace(b'\\r\\n',b'\\n')\n source=source.replace(b'\\r',b'\\n')\n return source\n \n \n \ndef _compile_source(pathname,source):\n source=_normalize_line_endings(source)\n return compile(source,pathname,'exec',dont_inherit=True )\n \n \n \ndef _parse_dostime(d,t):\n return time.mktime((\n (d >>9)+1980,\n (d >>5)&0xF,\n d&0x1F,\n t >>11,\n (t >>5)&0x3F,\n (t&0x1F)*2,\n -1,-1,-1))\n \n \n \n \ndef _get_mtime_and_size_of_source(self,path):\n try :\n \n assert path[-1:]in ('c','o')\n path=path[:-1]\n toc_entry=self._files[path]\n \n \n time=toc_entry[5]\n date=toc_entry[6]\n uncompressed_size=toc_entry[3]\n return _parse_dostime(date,time),uncompressed_size\n except (KeyError,IndexError,TypeError):\n return 0,0\n \n \n \n \n \ndef _get_pyc_source(self,path):\n\n assert path[-1:]in ('c','o')\n path=path[:-1]\n \n try :\n toc_entry=self._files[path]\n 
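# Illustrative sketch, not part of the bundled source: the zipimporter class
# above resolves and loads modules straight out of a .zip archive.  The archive
# and module names here are placeholders; is_package() and get_source() are
# methods defined earlier in this module.
import zipfile, zipimport

with zipfile.ZipFile("pkg.zip", "w") as zf:
    zf.writestr("hello.py", "GREETING = 'hi from a zip'\n")

imp = zipimport.zipimporter("pkg.zip")
print(imp.is_package("hello"))       # False: a plain module, no __init__.py
print(imp.get_source("hello"))       # source text read back through get_data()
# Adding "pkg.zip" to sys.path would let `import hello` go through the same machinery.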
except KeyError:\n return None\n else :\n return _get_data(self.archive,toc_entry)\n \n \n \n \ndef _get_module_code(self,fullname):\n path=_get_module_path(self,fullname)\n for suffix,isbytecode,ispackage in _zip_searchorder:\n fullpath=path+suffix\n _bootstrap._verbose_message('trying {}{}{}',self.archive,path_sep,fullpath,verbosity=2)\n try :\n toc_entry=self._files[fullpath]\n except KeyError:\n pass\n else :\n modpath=toc_entry[0]\n data=_get_data(self.archive,toc_entry)\n if isbytecode:\n code=_unmarshal_code(self,modpath,fullpath,fullname,data)\n else :\n code=_compile_source(modpath,data)\n if code is None :\n \n \n continue\n modpath=toc_entry[0]\n return code,ispackage,modpath\n else :\n raise ZipImportError(f\"can't find module {fullname!r}\",name=fullname)\n \n \nclass _ZipImportResourceReader:\n ''\n\n\n\n \n _registered=False\n \n def __init__(self,zipimporter,fullname):\n self.zipimporter=zipimporter\n self.fullname=fullname\n \n def open_resource(self,resource):\n fullname_as_path=self.fullname.replace('.','/')\n path=f'{fullname_as_path}/{resource}'\n from io import BytesIO\n try :\n return BytesIO(self.zipimporter.get_data(path))\n except OSError:\n raise FileNotFoundError(path)\n \n def resource_path(self,resource):\n \n \n \n raise FileNotFoundError\n \n def is_resource(self,name):\n \n \n fullname_as_path=self.fullname.replace('.','/')\n path=f'{fullname_as_path}/{name}'\n try :\n self.zipimporter.get_data(path)\n except OSError:\n return False\n return True\n \n def contents(self):\n \n \n \n \n \n \n \n from pathlib import Path\n fullname_path=Path(self.zipimporter.get_filename(self.fullname))\n relative_path=fullname_path.relative_to(self.zipimporter.archive)\n \n \n assert relative_path.name =='__init__.py'\n package_path=relative_path.parent\n subdirs_seen=set()\n for filename in self.zipimporter._files:\n try :\n relative=Path(filename).relative_to(package_path)\n except ValueError:\n continue\n \n \n \n \n parent_name=relative.parent.name\n if len(parent_name)==0:\n yield relative.name\n elif parent_name not in subdirs_seen:\n subdirs_seen.add(parent_name)\n yield parent_name\n", ["_frozen_importlib", "_frozen_importlib_external", "_imp", "_io", "importlib.abc", "io", "marshal", "os", "pathlib", "sys", "time", "zlib"]], "zlib": [".py", "from _zlib_utils import lz_generator,crc32\n\nclass BitIO:\n\n def __init__(self,bytestream=b''):\n self.bytestream=bytearray(bytestream)\n self.bytenum=0\n self.bitnum=0\n \n @property\n def pos(self):\n return self.bytenum *8+self.bitnum\n \n def read(self,nb,order=\"lsf\",trace=False ):\n result=0\n coef=1 if order ==\"lsf\"else 2 **(nb -1)\n for _ in range(nb):\n if self.bitnum ==8:\n if self.bytenum ==len(self.bytestream)-1:\n return None\n self.bytenum +=1\n self.bitnum=0\n mask=2 **self.bitnum\n if trace:\n print(\"bit\",int(bool(mask&self.bytestream[self.bytenum])))\n result +=coef *bool(mask&self.bytestream[self.bytenum])\n self.bitnum +=1\n if order ==\"lsf\":\n coef *=2\n else :\n coef //=2\n return result\n \n def move(self,nb):\n if nb ==0:\n return\n elif nb >0:\n bitpos=self.bitnum+nb\n while bitpos >7:\n self.bytenum +=1\n if self.bytenum ==len(self.bytestream):\n raise Exception(\"can't move {} bits\".format(nb))\n bitpos -=8\n self.bitnum=bitpos\n else :\n bitpos=self.bitnum+nb\n while bitpos <0:\n self.bytenum -=1\n if self.bytenum ==-1:\n raise Exception(\"can't move {} bits\".format(nb))\n bitpos +=8\n self.bitnum=bitpos\n \n def show(self):\n res=\"\"\n for x in self.bytestream:\n s=str(bin(x))[2:]\n 
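# Illustrative sketch, not part of the bundled source: BitIO above reads and
# writes integers least-significant bit first (order="lsf"), the bit order
# DEFLATE uses for most fields.  This tiny helper shows that ordering.
def lsb_first(value, nbits):
    return [(value >> i) & 1 for i in range(nbits)]

assert lsb_first(0b1011, 4) == [1, 1, 0, 1]   # low-order bit comes out first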
s=\"0\"*(8 -len(s))+s\n res +=s+\" \"\n return res\n \n def write(self,*bits):\n for bit in bits:\n if not self.bytestream:\n self.bytestream.append(0)\n byte=self.bytestream[self.bytenum]\n if self.bitnum ==8:\n if self.bytenum ==len(self.bytestream)-1:\n byte=0\n self.bytestream +=bytes([byte])\n self.bytenum +=1\n self.bitnum=0\n mask=2 **self.bitnum\n if bit:\n byte |=mask\n else :\n byte &=~mask\n self.bytestream[self.bytenum]=byte\n self.bitnum +=1\n \n def write_int(self,value,nb,order=\"lsf\"):\n ''\n if value >=2 **nb:\n raise ValueError(\"can't write value on {} bits\".format(nb))\n bits=[]\n while value:\n bits.append(value&1)\n value >>=1\n \n bits=bits+[0]*(nb -len(bits))\n if order !=\"lsf\":\n bits.reverse()\n assert len(bits)==nb\n self.write(*bits)\n \nclass ResizeError(Exception):\n pass\n \n \nclass Node:\n\n def __init__(self,char=None ,weight=0,level=0):\n self.char=char\n self.is_leaf=char is not None\n self.level=level\n self.weight=weight\n \n def add(self,children):\n self.children=children\n for child in self.children:\n child.parent=self\n child.level=self.level+1\n \n \nclass Tree:\n\n def __init__(self,root):\n self.root=root\n \n def length(self):\n self.root.level=0\n node=self.root\n nb_levels=0\n def set_level(node):\n nonlocal nb_levels\n for child in node.children:\n child.level=node.level+1\n nb_levels=max(nb_levels,child.level)\n if not child.is_leaf:\n set_level(child)\n set_level(self.root)\n return nb_levels\n \n def reduce_tree(self):\n ''\n\n\n \n currentlen=self.length()\n deepest=self.nodes_at(currentlen)\n deepest_leaves=[node for node in deepest if node.is_leaf]\n rightmost_leaf=deepest_leaves[-1]\n sibling=rightmost_leaf.parent.children[0]\n \n \n parent=rightmost_leaf.parent\n grand_parent=parent.parent\n rank=grand_parent.children.index(parent)\n children=grand_parent.children\n children[rank]=rightmost_leaf\n grand_parent.add(children)\n \n \n up_level=rightmost_leaf.level -2\n while up_level >0:\n nodes=self.nodes_at(up_level)\n leaf_nodes=[node for node in nodes if node.is_leaf]\n if leaf_nodes:\n leftmost_leaf=leaf_nodes[0]\n \n parent=leftmost_leaf.parent\n rank=parent.children.index(leftmost_leaf)\n new_node=Node()\n new_node.level=leftmost_leaf.level\n children=[sibling,leftmost_leaf]\n new_node.add(children)\n parent.children[rank]=new_node\n new_node.parent=parent\n break\n else :\n up_level -=1\n if up_level ==0:\n raise ResizeError\n \n def nodes_at(self,level,top=None ):\n ''\n res=[]\n if top is None :\n top=self.root\n if top.level ==level:\n res=[top]\n elif not top.is_leaf:\n for child in top.children:\n res +=self.nodes_at(level,child)\n return res\n \n def reduce(self,maxlevels):\n ''\n while self.length()>maxlevels:\n self.reduce_tree()\n \n def codes(self,node=None ,code=''):\n ''\n \n if node is None :\n self.dic={}\n node=self.root\n if node.is_leaf:\n self.dic[node.char]=code\n else :\n for i,child in enumerate(node.children):\n self.codes(child,code+str(i))\n return self.dic\n \n \ndef codelengths_from_frequencies(freqs):\n ''\n\n\n\n \n freqs=sorted(freqs.items(),\n key=lambda item:(item[1],-item[0]),reverse=True )\n nodes=[Node(char=key,weight=value)for (key,value)in freqs]\n while len(nodes)>1:\n right,left=nodes.pop(),nodes.pop()\n node=Node(weight=right.weight+left.weight)\n node.add([left,right])\n if not nodes:\n nodes.append(node)\n else :\n pos=0\n while pos node.weight:\n pos +=1\n nodes.insert(pos,node)\n \n top=nodes[0]\n tree=Tree(top)\n tree.reduce(15)\n \n codes=tree.codes()\n \n 
code_items=list(codes.items())\n code_items.sort(key=lambda item:(len(item[1]),item[0]))\n return [(car,len(value))for car,value in code_items]\n \ndef normalized(codelengths):\n car,codelength=codelengths[0]\n value=0\n codes={car:\"0\"*codelength}\n \n for (newcar,nbits)in codelengths[1:]:\n value +=1\n bvalue=str(bin(value))[2:]\n bvalue=\"0\"*(codelength -len(bvalue))+bvalue\n if nbits >codelength:\n codelength=nbits\n bvalue +=\"0\"*(codelength -len(bvalue))\n value=int(bvalue,2)\n assert len(bvalue)==nbits\n codes[newcar]=bvalue\n \n return codes\n \ndef make_tree(node,codes):\n if not hasattr(node,\"parent\"):\n node.code=''\n children=[]\n for bit in '01':\n next_code=node.code+bit\n if next_code in codes:\n child=Node(char=codes[next_code])\n else :\n child=Node()\n child.code=next_code\n children.append(child)\n node.add(children)\n for child in children:\n if not child.is_leaf:\n make_tree(child,codes)\n \ndef decompresser(codelengths):\n lengths=list(codelengths.items())\n \n lengths=[x for x in lengths if x[1]>0]\n lengths.sort(key=lambda item:(item[1],item[0]))\n codes=normalized(lengths)\n codes={value:key for key,value in codes.items()}\n root=Node()\n make_tree(root,codes)\n return {\"root\":root,\"codes\":codes}\n \ndef tree_from_codelengths(codelengths):\n return decompresser(codelengths)[\"root\"]\n \nclass error(Exception):\n pass\n \n \nfixed_codelengths={}\nfor car in range(144):\n fixed_codelengths[car]=8\nfor car in range(144,256):\n fixed_codelengths[car]=9\nfor car in range(256,280):\n fixed_codelengths[car]=7\nfor car in range(280,288):\n fixed_codelengths[car]=8\n \nfixed_decomp=decompresser(fixed_codelengths)\nfixed_lit_len_tree=fixed_decomp[\"root\"]\nfixed_lit_len_codes={value:key\nfor (key,value)in fixed_decomp[\"codes\"].items()}\n\ndef cl_encode(lengths):\n ''\n \n dic={char:len(code)for (char,code)in lengths.items()}\n items=[dic.get(i,0)for i in range(max(dic)+1)]\n pos=0\n while pos 3:\n yield (16,3)\n nb -=3\n yield (16,nb)\n pos +=repeat+1\n \ndef read_codelengths(reader,root,num):\n ''\n\n \n node=root\n lengths=[]\n nb=0\n while len(lengths)256:\n \n if child.char <265:\n length=child.char -254\n elif child.char <269:\n length=11+2 *(child.char -265)+reader.read(1)\n elif child.char <273:\n length=19+4 *(child.char -269)+reader.read(2)\n elif child.char <277:\n length=35+8 *(child.char -273)+reader.read(3)\n elif child.char <281:\n length=67+16 *(child.char -277)+reader.read(4)\n elif child.char <285:\n length=131+31 *(child.char -281)+reader.read(5)\n elif child.char ==285:\n length=258\n return (\"length\",length)\n else :\n node=child\n \ndef adler32(source):\n a=1\n b=0\n for byte in source:\n a +=byte\n a %=65521\n b +=a\n b %=65521\n return a,b\n \n \ndef compress_dynamic(out,source,store,lit_len_count,distance_count):\n\n\n lit_len_count[256]=1\n \n \n \n \n \n \n lit_len_codes=normalized(codelengths_from_frequencies(lit_len_count))\n HLIT=1+max(lit_len_codes)-257\n \n \n \n \n \n \n coded_lit_len=list(cl_encode(lit_len_codes))\n \n \n distance_codes=normalized(codelengths_from_frequencies(distance_count))\n HDIST=max(distance_codes)\n coded_distance=list(cl_encode(distance_codes))\n \n \n codelengths_count={}\n for coded in coded_lit_len,coded_distance:\n for item in coded:\n length=item[0]if isinstance(item,tuple)else item\n codelengths_count[length]=codelengths_count.get(length,0)+1\n \n \n codelengths_codes=normalized(\n codelengths_from_frequencies(codelengths_count))\n codelengths_dict={char:len(value)\n for (char,value)in 
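# Illustrative sketch, not part of the bundled source: adler32() above returns
# the two 16-bit halves (a, b); compress() writes them to the trailer as
# b1, b2, a1, a2 and decompress() checks them.  Combined as (b << 16) | a they
# form the conventional single-integer Adler-32 checksum.
def adler32_pair(data):
    a, b = 1, 0
    for byte in data:
        a = (a + byte) % 65521
        b = (b + a) % 65521
    return a, b

a, b = adler32_pair(b"hello world")
print(hex((b << 16) | a))            # the usual combined form of the checksum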
codelengths_codes.items()}\n \n alphabet=(16,17,18,0,8,7,9,6,10,5,11,4,12,3,13,2,14,1,\n 15)\n \n \n codelengths_list=[codelengths_dict.get(car,0)for car in alphabet]\n \n while codelengths_list[-1]==0:\n codelengths_list.pop()\n HCLEN=len(codelengths_list)-4\n \n out.write(0,1)\n \n out.write_int(HLIT,5)\n out.write_int(HDIST,5)\n out.write_int(HCLEN,4)\n \n \n for length,car in zip(codelengths_list,alphabet):\n out.write_int(length,3)\n \n \n for item in coded_lit_len+coded_distance:\n if isinstance(item,tuple):\n length,extra=item\n code=codelengths_codes[length]\n value,nbits=int(code,2),len(code)\n out.write_int(value,nbits,order=\"msf\")\n if length ==16:\n out.write_int(extra,2)\n elif length ==17:\n out.write_int(extra,3)\n elif length ==18:\n out.write_int(extra,7)\n else :\n code=codelengths_codes[item]\n value,nbits=int(code,2),len(code)\n out.write_int(value,nbits,order=\"msf\")\n \n \n for item in store:\n if isinstance(item,tuple):\n length,extra_length,distance,extra_distance=item\n \n code=lit_len_codes[length]\n value,nb=int(code,2),len(code)\n out.write_int(value,nb,order=\"msf\")\n \n value,nb=extra_length\n if nb:\n out.write_int(value,nb)\n \n code=distance_codes[distance]\n value,nb=int(code,2),len(code)\n out.write_int(value,nb,order=\"msf\")\n \n value,nb=extra_distance\n if nb:\n out.write_int(value,nb)\n else :\n literal=item\n code=lit_len_codes[item]\n value,nb=int(code,2),len(code)\n out.write_int(value,nb,order=\"msf\")\n \ndef compress_fixed(out,source,items):\n ''\n print(\"fixed\",items)\n out.write(1,0)\n \n for item in items:\n if isinstance(item,tuple):\n length,extra_length,distance,extra_distance=item\n \n code=fixed_lit_len_codes[length]\n value,nb=int(code,2),len(code)\n out.write_int(value,nb,order=\"msf\")\n \n value,nb=extra_length\n if nb:\n out.write_int(value,nb)\n \n out.write_int(distance,5,order=\"msf\")\n \n value,nb=extra_distance\n if nb:\n out.write_int(value,nb)\n else :\n literal=item\n code=fixed_lit_len_codes[item]\n value,nb=int(code,2),len(code)\n out.write_int(value,nb,order=\"msf\")\n \ndef compress(source,window_size=32 *1024):\n\n\n\n lit_len_count={}\n \n \n distance_count={}\n \n store=[]\n replaced=0\n nb_tuples=0\n \n for item in lz_generator(source,window_size):\n if isinstance(item,tuple):\n nb_tuples +=1\n length,distance=item\n replaced +=length\n \n \n length_code,*extra_length=length_to_code(length)\n \n lit_len_count[length_code]=lit_len_count.get(length_code,0)+1\n \n \n \n distance_code,*extra_dist=distance_to_code(distance)\n \n distance_count[distance_code]=\\\n distance_count.get(distance_code,0)+1\n \n \n store.append((length_code,extra_length,distance_code,\n extra_dist))\n else :\n literal=item\n lit_len_count[literal]=lit_len_count.get(literal,0)+1\n store.append(literal)\n \n store.append(256)\n \n \n \n \n score=replaced -100 -(nb_tuples *20 //8)\n \n \n out=BitIO()\n \n \n out.write_int(8,4)\n size=window_size >>8\n nb=0\n while size >1:\n size >>=1\n nb +=1\n out.write_int(nb,4)\n out.write_int(0x9c,8)\n \n out.write(1)\n \n if score <0:\n compress_fixed(out,source,store)\n else :\n compress_dynamic(out,source,store,lit_len_count,distance_count)\n \n \n while out.bitnum !=8:\n out.write(0)\n \n \n a,b=adler32(source)\n a1,a2=divmod(a,256)\n b1,b2=divmod(b,256)\n out.write_int(b1,8)\n out.write_int(b2,8)\n out.write_int(a1,8)\n out.write_int(a2,8)\n \n return bytes(out.bytestream)\n \ndef decompress(buf):\n reader=BitIO(buf)\n \n CM=reader.read(4)\n if CM !=8:\n raise error(\"unsupported compression 
method: {}\".format(CM))\n \n CINFO=reader.read(4)\n \n FLG=reader.read(8)\n \n result=bytearray()\n \n while True :\n BFINAL=reader.read(1)\n \n BTYPE=reader.read(2)\n \n if BTYPE ==0b01:\n \n \n root=fixed_lit_len_tree\n \n while True :\n \n _type,value=read_literal_or_length(reader,root)\n if _type =='eob':\n break\n elif _type =='literal':\n result.append(value)\n elif _type =='length':\n length=value\n \n dist_code=reader.read(5,\"msf\")\n if dist_code <3:\n distance=dist_code+1\n else :\n nb=(dist_code //2)-1\n extra=reader.read(nb)\n half,delta=divmod(dist_code,2)\n distance=(1+(2 **half)+\n delta *(2 **(half -1))+extra)\n for _ in range(length):\n result.append(result[-distance])\n \n node=root\n else :\n node=child\n \n elif BTYPE ==0b10:\n \n \n \n lit_len_tree,distance_tree=dynamic_trees(reader)\n \n while True :\n \n _type,value=read_literal_or_length(reader,lit_len_tree)\n if _type =='eob':\n break\n elif _type =='literal':\n result.append(value)\n elif _type =='length':\n \n length=value\n distance=read_distance(reader,distance_tree)\n for _ in range(length):\n result.append(result[-distance])\n \n if BFINAL:\n \n b1=256 *buf[-4]+buf[-3]\n a1=256 *buf[-2]+buf[-1]\n \n a,b=adler32(result)\n \n assert a ==a1\n assert b ==b1\n \n return bytes(result)\n", ["_zlib_utils"]], "_abcoll": [".py", "\n\n\n\"\"\"Abstract Base Classes (ABCs) for collections, according to PEP 3119.\n\nDON'T USE THIS MODULE DIRECTLY! The classes here should be imported\nvia collections; they are defined here only to alleviate certain\nbootstrapping issues. Unit tests are in test_collections.\n\"\"\"\n\n\n\n\nimport sys\n\n__all__=[\"Hashable\",\"Iterable\",\"Iterator\",\n\"Sized\",\"Container\",\"Callable\",\n\"Set\",\"MutableSet\",\n\"Mapping\",\"MutableMapping\",\n\"MappingView\",\"KeysView\",\"ItemsView\",\"ValuesView\",\n\"Sequence\",\"MutableSequence\",\n\"ByteString\",\n]\n\n\"\"\"\n### collection related types which are not exposed through builtin ###\n## iterators ##\n#fixme brython\n#bytes_iterator = type(iter(b''))\nbytes_iterator = type(iter(''))\n#fixme brython\n#bytearray_iterator = type(iter(bytearray()))\n#callable_iterator = ???\ndict_keyiterator = type(iter({}.keys()))\ndict_valueiterator = type(iter({}.values()))\ndict_itemiterator = type(iter({}.items()))\nlist_iterator = type(iter([]))\nlist_reverseiterator = type(iter(reversed([])))\nrange_iterator = type(iter(range(0)))\nset_iterator = type(iter(set()))\nstr_iterator = type(iter(\"\"))\ntuple_iterator = type(iter(()))\nzip_iterator = type(iter(zip()))\n## views ##\ndict_keys = type({}.keys())\ndict_values = type({}.values())\ndict_items = type({}.items())\n## misc ##\ndict_proxy = type(type.__dict__)\n\"\"\"\n\ndef abstractmethod(self):\n return self\n \n \n \n \n \nclass Iterable:\n\n @abstractmethod\n def __iter__(self):\n while False :\n yield None\n \n @classmethod\n def __subclasshook__(cls,C):\n if cls is Iterable:\n if any(\"__iter__\"in B.__dict__ for B in C.__mro__):\n return True\n return NotImplemented\n \n \n \nclass Sized:\n\n @abstractmethod\n def __len__(self):\n return 0\n \n @classmethod\n def __subclasshook__(cls,C):\n if cls is Sized:\n if any(\"__len__\"in B.__dict__ for B in C.__mro__):\n return True\n return NotImplemented\n \n \n \nclass Container:\n\n @abstractmethod\n def __contains__(self,x):\n return False\n \n @classmethod\n def __subclasshook__(cls,C):\n if cls is Container:\n if any(\"__contains__\"in B.__dict__ for B in C.__mro__):\n return True\n return NotImplemented\n \n \n \n \nclass 
Mapping(Sized,Iterable,Container):\n\n @abstractmethod\n def __getitem__(self,key):\n raise KeyError\n \n def get(self,key,default=None ):\n try :\n return self[key]\n except KeyError:\n return default\n \n def __contains__(self,key):\n try :\n self[key]\n except KeyError:\n return False\n else :\n return True\n \n def keys(self):\n return KeysView(self)\n \n def items(self):\n return ItemsView(self)\n \n def values(self):\n return ValuesView(self)\n \n def __eq__(self,other):\n if not isinstance(other,Mapping):\n return NotImplemented\n return dict(self.items())==dict(other.items())\n \n def __ne__(self,other):\n return not (self ==other)\n \n \nclass MutableMapping(Mapping):\n\n @abstractmethod\n def __setitem__(self,key,value):\n raise KeyError\n \n @abstractmethod\n def __delitem__(self,key):\n raise KeyError\n \n __marker=object()\n \n def pop(self,key,default=__marker):\n try :\n value=self[key]\n except KeyError:\n if default is self.__marker:\n raise\n return default\n else :\n del self[key]\n return value\n \n def popitem(self):\n try :\n key=next(iter(self))\n except StopIteration:\n raise KeyError\n value=self[key]\n del self[key]\n return key,value\n \n def clear(self):\n try :\n while True :\n self.popitem()\n except KeyError:\n pass\n \n def update(*args,**kwds):\n if len(args)>2:\n raise TypeError(\"update() takes at most 2 positional \"\n \"arguments ({} given)\".format(len(args)))\n elif not args:\n raise TypeError(\"update() takes at least 1 argument (0 given)\")\n self=args[0]\n other=args[1]if len(args)>=2 else ()\n \n if isinstance(other,Mapping):\n for key in other:\n self[key]=other[key]\n elif hasattr(other,\"keys\"):\n for key in other.keys():\n self[key]=other[key]\n else :\n for key,value in other:\n self[key]=value\n for key,value in kwds.items():\n self[key]=value\n \n def setdefault(self,key,default=None ):\n try :\n return self[key]\n except KeyError:\n self[key]=default\n return default\n \n \n", ["sys"]], "_codecs": [".py", "\ndef ascii_decode(*args,**kw):\n pass\n \ndef ascii_encode(*args,**kw):\n pass\n \ndef charbuffer_encode(*args,**kw):\n pass\n \ndef charmap_build(decoding_table):\n return {car:i for (i,car)in enumerate(decoding_table)}\n \ndef charmap_decode(input,errors,decoding_table):\n res=''\n for car in input:\n code=decoding_table[car]\n if code is None :\n raise UnicodeDecodeError(input)\n res +=code\n return res,len(input)\n \ndef charmap_encode(input,errors,encoding_table):\n t=[]\n for car in input:\n code=encoding_table.get(car)\n if code is None :\n raise UnicodeEncodeError(input)\n t.append(code)\n return bytes(t),len(input)\n \ndef decode(obj,encoding=\"utf-8\",errors=\"strict\"):\n ''\n\n\n\n\n\n \n return __BRYTHON__.decode(obj,encoding,errors)\n \ndef encode(obj,encoding=\"utf-8\",errors=\"strict\"):\n ''\n\n\n\n\n\n \n return __BRYTHON__.encode(obj,encoding,errors)\n \ndef escape_decode(*args,**kw):\n pass\n \ndef escape_encode(*args,**kw):\n pass\n \ndef latin_1_decode(*args,**kw):\n pass\n \ndef latin_1_encode(*args,**kw):\n pass\n \ndef lookup(encoding):\n ''\n\n \n if encoding in ('utf-8','utf_8'):\n from browser import console\n import encodings.utf_8\n return encodings.utf_8.getregentry()\n \n LookupError(encoding)\n \ndef lookup_error(*args,**kw):\n ''\n\n \n pass\n \ndef mbcs_decode(*args,**kw):\n pass\n \ndef mbcs_encode(*args,**kw):\n pass\n \ndef raw_unicode_escape_decode(*args,**kw):\n pass\n \ndef raw_unicode_escape_encode(*args,**kw):\n pass\n \ndef readbuffer_encode(*args,**kw):\n pass\n \ndef register(*args,**kw):\n 
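# Illustrative sketch, not part of the bundled source: the Mapping /
# MutableMapping ABCs above supply get(), pop(), setdefault(), update() and
# friends as mixins once the core access methods exist.  The standard
# collections.abc versions behave the same way; MiniMapping is a made-up name.
from collections.abc import MutableMapping

class MiniMapping(MutableMapping):
    def __init__(self):
        self._data = {}
    def __getitem__(self, key):
        return self._data[key]
    def __setitem__(self, key, value):
        self._data[key] = value
    def __delitem__(self, key):
        del self._data[key]
    def __iter__(self):
        return iter(self._data)
    def __len__(self):
        return len(self._data)

m = MiniMapping()
m.update(a=1)                        # update() is inherited from the ABC mixin
assert m.setdefault("b", 2) == 2 and m.pop("a") == 1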
''\n\n\n\n \n pass\n \ndef register_error(*args,**kw):\n ''\n\n\n\n\n \n pass\n \ndef unicode_escape_decode(*args,**kw):\n pass\n \ndef unicode_escape_encode(*args,**kw):\n pass\n \ndef unicode_internal_decode(*args,**kw):\n pass\n \ndef unicode_internal_encode(*args,**kw):\n pass\n \ndef utf_16_be_decode(*args,**kw):\n pass\n \ndef utf_16_be_encode(*args,**kw):\n pass\n \ndef utf_16_decode(*args,**kw):\n pass\n \ndef utf_16_encode(*args,**kw):\n pass\n \ndef utf_16_ex_decode(*args,**kw):\n pass\n \ndef utf_16_le_decode(*args,**kw):\n pass\n \ndef utf_16_le_encode(*args,**kw):\n pass\n \ndef utf_32_be_decode(*args,**kw):\n pass\n \ndef utf_32_be_encode(*args,**kw):\n pass\n \ndef utf_32_decode(*args,**kw):\n pass\n \ndef utf_32_encode(*args,**kw):\n pass\n \ndef utf_32_ex_decode(*args,**kw):\n pass\n \ndef utf_32_le_decode(*args,**kw):\n pass\n \ndef utf_32_le_encode(*args,**kw):\n pass\n \ndef utf_7_decode(*args,**kw):\n pass\n \ndef utf_7_encode(*args,**kw):\n pass\n \ndef utf_8_decode(decoder,bytes_obj,errors,*args):\n return (bytes_obj.decode(\"utf-8\"),len(bytes_obj))\n \ndef utf_8_encode(*args,**kw):\n input=args[0]\n if len(args)==2:\n errors=args[1]\n else :\n errors=kw.get('errors','strict')\n \n \n return (bytes(input,'utf-8'),len(input))\n", ["browser", "encodings.utf_8"]], "_collections": [".py", "\n\n\n\n\n\n\n\n\n\nimport operator\n\n\n\ndef _thread_ident():\n return -1\n \n \nn=30\nLFTLNK=n\nRGTLNK=n+1\nBLOCKSIZ=n+2\n\n\n\n\n\n\n\n\nclass deque:\n\n def __new__(cls,iterable=(),*args,**kw):\n \n \n self=object.__new__(cls,*args,**kw)\n self.clear()\n return self\n \n def __init__(self,iterable=(),maxlen=None ):\n object.__init__(self)\n self.clear()\n if maxlen is not None :\n if maxlen <0:\n raise ValueError(\"maxlen must be non-negative\")\n self._maxlen=maxlen\n add=self.append\n for elem in iterable:\n add(elem)\n \n @property\n def maxlen(self):\n return self._maxlen\n \n def clear(self):\n self.right=self.left=[None ]*BLOCKSIZ\n self.rightndx=n //2\n self.leftndx=n //2+1\n self.length=0\n self.state=0\n \n def append(self,x):\n self.state +=1\n self.rightndx +=1\n if self.rightndx ==n:\n newblock=[None ]*BLOCKSIZ\n self.right[RGTLNK]=newblock\n newblock[LFTLNK]=self.right\n self.right=newblock\n self.rightndx=0\n self.length +=1\n self.right[self.rightndx]=x\n if self.maxlen is not None and self.length >self.maxlen:\n self.popleft()\n \n def appendleft(self,x):\n self.state +=1\n self.leftndx -=1\n if self.leftndx ==-1:\n newblock=[None ]*BLOCKSIZ\n self.left[LFTLNK]=newblock\n newblock[RGTLNK]=self.left\n self.left=newblock\n self.leftndx=n -1\n self.length +=1\n self.left[self.leftndx]=x\n if self.maxlen is not None and self.length >self.maxlen:\n self.pop()\n \n def extend(self,iterable):\n if iterable is self:\n iterable=list(iterable)\n for elem in iterable:\n self.append(elem)\n \n def extendleft(self,iterable):\n if iterable is self:\n iterable=list(iterable)\n for elem in iterable:\n self.appendleft(elem)\n \n def pop(self):\n if self.left is self.right and self.leftndx >self.rightndx:\n \n raise IndexError(\"pop from an empty deque\")\n x=self.right[self.rightndx]\n self.right[self.rightndx]=None\n self.length -=1\n self.rightndx -=1\n self.state +=1\n if self.rightndx ==-1:\n prevblock=self.right[LFTLNK]\n if prevblock is None :\n \n self.rightndx=n //2\n self.leftndx=n //2+1\n else :\n prevblock[RGTLNK]=None\n self.right[LFTLNK]=None\n self.right=prevblock\n self.rightndx=n -1\n return x\n \n def popleft(self):\n if self.left is self.right and self.leftndx 
>self.rightndx:\n \n raise IndexError(\"pop from an empty deque\")\n x=self.left[self.leftndx]\n self.left[self.leftndx]=None\n self.length -=1\n self.leftndx +=1\n self.state +=1\n if self.leftndx ==n:\n prevblock=self.left[RGTLNK]\n if prevblock is None :\n \n self.rightndx=n //2\n self.leftndx=n //2+1\n else :\n prevblock[LFTLNK]=None\n self.left[RGTLNK]=None\n self.left=prevblock\n self.leftndx=0\n return x\n \n def count(self,value):\n c=0\n for item in self:\n if item ==value:\n c +=1\n return c\n \n def remove(self,value):\n \n for i in range(len(self)):\n if self[i]==value:\n del self[i]\n return\n raise ValueError(\"deque.remove(x): x not in deque\")\n \n def rotate(self,n=1):\n length=len(self)\n if length ==0:\n return\n halflen=(length+1)>>1\n if n >halflen or n <-halflen:\n n %=length\n if n >halflen:\n n -=length\n elif n <-halflen:\n n +=length\n while n >0:\n self.appendleft(self.pop())\n n -=1\n while n <0:\n self.append(self.popleft())\n n +=1\n \n def reverse(self):\n ''\n leftblock=self.left\n rightblock=self.right\n leftindex=self.leftndx\n rightindex=self.rightndx\n for i in range(self.length //2):\n \n assert leftblock !=rightblock or leftindex =0:\n block=self.left\n while block:\n l,r=0,n\n if block is self.left:\n l=self.leftndx\n if block is self.right:\n r=self.rightndx+1\n span=r -l\n if index =negative_span:\n return block,r+index\n index -=negative_span\n block=block[LFTLNK]\n raise IndexError(\"deque index out of range\")\n \n def __getitem__(self,index):\n block,index=self.__getref(index)\n return block[index]\n \n def __setitem__(self,index,value):\n block,index=self.__getref(index)\n block[index]=value\n \n def __delitem__(self,index):\n length=len(self)\n if index >=0:\n if index >=length:\n raise IndexError(\"deque index out of range\")\n self.rotate(-index)\n self.popleft()\n self.rotate(index)\n else :\n \n index=index ^(2 **31)\n if index >=length:\n raise IndexError(\"deque index out of range\")\n self.rotate(index)\n self.pop()\n self.rotate(-index)\n \n def __reduce_ex__(self,proto):\n return type(self),(list(self),self.maxlen)\n \n def __hash__(self):\n \n raise TypeError(\"deque objects are unhashable\")\n \n def __copy__(self):\n return self.__class__(self,self.maxlen)\n \n \n def __eq__(self,other):\n if isinstance(other,deque):\n return list(self)==list(other)\n else :\n return NotImplemented\n \n def __ne__(self,other):\n if isinstance(other,deque):\n return list(self)!=list(other)\n else :\n return NotImplemented\n \n def __lt__(self,other):\n if isinstance(other,deque):\n return list(self)list(other)\n else :\n return NotImplemented\n \n def __ge__(self,other):\n if isinstance(other,deque):\n return list(self)>=list(other)\n else :\n return NotImplemented\n \n def __iadd__(self,other):\n self.extend(other)\n return self\n \n \nclass deque_iterator(object):\n\n def __init__(self,deq,itergen):\n self.counter=len(deq)\n def giveup():\n self.counter=0\n \n raise RuntimeError(\"deque mutated during iteration\")\n self._gen=itergen(deq.state,giveup)\n \n def __next__(self):\n res=self._gen.__next__()\n self.counter -=1\n return res\n \n def __iter__(self):\n return self\n \nclass defaultdict(dict):\n\n def __init__(self,*args,**kwds):\n if len(args)>0:\n default_factory=args[0]\n args=args[1:]\n if not callable(default_factory)and default_factory is not None :\n raise TypeError(\"first argument must be callable\")\n else :\n default_factory=None\n dict.__init__(self,*args,**kwds)\n self.default_factory=default_factory\n 
self.update(*args,**kwds)\n super(defaultdict,self).__init__(*args,**kwds)\n \n def __missing__(self,key):\n \n if self.default_factory is None :\n raise KeyError(key)\n self[key]=value=self.default_factory()\n return value\n \n def __repr__(self,recurse=set()):\n if id(self)in recurse:\n return \"defaultdict(...)\"\n try :\n recurse.add(id(self))\n return \"defaultdict(%s, %s)\"%(repr(self.default_factory),super(defaultdict,self).__repr__())\n finally :\n recurse.remove(id(self))\n \n def copy(self):\n return type(self)(self.default_factory,self)\n \n def __copy__(self):\n return self.copy()\n \n def __reduce__(self):\n \n \n \n \n \n \n \n \n \n \n \n return (type(self),(self.default_factory,),None ,None ,self.items())\n \nfrom operator import itemgetter as _itemgetter\nfrom keyword import iskeyword as _iskeyword\nimport sys as _sys\n\ndef namedtuple(typename,field_names,verbose=False ,rename=False ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n \n \n if isinstance(field_names,str):\n field_names=field_names.replace(',',' ').split()\n field_names=tuple(map(str,field_names))\n if rename:\n names=list(field_names)\n seen=set()\n for i,name in enumerate(names):\n if (not min(c.isalnum()or c =='_'for c in name)or _iskeyword(name)\n or not name or name[0].isdigit()or name.startswith('_')\n or name in seen):\n names[i]='_%d'%i\n seen.add(name)\n field_names=tuple(names)\n for name in (typename,)+field_names:\n if not min(c.isalnum()or c =='_'for c in name):\n raise ValueError('Type names and field names can only contain alphanumeric characters and underscores: %r'%name)\n if _iskeyword(name):\n raise ValueError('Type names and field names cannot be a keyword: %r'%name)\n if name[0].isdigit():\n raise ValueError('Type names and field names cannot start with a number: %r'%name)\n seen_names=set()\n for name in field_names:\n if name.startswith('_')and not rename:\n raise ValueError('Field names cannot start with an underscore: %r'%name)\n if name in seen_names:\n raise ValueError('Encountered duplicate field name: %r'%name)\n seen_names.add(name)\n \n \n numfields=len(field_names)\n argtxt=repr(field_names).replace(\"'\",\"\")[1:-1]\n reprtxt=', '.join('%s=%%r'%name for name in field_names)\n \n template='''class %(typename)s(tuple):\n '%(typename)s(%(argtxt)s)' \\n\n __slots__ = () \\n\n _fields = %(field_names)r \\n\n def __new__(_cls, %(argtxt)s):\n return tuple.__new__(_cls, (%(argtxt)s)) \\n\n @classmethod\n def _make(cls, iterable, new=tuple.__new__, len=len):\n 'Make a new %(typename)s object from a sequence or iterable'\n result = new(cls, iterable)\n if len(result) != %(numfields)d:\n raise TypeError('Expected %(numfields)d arguments, got %%d' %% len(result))\n return result \\n\n def __repr__(self):\n return '%(typename)s(%(reprtxt)s)' %% self \\n\n def _asdict(self):\n 'Return a new dict which maps field names to their values'\n return dict(zip(self._fields, self)) \\n\n def _replace(_self, **kwds):\n 'Return a new %(typename)s object replacing specified fields with new values'\n result = _self._make(map(kwds.pop, %(field_names)r, _self))\n if kwds:\n raise ValueError('Got unexpected field names: %%r' %% kwds.keys())\n return result \\n\n def __getnewargs__(self):\n return tuple(self) \\n\\n'''%locals()\n for i,name in enumerate(field_names):\n template +=' %s = _property(_itemgetter(%d))\\n'%(name,i)\n \n if verbose:\n print(template)\n \n \n namespace=dict(_itemgetter=_itemgetter,__name__='namedtuple_%s'%typename,\n _property=property,_tuple=tuple)\n try :\n 
exec(template,namespace)\n except SyntaxError as e:\n raise SyntaxError(e.message+':\\n'+template)\n result=namespace[typename]\n \n \n \n \n \n try :\n result.__module__=_sys._getframe(1).f_globals.get('__name__','__main__')\n except (AttributeError,ValueError):\n pass\n \n return result\n \nif __name__ =='__main__':\n Point=namedtuple('Point',['x','y'])\n p=Point(11,y=22)\n print(p[0]+p[1])\n x,y=p\n print(x,y)\n print(p.x+p.y)\n print(p)\n", ["keyword", "operator", "sys"]], "_collections_abc": [".py", "\n\n\n\"\"\"Abstract Base Classes (ABCs) for collections, according to PEP 3119.\n\nUnit tests are in test_collections.\n\"\"\"\n\nfrom abc import ABCMeta,abstractmethod\nimport sys\n\nGenericAlias=type(list[int])\n\n__all__=[\"Awaitable\",\"Coroutine\",\n\"AsyncIterable\",\"AsyncIterator\",\"AsyncGenerator\",\n\"Hashable\",\"Iterable\",\"Iterator\",\"Generator\",\"Reversible\",\n\"Sized\",\"Container\",\"Callable\",\"Collection\",\n\"Set\",\"MutableSet\",\n\"Mapping\",\"MutableMapping\",\n\"MappingView\",\"KeysView\",\"ItemsView\",\"ValuesView\",\n\"Sequence\",\"MutableSequence\",\n\"ByteString\",\n]\n\n\n\n\n\n__name__=\"collections.abc\"\n\n\n\n\n\n\n\n\nbytes_iterator=type(iter(b''))\nbytearray_iterator=type(iter(bytearray()))\n\ndict_keyiterator=type(iter({}.keys()))\ndict_valueiterator=type(iter({}.values()))\ndict_itemiterator=type(iter({}.items()))\nlist_iterator=type(iter([]))\nlist_reverseiterator=type(iter(reversed([])))\nrange_iterator=type(iter(range(0)))\nlongrange_iterator=type(iter(range(1 <<1000)))\nset_iterator=type(iter(set()))\nstr_iterator=type(iter(\"\"))\ntuple_iterator=type(iter(()))\nzip_iterator=type(iter(zip()))\n\ndict_keys=type({}.keys())\ndict_values=type({}.values())\ndict_items=type({}.items())\n\nmappingproxy=type(type.__dict__)\ngenerator=type((lambda :(yield ))())\n\nasync def _coro():pass\n_coro=_coro()\ncoroutine=type(_coro)\n_coro.close()\ndel _coro\n\nasync def _ag():yield\n_ag=_ag()\nasync_generator=type(_ag)\ndel _ag\n\n\n\n\ndef _check_methods(C,*methods):\n mro=C.__mro__\n for method in methods:\n for B in mro:\n if method in B.__dict__:\n if B.__dict__[method]is None :\n return NotImplemented\n break\n else :\n return NotImplemented\n return True\n \nclass Hashable(metaclass=ABCMeta):\n\n __slots__=()\n \n @abstractmethod\n def __hash__(self):\n return 0\n \n @classmethod\n def __subclasshook__(cls,C):\n if cls is Hashable:\n return _check_methods(C,\"__hash__\")\n return NotImplemented\n \n \nclass Awaitable(metaclass=ABCMeta):\n\n __slots__=()\n \n @abstractmethod\n def __await__(self):\n yield\n \n @classmethod\n def __subclasshook__(cls,C):\n if cls is Awaitable:\n return _check_methods(C,\"__await__\")\n return NotImplemented\n \n __class_getitem__=classmethod(GenericAlias)\n \n \nclass Coroutine(Awaitable):\n\n __slots__=()\n \n @abstractmethod\n def send(self,value):\n ''\n\n \n raise StopIteration\n \n @abstractmethod\n def throw(self,typ,val=None ,tb=None ):\n ''\n\n \n if val is None :\n if tb is None :\n raise typ\n val=typ()\n if tb is not None :\n val=val.with_traceback(tb)\n raise val\n \n def close(self):\n ''\n \n try :\n self.throw(GeneratorExit)\n except (GeneratorExit,StopIteration):\n pass\n else :\n raise RuntimeError(\"coroutine ignored GeneratorExit\")\n \n @classmethod\n def __subclasshook__(cls,C):\n if cls is Coroutine:\n return _check_methods(C,'__await__','send','throw','close')\n return NotImplemented\n \n \nCoroutine.register(coroutine)\n\n\nclass AsyncIterable(metaclass=ABCMeta):\n\n __slots__=()\n \n 
@abstractmethod\n def __aiter__(self):\n return AsyncIterator()\n \n @classmethod\n def __subclasshook__(cls,C):\n if cls is AsyncIterable:\n return _check_methods(C,\"__aiter__\")\n return NotImplemented\n \n __class_getitem__=classmethod(GenericAlias)\n \n \nclass AsyncIterator(AsyncIterable):\n\n __slots__=()\n \n @abstractmethod\n async def __anext__(self):\n ''\n raise StopAsyncIteration\n \n def __aiter__(self):\n return self\n \n @classmethod\n def __subclasshook__(cls,C):\n if cls is AsyncIterator:\n return _check_methods(C,\"__anext__\",\"__aiter__\")\n return NotImplemented\n \n \nclass AsyncGenerator(AsyncIterator):\n\n __slots__=()\n \n async def __anext__(self):\n ''\n\n \n return await self.asend(None )\n \n @abstractmethod\n async def asend(self,value):\n ''\n\n \n raise StopAsyncIteration\n \n @abstractmethod\n async def athrow(self,typ,val=None ,tb=None ):\n ''\n\n \n if val is None :\n if tb is None :\n raise typ\n val=typ()\n if tb is not None :\n val=val.with_traceback(tb)\n raise val\n \n async def aclose(self):\n ''\n \n try :\n await self.athrow(GeneratorExit)\n except (GeneratorExit,StopAsyncIteration):\n pass\n else :\n raise RuntimeError(\"asynchronous generator ignored GeneratorExit\")\n \n @classmethod\n def __subclasshook__(cls,C):\n if cls is AsyncGenerator:\n return _check_methods(C,'__aiter__','__anext__',\n 'asend','athrow','aclose')\n return NotImplemented\n \n \nAsyncGenerator.register(async_generator)\n\n\nclass Iterable(metaclass=ABCMeta):\n\n __slots__=()\n \n @abstractmethod\n def __iter__(self):\n while False :\n yield None\n \n @classmethod\n def __subclasshook__(cls,C):\n if cls is Iterable:\n return _check_methods(C,\"__iter__\")\n return NotImplemented\n \n __class_getitem__=classmethod(GenericAlias)\n \n \nclass Iterator(Iterable):\n\n __slots__=()\n \n @abstractmethod\n def __next__(self):\n ''\n raise StopIteration\n \n def __iter__(self):\n return self\n \n @classmethod\n def __subclasshook__(cls,C):\n if cls is Iterator:\n return _check_methods(C,'__iter__','__next__')\n return NotImplemented\n \n \nIterator.register(bytes_iterator)\nIterator.register(bytearray_iterator)\n\nIterator.register(dict_keyiterator)\nIterator.register(dict_valueiterator)\nIterator.register(dict_itemiterator)\nIterator.register(list_iterator)\nIterator.register(list_reverseiterator)\nIterator.register(range_iterator)\nIterator.register(longrange_iterator)\nIterator.register(set_iterator)\nIterator.register(str_iterator)\nIterator.register(tuple_iterator)\nIterator.register(zip_iterator)\n\n\nclass Reversible(Iterable):\n\n __slots__=()\n \n @abstractmethod\n def __reversed__(self):\n while False :\n yield None\n \n @classmethod\n def __subclasshook__(cls,C):\n if cls is Reversible:\n return _check_methods(C,\"__reversed__\",\"__iter__\")\n return NotImplemented\n \n \nclass Generator(Iterator):\n\n __slots__=()\n \n def __next__(self):\n ''\n\n \n return self.send(None )\n \n @abstractmethod\n def send(self,value):\n ''\n\n \n raise StopIteration\n \n @abstractmethod\n def throw(self,typ,val=None ,tb=None ):\n ''\n\n \n if val is None :\n if tb is None :\n raise typ\n val=typ()\n if tb is not None :\n val=val.with_traceback(tb)\n raise val\n \n def close(self):\n ''\n \n try :\n self.throw(GeneratorExit)\n except (GeneratorExit,StopIteration):\n pass\n else :\n raise RuntimeError(\"generator ignored GeneratorExit\")\n \n @classmethod\n def __subclasshook__(cls,C):\n if cls is Generator:\n return _check_methods(C,'__iter__','__next__',\n 'send','throw','close')\n 
return NotImplemented\n \n \nGenerator.register(generator)\n\n\nclass Sized(metaclass=ABCMeta):\n\n __slots__=()\n \n @abstractmethod\n def __len__(self):\n return 0\n \n @classmethod\n def __subclasshook__(cls,C):\n if cls is Sized:\n return _check_methods(C,\"__len__\")\n return NotImplemented\n \n \nclass Container(metaclass=ABCMeta):\n\n __slots__=()\n \n @abstractmethod\n def __contains__(self,x):\n return False\n \n @classmethod\n def __subclasshook__(cls,C):\n if cls is Container:\n return _check_methods(C,\"__contains__\")\n return NotImplemented\n \n __class_getitem__=classmethod(GenericAlias)\n \n \nclass Collection(Sized,Iterable,Container):\n\n __slots__=()\n \n @classmethod\n def __subclasshook__(cls,C):\n if cls is Collection:\n return _check_methods(C,\"__len__\",\"__iter__\",\"__contains__\")\n return NotImplemented\n \n \nclass Callable(metaclass=ABCMeta):\n\n __slots__=()\n \n @abstractmethod\n def __call__(self,*args,**kwds):\n return False\n \n @classmethod\n def __subclasshook__(cls,C):\n if cls is Callable:\n return _check_methods(C,\"__call__\")\n return NotImplemented\n \n __class_getitem__=classmethod(GenericAlias)\n \n \n \n \n \nclass Set(Collection):\n\n ''\n\n\n\n\n\n\n\n \n \n __slots__=()\n \n def __le__(self,other):\n if not isinstance(other,Set):\n return NotImplemented\n if len(self)>len(other):\n return False\n for elem in self:\n if elem not in other:\n return False\n return True\n \n def __lt__(self,other):\n if not isinstance(other,Set):\n return NotImplemented\n return len(self)len(other)and self.__ge__(other)\n \n def __ge__(self,other):\n if not isinstance(other,Set):\n return NotImplemented\n if len(self)MAX:\n h -=MASK+1\n if h ==-1:\n h=590923713\n return h\n \n \nSet.register(frozenset)\n\n\nclass MutableSet(Set):\n ''\n\n\n\n\n\n\n\n\n \n \n __slots__=()\n \n @abstractmethod\n def add(self,value):\n ''\n raise NotImplementedError\n \n @abstractmethod\n def discard(self,value):\n ''\n raise NotImplementedError\n \n def remove(self,value):\n ''\n if value not in self:\n raise KeyError(value)\n self.discard(value)\n \n def pop(self):\n ''\n it=iter(self)\n try :\n value=next(it)\n except StopIteration:\n raise KeyError from None\n self.discard(value)\n return value\n \n def clear(self):\n ''\n try :\n while True :\n self.pop()\n except KeyError:\n pass\n \n def __ior__(self,it):\n for value in it:\n self.add(value)\n return self\n \n def __iand__(self,it):\n for value in (self -it):\n self.discard(value)\n return self\n \n def __ixor__(self,it):\n if it is self:\n self.clear()\n else :\n if not isinstance(it,Set):\n it=self._from_iterable(it)\n for value in it:\n if value in self:\n self.discard(value)\n else :\n self.add(value)\n return self\n \n def __isub__(self,it):\n if it is self:\n self.clear()\n else :\n for value in it:\n self.discard(value)\n return self\n \n \nMutableSet.register(set)\n\n\n\n\n\nclass Mapping(Collection):\n\n __slots__=()\n \n \"\"\"A Mapping is a generic container for associating key/value\n pairs.\n\n This class provides concrete generic implementations of all\n methods except for __getitem__, __iter__, and __len__.\n\n \"\"\"\n \n @abstractmethod\n def __getitem__(self,key):\n raise KeyError\n \n def get(self,key,default=None ):\n ''\n try :\n return self[key]\n except KeyError:\n return default\n \n def __contains__(self,key):\n try :\n self[key]\n except KeyError:\n return False\n else :\n return True\n \n def keys(self):\n ''\n return KeysView(self)\n \n def items(self):\n ''\n return ItemsView(self)\n \n def 
values(self):\n ''\n return ValuesView(self)\n \n def __eq__(self,other):\n if not isinstance(other,Mapping):\n return NotImplemented\n return dict(self.items())==dict(other.items())\n \n __reversed__=None\n \n \nMapping.register(mappingproxy)\n\n\nclass MappingView(Sized):\n\n __slots__='_mapping',\n \n def __init__(self,mapping):\n self._mapping=mapping\n \n def __len__(self):\n return len(self._mapping)\n \n def __repr__(self):\n return '{0.__class__.__name__}({0._mapping!r})'.format(self)\n \n __class_getitem__=classmethod(GenericAlias)\n \n \nclass KeysView(MappingView,Set):\n\n __slots__=()\n \n @classmethod\n def _from_iterable(self,it):\n return set(it)\n \n def __contains__(self,key):\n return key in self._mapping\n \n def __iter__(self):\n yield from self._mapping\n \n \nKeysView.register(dict_keys)\n\n\nclass ItemsView(MappingView,Set):\n\n __slots__=()\n \n @classmethod\n def _from_iterable(self,it):\n return set(it)\n \n def __contains__(self,item):\n key,value=item\n try :\n v=self._mapping[key]\n except KeyError:\n return False\n else :\n return v is value or v ==value\n \n def __iter__(self):\n for key in self._mapping:\n yield (key,self._mapping[key])\n \n \nItemsView.register(dict_items)\n\n\nclass ValuesView(MappingView,Collection):\n\n __slots__=()\n \n def __contains__(self,value):\n for key in self._mapping:\n v=self._mapping[key]\n if v is value or v ==value:\n return True\n return False\n \n def __iter__(self):\n for key in self._mapping:\n yield self._mapping[key]\n \n \nValuesView.register(dict_values)\n\n\nclass MutableMapping(Mapping):\n\n __slots__=()\n \n \"\"\"A MutableMapping is a generic container for associating\n key/value pairs.\n\n This class provides concrete generic implementations of all\n methods except for __getitem__, __setitem__, __delitem__,\n __iter__, and __len__.\n\n \"\"\"\n \n @abstractmethod\n def __setitem__(self,key,value):\n raise KeyError\n \n @abstractmethod\n def __delitem__(self,key):\n raise KeyError\n \n __marker=object()\n \n def pop(self,key,default=__marker):\n ''\n\n \n try :\n value=self[key]\n except KeyError:\n if default is self.__marker:\n raise\n return default\n else :\n del self[key]\n return value\n \n def popitem(self):\n ''\n\n \n try :\n key=next(iter(self))\n except StopIteration:\n raise KeyError from None\n value=self[key]\n del self[key]\n return key,value\n \n def clear(self):\n ''\n try :\n while True :\n self.popitem()\n except KeyError:\n pass\n \n def update(self,other=(),/,**kwds):\n ''\n\n\n\n \n if isinstance(other,Mapping):\n for key in other:\n self[key]=other[key]\n elif hasattr(other,\"keys\"):\n for key in other.keys():\n self[key]=other[key]\n else :\n for key,value in other:\n self[key]=value\n for key,value in kwds.items():\n self[key]=value\n \n def setdefault(self,key,default=None ):\n ''\n try :\n return self[key]\n except KeyError:\n self[key]=default\n return default\n \n \nMutableMapping.register(dict)\n\n\n\n\n\nclass Sequence(Reversible,Collection):\n\n ''\n\n\n\n \n \n __slots__=()\n \n @abstractmethod\n def __getitem__(self,index):\n raise IndexError\n \n def __iter__(self):\n i=0\n try :\n while True :\n v=self[i]\n yield v\n i +=1\n except IndexError:\n return\n \n def __contains__(self,value):\n for v in self:\n if v is value or v ==value:\n return True\n return False\n \n def __reversed__(self):\n for i in reversed(range(len(self))):\n yield self[i]\n \n def index(self,value,start=0,stop=None ):\n ''\n\n\n\n\n \n if start is not None and start <0:\n start=max(len(self)+start,0)\n 
if stop is not None and stop <0:\n stop +=len(self)\n \n i=start\n while stop is None or i \"\n \n copy=\"\"\n \n def get(self,*args):\n pass\n \n items=\"\"\n \n keys=\"\"\n \n run=\"\"\n \n values=\"\"\n \nclass ContextVar:\n\n def __init__(self,name,**kw):\n ''\n self.name=name\n if \"default\"in kw:\n self.default=kw[\"default\"]\n \n def get(self,*args):\n if hasattr(self,\"value\"):\n return self.value\n elif len(args)==1:\n return args[0]\n elif hasattr(self,\"default\"):\n return self.default\n raise LookupError(self.name)\n \n def reset(self,token):\n if token.old_value ==Token.MISSING:\n del self.value\n else :\n self.value=token.old_value\n \n def set(self,value):\n self.value=value\n return Token(self)\n \nclass Token(object):\n\n MISSING=\"\"\n \n def __init__(self,contextvar):\n self.var=contextvar\n try :\n self.old_value=contextvar.get()\n except LookupError:\n self.old_value=Token.MISSING\n \ndef copy_context(*args,**kw):\n pass\n", []], "_csv": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n__version__=\"1.0\"\n\nQUOTE_MINIMAL,QUOTE_ALL,QUOTE_NONNUMERIC,QUOTE_NONE=range(4)\n_dialects={}\n_field_limit=128 *1024\n\nclass Error(Exception):\n pass\n \nclass Dialect(object):\n ''\n\n \n \n __slots__=[\"_delimiter\",\"_doublequote\",\"_escapechar\",\n \"_lineterminator\",\"_quotechar\",\"_quoting\",\n \"_skipinitialspace\",\"_strict\"]\n \n def __new__(cls,dialect,**kwargs):\n \n for name in kwargs:\n if '_'+name not in Dialect.__slots__:\n raise TypeError(\"unexpected keyword argument '%s'\"%\n (name,))\n \n if dialect is not None :\n if isinstance(dialect,str):\n dialect=get_dialect(dialect)\n \n \n if (isinstance(dialect,Dialect)\n and all(value is None for value in kwargs.values())):\n return dialect\n \n self=object.__new__(cls)\n \n \n def set_char(x):\n if x is None :\n return None\n if isinstance(x,str)and len(x)<=1:\n return x\n raise TypeError(\"%r must be a 1-character string\"%(name,))\n def set_str(x):\n if isinstance(x,str):\n return x\n raise TypeError(\"%r must be a string\"%(name,))\n def set_quoting(x):\n if x in range(4):\n return x\n raise TypeError(\"bad 'quoting' value\")\n \n attributes={\"delimiter\":(',',set_char),\n \"doublequote\":(True ,bool),\n \"escapechar\":(None ,set_char),\n \"lineterminator\":(\"\\r\\n\",set_str),\n \"quotechar\":('\"',set_char),\n \"quoting\":(QUOTE_MINIMAL,set_quoting),\n \"skipinitialspace\":(False ,bool),\n \"strict\":(False ,bool),\n }\n \n \n notset=object()\n for name in Dialect.__slots__:\n name=name[1:]\n value=notset\n if name in kwargs:\n value=kwargs[name]\n elif dialect is not None :\n value=getattr(dialect,name,notset)\n \n \n if value is notset:\n value=attributes[name][0]\n if name =='quoting'and not self.quotechar:\n value=QUOTE_NONE\n else :\n converter=attributes[name][1]\n if converter:\n value=converter(value)\n \n setattr(self,'_'+name,value)\n \n if not self.delimiter:\n raise TypeError(\"delimiter must be set\")\n \n if self.quoting !=QUOTE_NONE and not self.quotechar:\n raise TypeError(\"quotechar must be set if quoting enabled\")\n \n if not self.lineterminator:\n raise TypeError(\"lineterminator must be set\")\n \n return self\n \n delimiter=property(lambda self:self._delimiter)\n doublequote=property(lambda self:self._doublequote)\n escapechar=property(lambda self:self._escapechar)\n lineterminator=property(lambda self:self._lineterminator)\n quotechar=property(lambda self:self._quotechar)\n 
quoting=property(lambda self:self._quoting)\n skipinitialspace=property(lambda self:self._skipinitialspace)\n strict=property(lambda self:self._strict)\n \n \ndef _call_dialect(dialect_inst,kwargs):\n return Dialect(dialect_inst,**kwargs)\n \ndef register_dialect(name,dialect=None ,**kwargs):\n ''\n \n if not isinstance(name,str):\n raise TypeError(\"dialect name must be a string or unicode\")\n \n dialect=_call_dialect(dialect,kwargs)\n _dialects[name]=dialect\n \ndef unregister_dialect(name):\n ''\n \n try :\n del _dialects[name]\n except KeyError:\n raise Error(\"unknown dialect\")\n \ndef get_dialect(name):\n ''\n \n try :\n return _dialects[name]\n except KeyError:\n raise Error(\"unknown dialect\")\n \ndef list_dialects():\n ''\n \n return list(_dialects)\n \nclass Reader(object):\n ''\n\n\n \n \n \n (START_RECORD,START_FIELD,ESCAPED_CHAR,IN_FIELD,\n IN_QUOTED_FIELD,ESCAPE_IN_QUOTED_FIELD,QUOTE_IN_QUOTED_FIELD,\n EAT_CRNL)=range(8)\n \n def __init__(self,iterator,dialect=None ,**kwargs):\n self.dialect=_call_dialect(dialect,kwargs)\n \n \n \n self._delimiter=self.dialect.delimiter if self.dialect.delimiter else '\\0'\n self._quotechar=self.dialect.quotechar if self.dialect.quotechar else '\\0'\n self._escapechar=self.dialect.escapechar if self.dialect.escapechar else '\\0'\n self._doublequote=self.dialect.doublequote\n self._quoting=self.dialect.quoting\n self._skipinitialspace=self.dialect.skipinitialspace\n self._strict=self.dialect.strict\n \n self.input_iter=iter(iterator)\n self.line_num=0\n \n self._parse_reset()\n \n def _parse_reset(self):\n self.field=''\n self.fields=[]\n self.state=self.START_RECORD\n self.numeric_field=False\n \n def __iter__(self):\n return self\n \n def __next__(self):\n self._parse_reset()\n while True :\n try :\n line=next(self.input_iter)\n except StopIteration:\n \n if len(self.field)>0:\n raise Error(\"newline inside string\")\n raise\n \n self.line_num +=1\n \n if '\\0'in line:\n raise Error(\"line contains NULL byte\")\n self._parse_process_char(line)\n self._parse_eol()\n \n if self.state ==self.START_RECORD:\n break\n \n fields=self.fields\n self.fields=[]\n return fields\n \n def _parse_process_char(self,line):\n pos=0\n while pos pos:\n self._parse_add_str(line[pos:pos2])\n pos=pos2\n self._parse_save_field()\n self.state=self.EAT_CRNL\n break\n elif line[pos2]==self._escapechar[0]:\n \n if pos2 >pos:\n self._parse_add_str(line[pos:pos2])\n pos=pos2\n self.state=self.ESCAPED_CHAR\n break\n elif line[pos2]==self._delimiter[0]:\n \n if pos2 >pos:\n self._parse_add_str(line[pos:pos2])\n pos=pos2\n self._parse_save_field()\n self.state=self.START_FIELD\n break\n \n pos2 +=1\n else :\n if pos2 >pos:\n self._parse_add_str(line[pos:pos2])\n pos=pos2\n continue\n \n elif self.state ==self.START_RECORD:\n if line[pos]=='\\n'or line[pos]=='\\r':\n self.state=self.EAT_CRNL\n else :\n self.state=self.START_FIELD\n \n continue\n \n elif self.state ==self.START_FIELD:\n if line[pos]=='\\n'or line[pos]=='\\r':\n \n self._parse_save_field()\n self.state=self.EAT_CRNL\n elif (line[pos]==self._quotechar[0]\n and self._quoting !=QUOTE_NONE):\n \n self.state=self.IN_QUOTED_FIELD\n elif line[pos]==self._escapechar[0]:\n \n self.state=self.ESCAPED_CHAR\n elif self._skipinitialspace and line[pos]==' ':\n \n pass\n elif line[pos]==self._delimiter[0]:\n \n self._parse_save_field()\n else :\n \n if self._quoting ==QUOTE_NONNUMERIC:\n self.numeric_field=True\n self.state=self.IN_FIELD\n continue\n \n elif self.state ==self.ESCAPED_CHAR:\n 
self._parse_add_char(line[pos])\n self.state=self.IN_FIELD\n \n elif self.state ==self.IN_QUOTED_FIELD:\n if line[pos]==self._escapechar:\n \n self.state=self.ESCAPE_IN_QUOTED_FIELD\n elif (line[pos]==self._quotechar\n and self._quoting !=QUOTE_NONE):\n if self._doublequote:\n \n self.state=self.QUOTE_IN_QUOTED_FIELD\n else :\n \n self.state=self.IN_FIELD\n else :\n \n self._parse_add_char(line[pos])\n \n elif self.state ==self.ESCAPE_IN_QUOTED_FIELD:\n self._parse_add_char(line[pos])\n self.state=self.IN_QUOTED_FIELD\n \n elif self.state ==self.QUOTE_IN_QUOTED_FIELD:\n \n if (line[pos]==self._quotechar\n and self._quoting !=QUOTE_NONE):\n \n self._parse_add_char(line[pos])\n self.state=self.IN_QUOTED_FIELD\n elif line[pos]==self._delimiter[0]:\n \n self._parse_save_field()\n self.state=self.START_FIELD\n elif line[pos]=='\\r'or line[pos]=='\\n':\n \n self._parse_save_field()\n self.state=self.EAT_CRNL\n elif not self._strict:\n self._parse_add_char(line[pos])\n self.state=self.IN_FIELD\n else :\n raise Error(\"'%c' expected after '%c'\"%\n (self._delimiter,self._quotechar))\n \n elif self.state ==self.EAT_CRNL:\n if line[pos]=='\\r'or line[pos]=='\\n':\n pass\n else :\n raise Error(\"new-line character seen in unquoted field - \"\n \"do you need to open the file \"\n \"in universal-newline mode?\")\n \n else :\n raise RuntimeError(\"unknown state: %r\"%(self.state,))\n \n pos +=1\n \n def _parse_eol(self):\n if self.state ==self.EAT_CRNL:\n self.state=self.START_RECORD\n elif self.state ==self.START_RECORD:\n \n pass\n elif self.state ==self.IN_FIELD:\n \n \n self._parse_save_field()\n self.state=self.START_RECORD\n elif self.state ==self.START_FIELD:\n \n self._parse_save_field()\n self.state=self.START_RECORD\n elif self.state ==self.ESCAPED_CHAR:\n self._parse_add_char('\\n')\n self.state=self.IN_FIELD\n elif self.state ==self.IN_QUOTED_FIELD:\n pass\n elif self.state ==self.ESCAPE_IN_QUOTED_FIELD:\n self._parse_add_char('\\n')\n self.state=self.IN_QUOTED_FIELD\n elif self.state ==self.QUOTE_IN_QUOTED_FIELD:\n \n self._parse_save_field()\n self.state=self.START_RECORD\n else :\n raise RuntimeError(\"unknown state: %r\"%(self.state,))\n \n def _parse_save_field(self):\n field,self.field=self.field,''\n if self.numeric_field:\n self.numeric_field=False\n field=float(field)\n self.fields.append(field)\n \n def _parse_add_char(self,c):\n if len(self.field)+1 >_field_limit:\n raise Error(\"field larget than field limit (%d)\"%(_field_limit))\n self.field +=c\n \n def _parse_add_str(self,s):\n if len(self.field)+len(s)>_field_limit:\n raise Error(\"field larget than field limit (%d)\"%(_field_limit))\n self.field +=s\n \n \nclass Writer(object):\n ''\n\n\n \n \n def __init__(self,file,dialect=None ,**kwargs):\n if not (hasattr(file,'write')and callable(file.write)):\n raise TypeError(\"argument 1 must have a 'write' method\")\n self.writeline=file.write\n self.dialect=_call_dialect(dialect,kwargs)\n \n def _join_reset(self):\n self.rec=[]\n self.num_fields=0\n \n def _join_append(self,field,quoted,quote_empty):\n dialect=self.dialect\n \n if self.num_fields >0:\n self.rec.append(dialect.delimiter)\n \n if dialect.quoting ==QUOTE_NONE:\n need_escape=tuple(dialect.lineterminator)+(\n dialect.escapechar,\n dialect.delimiter,dialect.quotechar)\n \n else :\n for c in tuple(dialect.lineterminator)+(\n dialect.delimiter,dialect.escapechar):\n if c and c in field:\n quoted=True\n \n need_escape=()\n if dialect.quotechar in field:\n if dialect.doublequote:\n field=field.replace(dialect.quotechar,\n 
dialect.quotechar *2)\n quoted=True\n else :\n need_escape=(dialect.quotechar,)\n \n \n for c in need_escape:\n if c and c in field:\n if not dialect.escapechar:\n raise Error(\"need to escape, but no escapechar set\")\n field=field.replace(c,dialect.escapechar+c)\n \n \n if field ==''and quote_empty:\n if dialect.quoting ==QUOTE_NONE:\n raise Error(\"single empty field record must be quoted\")\n quoted=1\n \n if quoted:\n field=dialect.quotechar+field+dialect.quotechar\n \n self.rec.append(field)\n self.num_fields +=1\n \n \n \n def writerow(self,row):\n dialect=self.dialect\n try :\n rowlen=len(row)\n except TypeError:\n raise Error(\"sequence expected\")\n \n \n self._join_reset()\n \n for field in row:\n quoted=False\n if dialect.quoting ==QUOTE_NONNUMERIC:\n try :\n float(field)\n except :\n quoted=True\n \n \n elif dialect.quoting ==QUOTE_ALL:\n quoted=True\n \n if field is None :\n self._join_append(\"\",quoted,rowlen ==1)\n else :\n self._join_append(str(field),quoted,rowlen ==1)\n \n \n self.rec.append(dialect.lineterminator)\n \n self.writeline(''.join(self.rec))\n \n def writerows(self,rows):\n for row in rows:\n self.writerow(row)\n \ndef reader(*args,**kwargs):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n return Reader(*args,**kwargs)\n \ndef writer(*args,**kwargs):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n return Writer(*args,**kwargs)\n \n \nundefined=object()\ndef field_size_limit(limit=undefined):\n ''\n\n\n\n \n \n global _field_limit\n old_limit=_field_limit\n \n if limit is not undefined:\n if not isinstance(limit,(int,long)):\n raise TypeError(\"int expected, got %s\"%\n (limit.__class__.__name__,))\n _field_limit=limit\n \n return old_limit\n", []], "_dummy_thread": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n__all__=['error','start_new_thread','exit','get_ident','allocate_lock',\n'interrupt_main','LockType','RLock']\n\n\nTIMEOUT_MAX=2 **31\n\n\n\n\n\n\nerror=RuntimeError\n\ndef start_new_thread(function,args,kwargs={}):\n ''\n\n\n\n\n\n\n\n\n\n\n \n if type(args)!=type(tuple()):\n raise TypeError(\"2nd arg must be a tuple\")\n if type(kwargs)!=type(dict()):\n raise TypeError(\"3rd arg must be a dict\")\n global _main\n _main=False\n try :\n function(*args,**kwargs)\n except SystemExit:\n pass\n except :\n import traceback\n traceback.print_exc()\n _main=True\n global _interrupt\n if _interrupt:\n _interrupt=False\n raise KeyboardInterrupt\n \ndef exit():\n ''\n raise SystemExit\n \ndef get_ident():\n ''\n\n\n\n\n \n return 1\n \ndef allocate_lock():\n ''\n return LockType()\n \ndef stack_size(size=None ):\n ''\n if size is not None :\n raise error(\"setting thread stack size not supported\")\n return 0\n \ndef _set_sentinel():\n ''\n return LockType()\n \nclass LockType(object):\n ''\n\n\n\n\n\n\n\n \n \n def __init__(self):\n self.locked_status=False\n \n def acquire(self,waitflag=None ,timeout=-1):\n ''\n\n\n\n\n\n\n\n\n \n if waitflag is None or waitflag:\n self.locked_status=True\n return True\n else :\n if not self.locked_status:\n self.locked_status=True\n return True\n else :\n if timeout >0:\n import time\n time.sleep(timeout)\n return False\n \n __enter__=acquire\n \n def __exit__(self,typ,val,tb):\n self.release()\n \n def release(self):\n ''\n \n \n if not self.locked_status:\n raise error\n self.locked_status=False\n return True\n \n def locked(self):\n return self.locked_status\n \n def __repr__(self):\n return \"<%s %s.%s object at %s>\"%(\n \"locked\"if self.locked_status else \"unlocked\",\n self.__class__.__module__,\n self.__class__.__qualname__,\n hex(id(self))\n )\n 
\n \nclass RLock(LockType):\n ''\n\n\n\n\n\n \n def __init__(self):\n super().__init__()\n self._levels=0\n \n def acquire(self,waitflag=None ,timeout=-1):\n ''\n \n locked=super().acquire(waitflag,timeout)\n if locked:\n self._levels +=1\n return locked\n \n def release(self):\n ''\n \n if self._levels ==0:\n raise error\n if self._levels ==1:\n super().release()\n self._levels -=1\n \n \n_interrupt=False\n\n_main=True\n\ndef interrupt_main():\n ''\n \n if _main:\n raise KeyboardInterrupt\n else :\n global _interrupt\n _interrupt=True\n", ["time", "traceback"]], "_frozen_importlib": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n_bootstrap_external=None\n_thread=None\nimport _weakref\n\nimport _imp\nimport sys\n\ndef _wrap(new,old):\n ''\n for replace in ['__module__','__name__','__qualname__','__doc__']:\n if hasattr(old,replace):\n setattr(new,replace,getattr(old,replace))\n new.__dict__.update(old.__dict__)\n \n \ndef _new_module(name):\n return type(sys)(name)\n \n \n \n \n \n \n_module_locks={}\n\n_blocking_on={}\n\n\nclass _DeadlockError(RuntimeError):\n pass\n \n \nclass _ModuleLock:\n ''\n\n\n \n \n def __init__(self,name):\n self.lock=_thread.allocate_lock()\n self.wakeup=_thread.allocate_lock()\n self.name=name\n self.owner=None\n self.count=0\n self.waiters=0\n \n def has_deadlock(self):\n \n me=_thread.get_ident()\n tid=self.owner\n while True :\n lock=_blocking_on.get(tid)\n if lock is None :\n return False\n tid=lock.owner\n if tid ==me:\n return True\n \n def acquire(self):\n ''\n\n\n\n \n tid=_thread.get_ident()\n _blocking_on[tid]=self\n try :\n while True :\n with self.lock:\n if self.count ==0 or self.owner ==tid:\n self.owner=tid\n self.count +=1\n return True\n if self.has_deadlock():\n raise _DeadlockError('deadlock detected by %r'%self)\n if self.wakeup.acquire(False ):\n self.waiters +=1\n \n self.wakeup.acquire()\n self.wakeup.release()\n finally :\n del _blocking_on[tid]\n \n def release(self):\n tid=_thread.get_ident()\n with self.lock:\n if self.owner !=tid:\n raise RuntimeError('cannot release un-acquired lock')\n assert self.count >0\n self.count -=1\n if self.count ==0:\n self.owner=None\n if self.waiters:\n self.waiters -=1\n self.wakeup.release()\n \n def __repr__(self):\n return '_ModuleLock({!r}) at {}'.format(self.name,id(self))\n \n \nclass _DummyModuleLock:\n ''\n \n \n def __init__(self,name):\n self.name=name\n self.count=0\n \n def acquire(self):\n self.count +=1\n return True\n \n def release(self):\n if self.count ==0:\n raise RuntimeError('cannot release un-acquired lock')\n self.count -=1\n \n def __repr__(self):\n return '_DummyModuleLock({!r}) at {}'.format(self.name,id(self))\n \n \nclass _ModuleLockManager:\n\n def __init__(self,name):\n self._name=name\n self._lock=None\n \n def __enter__(self):\n self._lock=_get_module_lock(self._name)\n self._lock.acquire()\n \n def __exit__(self,*args,**kwargs):\n self._lock.release()\n \n \n \n \ndef _get_module_lock(name):\n ''\n\n\n \n \n _imp.acquire_lock()\n try :\n try :\n lock=_module_locks[name]()\n except KeyError:\n lock=None\n \n if lock is None :\n if _thread is None :\n lock=_DummyModuleLock(name)\n else :\n lock=_ModuleLock(name)\n \n def cb(ref,name=name):\n _imp.acquire_lock()\n try :\n \n \n \n if _module_locks.get(name)is ref:\n del _module_locks[name]\n finally :\n _imp.release_lock()\n \n _module_locks[name]=_weakref.ref(lock,cb)\n finally :\n _imp.release_lock()\n \n return lock\n \n \ndef _lock_unlock_module(name):\n ''\n\n\n\n \n lock=_get_module_lock(name)\n try :\n 
lock.acquire()\n except _DeadlockError:\n \n \n pass\n else :\n lock.release()\n \n \ndef _call_with_frames_removed(f,*args,**kwds):\n ''\n\n\n\n\n\n \n return f(*args,**kwds)\n \n \ndef _verbose_message(message,*args,verbosity=1):\n ''\n if sys.flags.verbose >=verbosity:\n if not message.startswith(('#','import ')):\n message='# '+message\n print(message.format(*args),file=sys.stderr)\n \n \ndef _requires_builtin(fxn):\n ''\n def _requires_builtin_wrapper(self,fullname):\n if fullname not in sys.builtin_module_names:\n raise ImportError('{!r} is not a built-in module'.format(fullname),\n name=fullname)\n return fxn(self,fullname)\n _wrap(_requires_builtin_wrapper,fxn)\n return _requires_builtin_wrapper\n \n \ndef _requires_frozen(fxn):\n ''\n def _requires_frozen_wrapper(self,fullname):\n if not _imp.is_frozen(fullname):\n raise ImportError('{!r} is not a frozen module'.format(fullname),\n name=fullname)\n return fxn(self,fullname)\n _wrap(_requires_frozen_wrapper,fxn)\n return _requires_frozen_wrapper\n \n \n \ndef _load_module_shim(self,fullname):\n ''\n\n\n\n \n spec=spec_from_loader(fullname,self)\n if fullname in sys.modules:\n module=sys.modules[fullname]\n _exec(spec,module)\n return sys.modules[fullname]\n else :\n return _load(spec)\n \n \n \ndef _module_repr(module):\n\n loader=getattr(module,'__loader__',None )\n if hasattr(loader,'module_repr'):\n \n \n \n try :\n return loader.module_repr(module)\n except Exception:\n pass\n try :\n spec=module.__spec__\n except AttributeError:\n pass\n else :\n if spec is not None :\n return _module_repr_from_spec(spec)\n \n \n \n try :\n name=module.__name__\n except AttributeError:\n name='?'\n try :\n filename=module.__file__\n except AttributeError:\n if loader is None :\n return ''.format(name)\n else :\n return ''.format(name,loader)\n else :\n return ''.format(name,filename)\n \n \nclass _installed_safely:\n\n def __init__(self,module):\n self._module=module\n self._spec=module.__spec__\n \n def __enter__(self):\n \n \n \n self._spec._initializing=True\n sys.modules[self._spec.name]=self._module\n \n def __exit__(self,*args):\n try :\n spec=self._spec\n if any(arg is not None for arg in args):\n try :\n del sys.modules[spec.name]\n except KeyError:\n pass\n else :\n _verbose_message('import {!r} # {!r}',spec.name,spec.loader)\n finally :\n self._spec._initializing=False\n \n \nclass ModuleSpec:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def __init__(self,name,loader,*,origin=None ,loader_state=None ,\n is_package=None ):\n self.name=name\n self.loader=loader\n self.origin=origin\n self.loader_state=loader_state\n self.submodule_search_locations=[]if is_package else None\n \n \n self._set_fileattr=False\n self._cached=None\n \n def __repr__(self):\n args=['name={!r}'.format(self.name),\n 'loader={!r}'.format(self.loader)]\n if self.origin is not None :\n args.append('origin={!r}'.format(self.origin))\n if self.submodule_search_locations is not None :\n args.append('submodule_search_locations={}'\n .format(self.submodule_search_locations))\n return '{}({})'.format(self.__class__.__name__,', '.join(args))\n \n def __eq__(self,other):\n smsl=self.submodule_search_locations\n try :\n return (self.name ==other.name and\n self.loader ==other.loader and\n self.origin ==other.origin and\n smsl ==other.submodule_search_locations and\n self.cached ==other.cached and\n self.has_location ==other.has_location)\n except AttributeError:\n return False\n \n @property\n def cached(self):\n if self._cached is None :\n 
if self.origin is not None and self._set_fileattr:\n if _bootstrap_external is None :\n raise NotImplementedError\n self._cached=_bootstrap_external._get_cached(self.origin)\n return self._cached\n \n @cached.setter\n def cached(self,cached):\n self._cached=cached\n \n @property\n def parent(self):\n ''\n if self.submodule_search_locations is None :\n return self.name.rpartition('.')[0]\n else :\n return self.name\n \n @property\n def has_location(self):\n return self._set_fileattr\n \n @has_location.setter\n def has_location(self,value):\n self._set_fileattr=bool(value)\n \n \ndef spec_from_loader(name,loader,*,origin=None ,is_package=None ):\n ''\n if hasattr(loader,'get_filename'):\n if _bootstrap_external is None :\n raise NotImplementedError\n spec_from_file_location=_bootstrap_external.spec_from_file_location\n \n if is_package is None :\n return spec_from_file_location(name,loader=loader)\n search=[]if is_package else None\n return spec_from_file_location(name,loader=loader,\n submodule_search_locations=search)\n \n if is_package is None :\n if hasattr(loader,'is_package'):\n try :\n is_package=loader.is_package(name)\n except ImportError:\n is_package=None\n else :\n \n is_package=False\n \n return ModuleSpec(name,loader,origin=origin,is_package=is_package)\n \n \ndef _spec_from_module(module,loader=None ,origin=None ):\n\n try :\n spec=module.__spec__\n except AttributeError:\n pass\n else :\n if spec is not None :\n return spec\n \n name=module.__name__\n if loader is None :\n try :\n loader=module.__loader__\n except AttributeError:\n \n pass\n try :\n location=module.__file__\n except AttributeError:\n location=None\n if origin is None :\n if location is None :\n try :\n origin=loader._ORIGIN\n except AttributeError:\n origin=None\n else :\n origin=location\n try :\n cached=module.__cached__\n except AttributeError:\n cached=None\n try :\n submodule_search_locations=list(module.__path__)\n except AttributeError:\n submodule_search_locations=None\n \n spec=ModuleSpec(name,loader,origin=origin)\n spec._set_fileattr=False if location is None else True\n spec.cached=cached\n spec.submodule_search_locations=submodule_search_locations\n return spec\n \n \ndef _init_module_attrs(spec,module,*,override=False ):\n\n\n\n if (override or getattr(module,'__name__',None )is None ):\n try :\n module.__name__=spec.name\n except AttributeError:\n pass\n \n if override or getattr(module,'__loader__',None )is None :\n loader=spec.loader\n if loader is None :\n \n if spec.submodule_search_locations is not None :\n if _bootstrap_external is None :\n raise NotImplementedError\n _NamespaceLoader=_bootstrap_external._NamespaceLoader\n \n loader=_NamespaceLoader.__new__(_NamespaceLoader)\n loader._path=spec.submodule_search_locations\n spec.loader=loader\n \n \n \n \n \n \n \n \n \n \n module.__file__=None\n try :\n module.__loader__=loader\n except AttributeError:\n pass\n \n if override or getattr(module,'__package__',None )is None :\n try :\n module.__package__=spec.parent\n except AttributeError:\n pass\n \n try :\n module.__spec__=spec\n except AttributeError:\n pass\n \n if override or getattr(module,'__path__',None )is None :\n if spec.submodule_search_locations is not None :\n try :\n module.__path__=spec.submodule_search_locations\n except AttributeError:\n pass\n \n if spec.has_location:\n if override or getattr(module,'__file__',None )is None :\n try :\n module.__file__=spec.origin\n except AttributeError:\n pass\n \n if override or getattr(module,'__cached__',None )is None :\n if 
spec.cached is not None :\n try :\n module.__cached__=spec.cached\n except AttributeError:\n pass\n return module\n \n \ndef module_from_spec(spec):\n ''\n \n module=None\n if hasattr(spec.loader,'create_module'):\n \n \n module=spec.loader.create_module(spec)\n elif hasattr(spec.loader,'exec_module'):\n raise ImportError('loaders that define exec_module() '\n 'must also define create_module()')\n if module is None :\n module=_new_module(spec.name)\n _init_module_attrs(spec,module)\n return module\n \n \ndef _module_repr_from_spec(spec):\n ''\n \n name='?'if spec.name is None else spec.name\n if spec.origin is None :\n if spec.loader is None :\n return ''.format(name)\n else :\n return ''.format(name,spec.loader)\n else :\n if spec.has_location:\n return ''.format(name,spec.origin)\n else :\n return ''.format(spec.name,spec.origin)\n \n \n \ndef _exec(spec,module):\n ''\n name=spec.name\n with _ModuleLockManager(name):\n if sys.modules.get(name)is not module:\n msg='module {!r} not in sys.modules'.format(name)\n raise ImportError(msg,name=name)\n if spec.loader is None :\n if spec.submodule_search_locations is None :\n raise ImportError('missing loader',name=spec.name)\n \n _init_module_attrs(spec,module,override=True )\n return module\n _init_module_attrs(spec,module,override=True )\n if not hasattr(spec.loader,'exec_module'):\n \n \n \n spec.loader.load_module(name)\n else :\n spec.loader.exec_module(module)\n return sys.modules[name]\n \n \ndef _load_backward_compatible(spec):\n\n\n\n spec.loader.load_module(spec.name)\n \n module=sys.modules[spec.name]\n if getattr(module,'__loader__',None )is None :\n try :\n module.__loader__=spec.loader\n except AttributeError:\n pass\n if getattr(module,'__package__',None )is None :\n try :\n \n \n \n module.__package__=module.__name__\n if not hasattr(module,'__path__'):\n module.__package__=spec.name.rpartition('.')[0]\n except AttributeError:\n pass\n if getattr(module,'__spec__',None )is None :\n try :\n module.__spec__=spec\n except AttributeError:\n pass\n return module\n \ndef _load_unlocked(spec):\n\n if spec.loader is not None :\n \n if not hasattr(spec.loader,'exec_module'):\n return _load_backward_compatible(spec)\n \n module=module_from_spec(spec)\n with _installed_safely(module):\n if spec.loader is None :\n if spec.submodule_search_locations is None :\n raise ImportError('missing loader',name=spec.name)\n \n else :\n spec.loader.exec_module(module)\n \n \n \n \n return sys.modules[spec.name]\n \n \n \ndef _load(spec):\n ''\n\n\n\n\n\n\n \n with _ModuleLockManager(spec.name):\n return _load_unlocked(spec)\n \n \n \n \nclass BuiltinImporter:\n\n ''\n\n\n\n\n \n \n @staticmethod\n def module_repr(module):\n ''\n\n\n\n \n return ''.format(module.__name__)\n \n @classmethod\n def find_spec(cls,fullname,path=None ,target=None ):\n if path is not None :\n return None\n if _imp.is_builtin(fullname):\n return spec_from_loader(fullname,cls,origin='built-in')\n else :\n return None\n \n @classmethod\n def find_module(cls,fullname,path=None ):\n ''\n\n\n\n\n\n \n spec=cls.find_spec(fullname,path)\n return spec.loader if spec is not None else None\n \n @classmethod\n def create_module(self,spec):\n ''\n if spec.name not in sys.builtin_module_names:\n raise ImportError('{!r} is not a built-in module'.format(spec.name),\n name=spec.name)\n return _call_with_frames_removed(_imp.create_builtin,spec)\n \n @classmethod\n def exec_module(self,module):\n ''\n _call_with_frames_removed(_imp.exec_builtin,module)\n \n @classmethod\n @_requires_builtin\n def 
get_code(cls,fullname):\n ''\n return None\n \n @classmethod\n @_requires_builtin\n def get_source(cls,fullname):\n ''\n return None\n \n @classmethod\n @_requires_builtin\n def is_package(cls,fullname):\n ''\n return False\n \n load_module=classmethod(_load_module_shim)\n \n \nclass FrozenImporter:\n\n ''\n\n\n\n\n \n \n @staticmethod\n def module_repr(m):\n ''\n\n\n\n \n return ''.format(m.__name__)\n \n @classmethod\n def find_spec(cls,fullname,path=None ,target=None ):\n if _imp.is_frozen(fullname):\n return spec_from_loader(fullname,cls,origin='frozen')\n else :\n return None\n \n @classmethod\n def find_module(cls,fullname,path=None ):\n ''\n\n\n\n \n return cls if _imp.is_frozen(fullname)else None\n \n @classmethod\n def create_module(cls,spec):\n ''\n \n @staticmethod\n def exec_module(module):\n name=module.__spec__.name\n if not _imp.is_frozen(name):\n raise ImportError('{!r} is not a frozen module'.format(name),\n name=name)\n code=_call_with_frames_removed(_imp.get_frozen_object,name)\n exec(code,module.__dict__)\n \n @classmethod\n def load_module(cls,fullname):\n ''\n\n\n\n \n return _load_module_shim(cls,fullname)\n \n @classmethod\n @_requires_frozen\n def get_code(cls,fullname):\n ''\n return _imp.get_frozen_object(fullname)\n \n @classmethod\n @_requires_frozen\n def get_source(cls,fullname):\n ''\n return None\n \n @classmethod\n @_requires_frozen\n def is_package(cls,fullname):\n ''\n return _imp.is_frozen_package(fullname)\n \n \n \n \nclass _ImportLockContext:\n\n ''\n \n def __enter__(self):\n ''\n _imp.acquire_lock()\n \n def __exit__(self,exc_type,exc_value,exc_traceback):\n ''\n _imp.release_lock()\n \n \ndef _resolve_name(name,package,level):\n ''\n bits=package.rsplit('.',level -1)\n if len(bits)= 0')\n if level >0:\n if not isinstance(package,str):\n raise TypeError('__package__ not set to a string')\n elif not package:\n raise ImportError('attempted relative import with no known parent '\n 'package')\n if not name and level ==0:\n raise ValueError('Empty module name')\n \n \n_ERR_MSG_PREFIX='No module named '\n_ERR_MSG=_ERR_MSG_PREFIX+'{!r}'\n\ndef _find_and_load_unlocked(name,import_):\n path=None\n parent=name.rpartition('.')[0]\n if parent:\n if parent not in sys.modules:\n _call_with_frames_removed(import_,parent)\n \n if name in sys.modules:\n return sys.modules[name]\n parent_module=sys.modules[parent]\n try :\n path=parent_module.__path__\n except AttributeError:\n msg=(_ERR_MSG+'; {!r} is not a package').format(name,parent)\n raise ModuleNotFoundError(msg,name=name)from None\n spec=_find_spec(name,path)\n if spec is None :\n raise ModuleNotFoundError(_ERR_MSG.format(name),name=name)\n else :\n module=_load_unlocked(spec)\n if parent:\n \n parent_module=sys.modules[parent]\n setattr(parent_module,name.rpartition('.')[2],module)\n return module\n \n \n_NEEDS_LOADING=object()\n\n\ndef _find_and_load(name,import_):\n ''\n with _ModuleLockManager(name):\n module=sys.modules.get(name,_NEEDS_LOADING)\n if module is _NEEDS_LOADING:\n return _find_and_load_unlocked(name,import_)\n \n if module is None :\n message=('import of {} halted; '\n 'None in sys.modules'.format(name))\n raise ModuleNotFoundError(message,name=name)\n \n _lock_unlock_module(name)\n return module\n \n \ndef _gcd_import(name,package=None ,level=0):\n ''\n\n\n\n\n\n\n \n _sanity_check(name,package,level)\n if level >0:\n name=_resolve_name(name,package,level)\n return _find_and_load(name,_gcd_import)\n \n \ndef _handle_fromlist(module,fromlist,import_,*,recursive=False ):\n ''\n\n\n\n\n\n \n 
\n \n if hasattr(module,'__path__'):\n for x in fromlist:\n if not isinstance(x,str):\n if recursive:\n where=module.__name__+'.__all__'\n else :\n where=\"``from list''\"\n raise TypeError(f\"Item in {where} must be str, \"\n f\"not {type(x).__name__}\")\n elif x =='*':\n if not recursive and hasattr(module,'__all__'):\n _handle_fromlist(module,module.__all__,import_,\n recursive=True )\n elif not hasattr(module,x):\n from_name='{}.{}'.format(module.__name__,x)\n try :\n _call_with_frames_removed(import_,from_name)\n except ModuleNotFoundError as exc:\n \n \n \n if (exc.name ==from_name and\n sys.modules.get(from_name,_NEEDS_LOADING)is not None ):\n continue\n raise\n return module\n \n \ndef _calc___package__(globals):\n ''\n\n\n\n\n \n package=globals.get('__package__')\n spec=globals.get('__spec__')\n if package is not None :\n if spec is not None and package !=spec.parent:\n _warnings.warn(\"__package__ != __spec__.parent \"\n f\"({package!r} != {spec.parent!r})\",\n ImportWarning,stacklevel=3)\n return package\n elif spec is not None :\n return spec.parent\n else :\n _warnings.warn(\"can't resolve package from __spec__ or __package__, \"\n \"falling back on __name__ and __path__\",\n ImportWarning,stacklevel=3)\n package=globals['__name__']\n if '__path__'not in globals:\n package=package.rpartition('.')[0]\n return package\n \n \ndef __import__(name,globals=None ,locals=None ,fromlist=(),level=0):\n ''\n\n\n\n\n\n\n\n\n \n if level ==0:\n module=_gcd_import(name)\n else :\n globals_=globals if globals is not None else {}\n package=_calc___package__(globals_)\n module=_gcd_import(name,package,level)\n if not fromlist:\n \n \n if level ==0:\n return _gcd_import(name.partition('.')[0])\n elif not name:\n return module\n else :\n \n \n cut_off=len(name)-len(name.partition('.')[0])\n \n \n return sys.modules[module.__name__[:len(module.__name__)-cut_off]]\n else :\n return _handle_fromlist(module,fromlist,_gcd_import)\n \n \ndef _builtin_from_name(name):\n spec=BuiltinImporter.find_spec(name)\n if spec is None :\n raise ImportError('no built-in module named '+name)\n return _load_unlocked(spec)\n \n \nmodule_type=type(sys)\nfor name,module in sys.modules.items():\n if isinstance(module,module_type):\n if name in sys.builtin_module_names:\n loader=BuiltinImporter\n elif _imp.is_frozen(name):\n loader=FrozenImporter\n else :\n continue\n spec=_spec_from_module(module,loader)\n _init_module_attrs(spec,module)\n \n \nself_module=sys.modules[__name__]\n\n\nfor builtin_name in ('_warnings',):\n if builtin_name not in sys.modules:\n builtin_module=_builtin_from_name(builtin_name)\n else :\n builtin_module=sys.modules[builtin_name]\n setattr(self_module,builtin_name,builtin_module)\n \n \ndef _install(sys_module,_imp_module):\n ''\n _setup(sys_module,_imp_module)\n \n sys.meta_path.append(BuiltinImporter)\n sys.meta_path.append(FrozenImporter)\n \n \ndef _install_external_importers():\n ''\n global _bootstrap_external\n import _frozen_importlib_external\n _bootstrap_external=_frozen_importlib_external\n _frozen_importlib_external._install(sys.modules[__name__])\n \n", ["_frozen_importlib_external", "_imp", "_weakref", "sys"]], "_functools": [".py", "from reprlib import recursive_repr\n\nclass partial:\n ''\n\n \n \n __slots__=\"func\",\"args\",\"keywords\",\"__dict__\",\"__weakref__\"\n \n def __new__(*args,**keywords):\n if not args:\n raise TypeError(\"descriptor '__new__' of partial needs an argument\")\n if len(args)<2:\n raise TypeError(\"type 'partial' takes at least one argument\")\n 
cls,func,*args=args\n if not callable(func):\n raise TypeError(\"the first argument must be callable\")\n args=tuple(args)\n \n if hasattr(func,\"func\"):\n args=func.args+args\n tmpkw=func.keywords.copy()\n tmpkw.update(keywords)\n keywords=tmpkw\n del tmpkw\n func=func.func\n \n self=super(partial,cls).__new__(cls)\n \n self.func=func\n self.args=args\n self.keywords=keywords\n return self\n \n def __call__(*args,**keywords):\n if not args:\n raise TypeError(\"descriptor '__call__' of partial needs an argument\")\n self,*args=args\n newkeywords=self.keywords.copy()\n newkeywords.update(keywords)\n return self.func(*self.args,*args,**newkeywords)\n \n @recursive_repr()\n def __repr__(self):\n qualname=type(self).__qualname__\n args=[repr(self.func)]\n args.extend(repr(x)for x in self.args)\n args.extend(f\"{k}={v!r}\"for (k,v)in self.keywords.items())\n if type(self).__module__ ==\"functools\":\n return f\"functools.{qualname}({', '.join(args)})\"\n return f\"{qualname}({', '.join(args)})\"\n \n def __reduce__(self):\n return type(self),(self.func,),(self.func,self.args,\n self.keywords or None ,self.__dict__ or None )\n \n def __setstate__(self,state):\n if not isinstance(state,tuple):\n raise TypeError(\"argument to __setstate__ must be a tuple\")\n if len(state)!=4:\n raise TypeError(f\"expected 4 items in state, got {len(state)}\")\n func,args,kwds,namespace=state\n if (not callable(func)or not isinstance(args,tuple)or\n (kwds is not None and not isinstance(kwds,dict))or\n (namespace is not None and not isinstance(namespace,dict))):\n raise TypeError(\"invalid partial state\")\n \n args=tuple(args)\n if kwds is None :\n kwds={}\n elif type(kwds)is not dict:\n kwds=dict(kwds)\n if namespace is None :\n namespace={}\n \n self.__dict__=namespace\n self.func=func\n self.args=args\n self.keywords=kwds\n \ndef reduce(func,iterable,initializer=None ):\n args=iter(iterable)\n if initializer is not None :\n res=initializer\n else :\n res=next(args)\n while True :\n try :\n res=func(res,next(args))\n except StopIteration:\n return res\n", ["reprlib"]], "_imp": [".py", "''\nimport sys\n\ndef _fix_co_filename(*args,**kw):\n ''\n\n\n\n \n pass\n \ndef acquire_lock(*args,**kw):\n ''\n\n \n pass\n \ncheck_hash_based_pycs=\"\"\"default\"\"\"\n\ndef create_builtin(spec):\n ''\n return __import__(spec.name)\n \ndef create_dynamic(*args,**kw):\n ''\n pass\n \ndef exec_builtin(*args,**kw):\n ''\n pass\n \ndef exec_dynamic(*args,**kw):\n ''\n pass\n \ndef extension_suffixes(*args,**kw):\n ''\n return []\n \ndef get_frozen_object(*args,**kw):\n ''\n pass\n \ndef init_frozen(*args,**kw):\n ''\n pass\n \ndef is_builtin(module_name):\n\n return module_name in __BRYTHON__.builtin_module_names\n \ndef is_frozen(*args,**kw):\n ''\n return False\n \ndef is_frozen_package(*args,**kw):\n ''\n pass\n \ndef lock_held(*args,**kw):\n ''\n \n return False\n \ndef release_lock(*args,**kw):\n ''\n \n pass\n \ndef source_hash(*args,**kw):\n pass\n", ["sys"]], "_io": [".py", "''\n\n\n\nimport os\nimport abc\nimport codecs\nimport errno\n\ntry :\n from _thread import allocate_lock as Lock\nexcept ImportError:\n from _dummy_thread import allocate_lock as Lock\n \n \nfrom _io_classes import *\nimport _io_classes\n_IOBase=_io_classes._IOBase\n_RawIOBase=_io_classes._RawIOBase\n_BufferedIOBase=_io_classes._BufferedIOBase\n_TextIOBase=_io_classes._TextIOBase\n\nSEEK_SET=0\nSEEK_CUR=1\nSEEK_END=2\n\nvalid_seek_flags={0,1,2}\nif hasattr(os,'SEEK_HOLE'):\n valid_seek_flags.add(os.SEEK_HOLE)\n valid_seek_flags.add(os.SEEK_DATA)\n \n 
\nDEFAULT_BUFFER_SIZE=8 *1024\n\n\n\n\n\n\nBlockingIOError=BlockingIOError\n\n\ndef __open(file,mode=\"r\",buffering=-1,encoding=None ,errors=None ,\nnewline=None ,closefd=True ,opener=None ):\n\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if not isinstance(file,(str,bytes,int)):\n raise TypeError(\"invalid file: %r\"%file)\n if not isinstance(mode,str):\n raise TypeError(\"invalid mode: %r\"%mode)\n if not isinstance(buffering,int):\n raise TypeError(\"invalid buffering: %r\"%buffering)\n if encoding is not None and not isinstance(encoding,str):\n raise TypeError(\"invalid encoding: %r\"%encoding)\n if errors is not None and not isinstance(errors,str):\n raise TypeError(\"invalid errors: %r\"%errors)\n modes=set(mode)\n if modes -set(\"axrwb+tU\")or len(mode)>len(modes):\n raise ValueError(\"invalid mode: %r\"%mode)\n creating=\"x\"in modes\n reading=\"r\"in modes\n writing=\"w\"in modes\n appending=\"a\"in modes\n updating=\"+\"in modes\n text=\"t\"in modes\n binary=\"b\"in modes\n if \"U\"in modes:\n if creating or writing or appending:\n raise ValueError(\"can't use U and writing mode at once\")\n reading=True\n if text and binary:\n raise ValueError(\"can't have text and binary mode at once\")\n if creating+reading+writing+appending >1:\n raise ValueError(\"can't have read/write/append mode at once\")\n if not (creating or reading or writing or appending):\n raise ValueError(\"must have exactly one of read/write/append mode\")\n if binary and encoding is not None :\n raise ValueError(\"binary mode doesn't take an encoding argument\")\n if binary and errors is not None :\n raise ValueError(\"binary mode doesn't take an errors argument\")\n if binary and newline is not None :\n raise ValueError(\"binary mode doesn't take a newline argument\")\n raw=FileIO(file,\n (creating and \"x\"or \"\")+\n (reading and \"r\"or \"\")+\n (writing and \"w\"or \"\")+\n (appending and \"a\"or \"\")+\n (updating and \"+\"or \"\"),\n closefd,opener=opener)\n line_buffering=False\n if buffering ==1 or buffering <0 and raw.isatty():\n buffering=-1\n line_buffering=True\n if buffering <0:\n buffering=DEFAULT_BUFFER_SIZE\n try :\n bs=os.fstat(raw.fileno()).st_blksize\n except (os.error,AttributeError):\n pass\n else :\n if bs >1:\n buffering=bs\n if buffering <0:\n raise ValueError(\"invalid buffering size\")\n if buffering ==0:\n if binary:\n return raw\n raise ValueError(\"can't have unbuffered text I/O\")\n if updating:\n buffer=BufferedRandom(raw,buffering)\n elif creating or writing or appending:\n buffer=BufferedWriter(raw,buffering)\n elif reading:\n buffer=BufferedReader(raw,buffering)\n else :\n raise ValueError(\"unknown mode: %r\"%mode)\n if binary:\n return buffer\n text=TextIOWrapper(buffer,encoding,errors,newline,line_buffering)\n text.mode=mode\n return text\n \nopen=__open\n\ndef open_code(file):\n return __builtins__.open(file,encoding=\"utf-8\")\n \nclass DocDescriptor:\n ''\n \n def __get__(self,obj,typ):\n return (\n \"open(file, mode='r', buffering=-1, encoding=None, \"\n \"errors=None, newline=None, closefd=True)\\n\\n\"+\n open.__doc__)\n \nclass OpenWrapper:\n ''\n\n\n\n\n\n \n __doc__=DocDescriptor()\n \n def __new__(cls,*args,**kwargs):\n return open(*args,**kwargs)\n \n \n \n \nclass UnsupportedOperation(ValueError,IOError):\n pass\n \n", ["_dummy_thread", "_io_classes", "_thread", "abc", 
"codecs", "errno", "os"]], "_markupbase": [".py", "''\n\n\n\n\n\n\nimport re\n\n_declname_match=re.compile(r'[a-zA-Z][-_.a-zA-Z0-9]*\\s*').match\n_declstringlit_match=re.compile(r'(\\'[^\\']*\\'|\"[^\"]*\")\\s*').match\n_commentclose=re.compile(r'--\\s*>')\n_markedsectionclose=re.compile(r']\\s*]\\s*>')\n\n\n\n\n_msmarkedsectionclose=re.compile(r']\\s*>')\n\ndel re\n\n\nclass ParserBase:\n ''\n \n \n def __init__(self):\n if self.__class__ is ParserBase:\n raise RuntimeError(\n \"_markupbase.ParserBase must be subclassed\")\n \n def error(self,message):\n raise NotImplementedError(\n \"subclasses of ParserBase must override error()\")\n \n def reset(self):\n self.lineno=1\n self.offset=0\n \n def getpos(self):\n ''\n return self.lineno,self.offset\n \n \n \n \n \n def updatepos(self,i,j):\n if i >=j:\n return j\n rawdata=self.rawdata\n nlines=rawdata.count(\"\\n\",i,j)\n if nlines:\n self.lineno=self.lineno+nlines\n pos=rawdata.rindex(\"\\n\",i,j)\n self.offset=j -(pos+1)\n else :\n self.offset=self.offset+j -i\n return j\n \n _decl_otherchars=''\n \n \n def parse_declaration(self,i):\n \n \n \n \n \n \n \n \n \n \n rawdata=self.rawdata\n j=i+2\n assert rawdata[i:j]==\"\":\n \n return j+1\n if rawdata[j:j+1]in (\"-\",\"\"):\n \n \n return -1\n \n n=len(rawdata)\n if rawdata[j:j+2]=='--':\n \n return self.parse_comment(i)\n elif rawdata[j]=='[':\n \n \n \n \n return self.parse_marked_section(i)\n else :\n decltype,j=self._scan_name(j,i)\n if j <0:\n return j\n if decltype ==\"doctype\":\n self._decl_otherchars=''\n while j \":\n \n data=rawdata[i+2:j]\n if decltype ==\"doctype\":\n self.handle_decl(data)\n else :\n \n \n \n \n self.unknown_decl(data)\n return j+1\n if c in \"\\\"'\":\n m=_declstringlit_match(rawdata,j)\n if not m:\n return -1\n j=m.end()\n elif c in \"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ\":\n name,j=self._scan_name(j,i)\n elif c in self._decl_otherchars:\n j=j+1\n elif c ==\"[\":\n \n if decltype ==\"doctype\":\n j=self._parse_doctype_subset(j+1,i)\n elif decltype in {\"attlist\",\"linktype\",\"link\",\"element\"}:\n \n \n \n \n self.error(\"unsupported '[' char in %s declaration\"%decltype)\n else :\n self.error(\"unexpected '[' char in declaration\")\n else :\n self.error(\n \"unexpected %r char in declaration\"%rawdata[j])\n if j <0:\n return j\n return -1\n \n \n \n def parse_marked_section(self,i,report=1):\n rawdata=self.rawdata\n assert rawdata[i:i+3]=='n:\n \n return -1\n if rawdata[j:j+4]==\"\n \n \"\"\"%(self.OutputString(attrs).replace('\"',r'\\\"'))\n \n def OutputString(self,attrs=None ):\n \n \n result=[]\n append=result.append\n \n \n append(\"%s=%s\"%(self.key,self.coded_value))\n \n \n if attrs is None :\n attrs=self._reserved\n items=sorted(self.items())\n for key,value in items:\n if value ==\"\":\n continue\n if key not in attrs:\n continue\n if key ==\"expires\"and isinstance(value,int):\n append(\"%s=%s\"%(self._reserved[key],_getdate(value)))\n elif key ==\"max-age\"and isinstance(value,int):\n append(\"%s=%d\"%(self._reserved[key],value))\n elif key ==\"secure\":\n append(str(self._reserved[key]))\n elif key ==\"httponly\":\n append(str(self._reserved[key]))\n else :\n append(\"%s=%s\"%(self._reserved[key],value))\n \n \n return _semispacejoin(result)\n \n \n \n \n \n \n \n \n \n \n \n_LegalCharsPatt=r\"[\\w\\d!#%&'~_`><@,:/\\$\\*\\+\\-\\.\\^\\|\\)\\(\\?\\}\\{\\=]\"\n_CookiePattern=re.compile(r\"\"\"\n (?x) # This is a verbose pattern\n (?P # Start of group 'key'\n \"\"\"+_LegalCharsPatt+r\"\"\"+? 
# Any word of at least one letter\n ) # End of group 'key'\n ( # Optional group: there may not be a value.\n \\s*=\\s* # Equal Sign\n (?P # Start of group 'val'\n \"(?:[^\\\\\"]|\\\\.)*\" # Any doublequoted string\n | # or\n \\w{3},\\s[\\w\\d\\s-]{9,11}\\s[\\d:]{8}\\sGMT # Special case for \"expires\" attr\n | # or\n \"\"\"+_LegalCharsPatt+r\"\"\"* # Any word or empty string\n ) # End of group 'val'\n )? # End of optional value group\n \\s* # Any number of spaces.\n (\\s+|;|$) # Ending either at space, semicolon, or EOS.\n \"\"\",re.ASCII)\n\n\n\n\n\nclass BaseCookie(dict):\n ''\n \n def value_decode(self,val):\n ''\n\n\n\n\n \n return val,val\n \n def value_encode(self,val):\n ''\n\n\n\n \n strval=str(val)\n return strval,strval\n \n def __init__(self,input=None ):\n if input:\n self.load(input)\n \n def __set(self,key,real_value,coded_value):\n ''\n M=self.get(key,Morsel())\n M.set(key,real_value,coded_value)\n dict.__setitem__(self,key,M)\n \n def __setitem__(self,key,value):\n ''\n rval,cval=self.value_encode(value)\n self.__set(key,rval,cval)\n \n def output(self,attrs=None ,header=\"Set-Cookie:\",sep=\"\\015\\012\"):\n ''\n result=[]\n items=sorted(self.items())\n for key,value in items:\n result.append(value.output(attrs,header))\n return sep.join(result)\n \n __str__=output\n \n def __repr__(self):\n l=[]\n items=sorted(self.items())\n for key,value in items:\n l.append('%s=%s'%(key,repr(value.value)))\n return '<%s: %s>'%(self.__class__.__name__,_spacejoin(l))\n \n def js_output(self,attrs=None ):\n ''\n result=[]\n items=sorted(self.items())\n for key,value in items:\n result.append(value.js_output(attrs))\n return _nulljoin(result)\n \n def load(self,rawdata):\n ''\n\n\n\n \n if isinstance(rawdata,str):\n self.__parse_string(rawdata)\n else :\n \n for key,value in rawdata.items():\n self[key]=value\n return\n \n def __parse_string(self,str,patt=_CookiePattern):\n i=0\n n=len(str)\n M=None\n \n while 0 <=i ModuleType:\n ''\n if hasattr(name,'__spec__'):\n return name\n return import_module(name)\n \n \ndef _get_package(package)->ModuleType:\n ''\n\n\n\n \n module=_resolve(package)\n if module.__spec__.submodule_search_locations is None :\n raise TypeError('{!r} is not a package'.format(package))\n return module\n \n \ndef _normalize_path(path)->str:\n ''\n\n\n \n parent,file_name=os.path.split(path)\n if parent:\n raise ValueError('{!r} must be only a file name'.format(path))\n return file_name\n \n \ndef _get_resource_reader(\npackage:ModuleType)->Optional[resources_abc.ResourceReader]:\n\n\n\n\n\n spec=package.__spec__\n if hasattr(spec.loader,'get_resource_reader'):\n return cast(resources_abc.ResourceReader,\n spec.loader.get_resource_reader(spec.name))\n return None\n \n \ndef _check_location(package):\n if package.__spec__.origin is None or not package.__spec__.has_location:\n raise FileNotFoundError(f'Package has no location {package!r}')\n \n \ndef open_binary(package:Package,resource:Resource)->BinaryIO:\n ''\n resource=_normalize_path(resource)\n package=_get_package(package)\n reader=_get_resource_reader(package)\n if reader is not None :\n return reader.open_resource(resource)\n absolute_package_path=os.path.abspath(\n package.__spec__.origin or 'non-existent file')\n package_path=os.path.dirname(absolute_package_path)\n full_path=os.path.join(package_path,resource)\n try :\n return open(full_path,mode='rb')\n except OSError:\n \n \n \n loader=cast(ResourceLoader,package.__spec__.loader)\n data=None\n if hasattr(package.__spec__.loader,'get_data'):\n with 
suppress(OSError):\n data=loader.get_data(full_path)\n if data is None :\n package_name=package.__spec__.name\n message='{!r} resource not found in {!r}'.format(\n resource,package_name)\n raise FileNotFoundError(message)\n return BytesIO(data)\n \n \ndef open_text(package:Package,\nresource:Resource,\nencoding:str='utf-8',\nerrors:str='strict')->TextIO:\n ''\n return TextIOWrapper(\n open_binary(package,resource),encoding=encoding,errors=errors)\n \n \ndef read_binary(package:Package,resource:Resource)->bytes:\n ''\n with open_binary(package,resource)as fp:\n return fp.read()\n \n \ndef read_text(package:Package,\nresource:Resource,\nencoding:str='utf-8',\nerrors:str='strict')->str:\n ''\n\n\n\n \n with open_text(package,resource,encoding,errors)as fp:\n return fp.read()\n \n \ndef files(package:Package)->resources_abc.Traversable:\n ''\n\n \n return _common.from_package(_get_package(package))\n \n \ndef path(\npackage:Package,resource:Resource,\n)->'ContextManager[Path]':\n ''\n\n\n\n\n\n\n \n reader=_get_resource_reader(_get_package(package))\n return (\n _path_from_reader(reader,resource)\n if reader else\n _common.as_file(files(package).joinpath(_normalize_path(resource)))\n )\n \n \n@contextmanager\ndef _path_from_reader(reader,resource):\n norm_resource=_normalize_path(resource)\n with suppress(FileNotFoundError):\n yield Path(reader.resource_path(norm_resource))\n return\n opener_reader=reader.open_resource(norm_resource)\n with _common._tempfile(opener_reader.read,suffix=norm_resource)as res:\n yield res\n \n \ndef is_resource(package:Package,name:str)->bool:\n ''\n\n\n \n package=_get_package(package)\n _normalize_path(name)\n reader=_get_resource_reader(package)\n if reader is not None :\n return reader.is_resource(name)\n package_contents=set(contents(package))\n if name not in package_contents:\n return False\n return (_common.from_package(package)/name).is_file()\n \n \ndef contents(package:Package)->Iterable[str]:\n ''\n\n\n\n\n \n package=_get_package(package)\n reader=_get_resource_reader(package)\n if reader is not None :\n return reader.contents()\n \n \n namespace=(\n package.__spec__.origin is None or\n package.__spec__.origin =='namespace'\n )\n if namespace or not package.__spec__.has_location:\n return ()\n return list(item.name for item in _common.from_package(package).iterdir())\n", ["contextlib", "importlib", "importlib._common", "importlib.abc", "io", "os", "pathlib", "types", "typing", "typing.io"]], "importlib.util": [".py", "''\nfrom . 
import abc\nfrom ._bootstrap import module_from_spec\nfrom ._bootstrap import _resolve_name\nfrom ._bootstrap import spec_from_loader\nfrom ._bootstrap import _find_spec\nfrom ._bootstrap_external import MAGIC_NUMBER\nfrom ._bootstrap_external import _RAW_MAGIC_NUMBER\nfrom ._bootstrap_external import cache_from_source\nfrom ._bootstrap_external import decode_source\nfrom ._bootstrap_external import source_from_cache\nfrom ._bootstrap_external import spec_from_file_location\n\nfrom contextlib import contextmanager\nimport _imp\nimport functools\nimport sys\nimport types\nimport warnings\n\n\ndef source_hash(source_bytes):\n ''\n return _imp.source_hash(_RAW_MAGIC_NUMBER,source_bytes)\n \n \ndef resolve_name(name,package):\n ''\n if not name.startswith('.'):\n return name\n elif not package:\n raise ImportError(f'no package specified for {repr(name)} '\n '(required for relative module names)')\n level=0\n for character in name:\n if character !='.':\n break\n level +=1\n return _resolve_name(name[level:],package,level)\n \n \ndef _find_spec_from_path(name,path=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n if name not in sys.modules:\n return _find_spec(name,path)\n else :\n module=sys.modules[name]\n if module is None :\n return None\n try :\n spec=module.__spec__\n except AttributeError:\n raise ValueError('{}.__spec__ is not set'.format(name))from None\n else :\n if spec is None :\n raise ValueError('{}.__spec__ is None'.format(name))\n return spec\n \n \ndef find_spec(name,package=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n fullname=resolve_name(name,package)if name.startswith('.')else name\n if fullname not in sys.modules:\n parent_name=fullname.rpartition('.')[0]\n if parent_name:\n parent=__import__(parent_name,fromlist=['__path__'])\n try :\n parent_path=parent.__path__\n except AttributeError as e:\n raise ModuleNotFoundError(\n f\"__path__ attribute not found on {parent_name!r} \"\n f\"while trying to find {fullname!r}\",name=fullname)from e\n else :\n parent_path=None\n return _find_spec(fullname,parent_path)\n else :\n module=sys.modules[fullname]\n if module is None :\n return None\n try :\n spec=module.__spec__\n except AttributeError:\n raise ValueError('{}.__spec__ is not set'.format(name))from None\n else :\n if spec is None :\n raise ValueError('{}.__spec__ is None'.format(name))\n return spec\n \n \n@contextmanager\ndef _module_to_load(name):\n is_reload=name in sys.modules\n \n module=sys.modules.get(name)\n if not is_reload:\n \n \n \n module=type(sys)(name)\n \n \n module.__initializing__=True\n sys.modules[name]=module\n try :\n yield module\n except Exception:\n if not is_reload:\n try :\n del sys.modules[name]\n except KeyError:\n pass\n finally :\n module.__initializing__=False\n \n \ndef set_package(fxn):\n ''\n\n\n\n \n @functools.wraps(fxn)\n def set_package_wrapper(*args,**kwargs):\n warnings.warn('The import system now takes care of this automatically.',\n DeprecationWarning,stacklevel=2)\n module=fxn(*args,**kwargs)\n if getattr(module,'__package__',None )is None :\n module.__package__=module.__name__\n if not hasattr(module,'__path__'):\n module.__package__=module.__package__.rpartition('.')[0]\n return module\n return set_package_wrapper\n \n \ndef set_loader(fxn):\n ''\n\n\n\n \n @functools.wraps(fxn)\n def set_loader_wrapper(self,*args,**kwargs):\n warnings.warn('The import system now takes care of this automatically.',\n DeprecationWarning,stacklevel=2)\n module=fxn(self,*args,**kwargs)\n if getattr(module,'__loader__',None )is None :\n 
module.__loader__=self\n return module\n return set_loader_wrapper\n \n \ndef module_for_loader(fxn):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n warnings.warn('The import system now takes care of this automatically.',\n DeprecationWarning,stacklevel=2)\n @functools.wraps(fxn)\n def module_for_loader_wrapper(self,fullname,*args,**kwargs):\n with _module_to_load(fullname)as module:\n module.__loader__=self\n try :\n is_package=self.is_package(fullname)\n except (ImportError,AttributeError):\n pass\n else :\n if is_package:\n module.__package__=fullname\n else :\n module.__package__=fullname.rpartition('.')[0]\n \n return fxn(self,module,*args,**kwargs)\n \n return module_for_loader_wrapper\n \n \nclass _LazyModule(types.ModuleType):\n\n ''\n \n def __getattribute__(self,attr):\n ''\n \n \n \n self.__class__=types.ModuleType\n \n \n original_name=self.__spec__.name\n \n \n attrs_then=self.__spec__.loader_state['__dict__']\n original_type=self.__spec__.loader_state['__class__']\n attrs_now=self.__dict__\n attrs_updated={}\n for key,value in attrs_now.items():\n \n \n if key not in attrs_then:\n attrs_updated[key]=value\n elif id(attrs_now[key])!=id(attrs_then[key]):\n attrs_updated[key]=value\n self.__spec__.loader.exec_module(self)\n \n \n if original_name in sys.modules:\n if id(self)!=id(sys.modules[original_name]):\n raise ValueError(f\"module object for {original_name!r} \"\n \"substituted in sys.modules during a lazy \"\n \"load\")\n \n \n self.__dict__.update(attrs_updated)\n return getattr(self,attr)\n \n def __delattr__(self,attr):\n ''\n \n \n self.__getattribute__(attr)\n delattr(self,attr)\n \n \nclass LazyLoader(abc.Loader):\n\n ''\n \n @staticmethod\n def __check_eager_loader(loader):\n if not hasattr(loader,'exec_module'):\n raise TypeError('loader must define exec_module()')\n \n @classmethod\n def factory(cls,loader):\n ''\n cls.__check_eager_loader(loader)\n return lambda *args,**kwargs:cls(loader(*args,**kwargs))\n \n def __init__(self,loader):\n self.__check_eager_loader(loader)\n self.loader=loader\n \n def create_module(self,spec):\n return self.loader.create_module(spec)\n \n def exec_module(self,module):\n ''\n module.__spec__.loader=self.loader\n module.__loader__=self.loader\n \n \n \n \n loader_state={}\n loader_state['__dict__']=module.__dict__.copy()\n loader_state['__class__']=module.__class__\n module.__spec__.loader_state=loader_state\n module.__class__=_LazyModule\n", ["_imp", "contextlib", "functools", "importlib", "importlib._bootstrap", "importlib._bootstrap_external", "importlib.abc", "sys", "types", "warnings"]], "importlib._bootstrap": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n_bootstrap_external=None\n_thread=None\nimport _weakref\n\ndef _wrap(new,old):\n ''\n for replace in ['__module__','__name__','__qualname__','__doc__']:\n if hasattr(old,replace):\n setattr(new,replace,getattr(old,replace))\n new.__dict__.update(old.__dict__)\n \n \ndef _new_module(name):\n return type(sys)(name)\n \n \n \n \n \n \n_module_locks={}\n\n_blocking_on={}\n\n\nclass _DeadlockError(RuntimeError):\n pass\n \n \nclass _ModuleLock:\n ''\n\n\n \n \n def __init__(self,name):\n self.lock=_thread.allocate_lock()\n self.wakeup=_thread.allocate_lock()\n self.name=name\n self.owner=None\n self.count=0\n self.waiters=0\n \n def has_deadlock(self):\n \n me=_thread.get_ident()\n tid=self.owner\n while True :\n lock=_blocking_on.get(tid)\n if lock is None :\n return False\n tid=lock.owner\n if tid ==me:\n return True\n \n def acquire(self):\n ''\n\n\n\n \n 
tid=_thread.get_ident()\n _blocking_on[tid]=self\n try :\n while True :\n with self.lock:\n if self.count ==0 or self.owner ==tid:\n self.owner=tid\n self.count +=1\n return True\n if self.has_deadlock():\n raise _DeadlockError('deadlock detected by %r'%self)\n if self.wakeup.acquire(False ):\n self.waiters +=1\n \n self.wakeup.acquire()\n self.wakeup.release()\n finally :\n del _blocking_on[tid]\n \n def release(self):\n tid=_thread.get_ident()\n with self.lock:\n if self.owner !=tid:\n raise RuntimeError('cannot release un-acquired lock')\n assert self.count >0\n self.count -=1\n if self.count ==0:\n self.owner=None\n if self.waiters:\n self.waiters -=1\n self.wakeup.release()\n \n def __repr__(self):\n return '_ModuleLock({!r}) at {}'.format(self.name,id(self))\n \n \nclass _DummyModuleLock:\n ''\n \n \n def __init__(self,name):\n self.name=name\n self.count=0\n \n def acquire(self):\n self.count +=1\n return True\n \n def release(self):\n if self.count ==0:\n raise RuntimeError('cannot release un-acquired lock')\n self.count -=1\n \n def __repr__(self):\n return '_DummyModuleLock({!r}) at {}'.format(self.name,id(self))\n \n \nclass _ModuleLockManager:\n\n def __init__(self,name):\n self._name=name\n self._lock=None\n \n def __enter__(self):\n self._lock=_get_module_lock(self._name)\n self._lock.acquire()\n \n def __exit__(self,*args,**kwargs):\n self._lock.release()\n \n \n \n \ndef _get_module_lock(name):\n ''\n\n\n \n \n _imp.acquire_lock()\n try :\n try :\n lock=_module_locks[name]()\n except KeyError:\n lock=None\n \n if lock is None :\n if _thread is None :\n lock=_DummyModuleLock(name)\n else :\n lock=_ModuleLock(name)\n \n def cb(ref,name=name):\n _imp.acquire_lock()\n try :\n \n \n \n if _module_locks.get(name)is ref:\n del _module_locks[name]\n finally :\n _imp.release_lock()\n \n _module_locks[name]=_weakref.ref(lock,cb)\n finally :\n _imp.release_lock()\n \n return lock\n \n \ndef _lock_unlock_module(name):\n ''\n\n\n\n \n lock=_get_module_lock(name)\n try :\n lock.acquire()\n except _DeadlockError:\n \n \n pass\n else :\n lock.release()\n \n \ndef _call_with_frames_removed(f,*args,**kwds):\n ''\n\n\n\n\n\n \n return f(*args,**kwds)\n \n \ndef _verbose_message(message,*args,verbosity=1):\n ''\n if sys.flags.verbose >=verbosity:\n if not message.startswith(('#','import ')):\n message='# '+message\n print(message.format(*args),file=sys.stderr)\n \n \ndef _requires_builtin(fxn):\n ''\n def _requires_builtin_wrapper(self,fullname):\n if fullname not in sys.builtin_module_names:\n raise ImportError('{!r} is not a built-in module'.format(fullname),\n name=fullname)\n return fxn(self,fullname)\n _wrap(_requires_builtin_wrapper,fxn)\n return _requires_builtin_wrapper\n \n \ndef _requires_frozen(fxn):\n ''\n def _requires_frozen_wrapper(self,fullname):\n if not _imp.is_frozen(fullname):\n raise ImportError('{!r} is not a frozen module'.format(fullname),\n name=fullname)\n return fxn(self,fullname)\n _wrap(_requires_frozen_wrapper,fxn)\n return _requires_frozen_wrapper\n \n \n \ndef _load_module_shim(self,fullname):\n ''\n\n\n\n \n spec=spec_from_loader(fullname,self)\n if fullname in sys.modules:\n module=sys.modules[fullname]\n _exec(spec,module)\n return sys.modules[fullname]\n else :\n return _load(spec)\n \n \n \ndef _module_repr(module):\n\n loader=getattr(module,'__loader__',None )\n if hasattr(loader,'module_repr'):\n \n \n \n try :\n return loader.module_repr(module)\n except Exception:\n pass\n try :\n spec=module.__spec__\n except AttributeError:\n pass\n else :\n if spec 
is not None :\n return _module_repr_from_spec(spec)\n \n \n \n try :\n name=module.__name__\n except AttributeError:\n name='?'\n try :\n filename=module.__file__\n except AttributeError:\n if loader is None :\n return ''.format(name)\n else :\n return ''.format(name,loader)\n else :\n return ''.format(name,filename)\n \n \nclass _installed_safely:\n\n def __init__(self,module):\n self._module=module\n self._spec=module.__spec__\n \n def __enter__(self):\n \n \n \n self._spec._initializing=True\n sys.modules[self._spec.name]=self._module\n \n def __exit__(self,*args):\n try :\n spec=self._spec\n if any(arg is not None for arg in args):\n try :\n del sys.modules[spec.name]\n except KeyError:\n pass\n else :\n _verbose_message('import {!r} # {!r}',spec.name,spec.loader)\n finally :\n self._spec._initializing=False\n \n \nclass ModuleSpec:\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def __init__(self,name,loader,*,origin=None ,loader_state=None ,\n is_package=None ):\n self.name=name\n self.loader=loader\n self.origin=origin\n self.loader_state=loader_state\n self.submodule_search_locations=[]if is_package else None\n \n \n self._set_fileattr=False\n self._cached=None\n \n def __repr__(self):\n args=['name={!r}'.format(self.name),\n 'loader={!r}'.format(self.loader)]\n if self.origin is not None :\n args.append('origin={!r}'.format(self.origin))\n if self.submodule_search_locations is not None :\n args.append('submodule_search_locations={}'\n .format(self.submodule_search_locations))\n return '{}({})'.format(self.__class__.__name__,', '.join(args))\n \n def __eq__(self,other):\n smsl=self.submodule_search_locations\n try :\n return (self.name ==other.name and\n self.loader ==other.loader and\n self.origin ==other.origin and\n smsl ==other.submodule_search_locations and\n self.cached ==other.cached and\n self.has_location ==other.has_location)\n except AttributeError:\n return False\n \n @property\n def cached(self):\n if self._cached is None :\n if self.origin is not None and self._set_fileattr:\n if _bootstrap_external is None :\n raise NotImplementedError\n self._cached=_bootstrap_external._get_cached(self.origin)\n return self._cached\n \n @cached.setter\n def cached(self,cached):\n self._cached=cached\n \n @property\n def parent(self):\n ''\n if self.submodule_search_locations is None :\n return self.name.rpartition('.')[0]\n else :\n return self.name\n \n @property\n def has_location(self):\n return self._set_fileattr\n \n @has_location.setter\n def has_location(self,value):\n self._set_fileattr=bool(value)\n \n \ndef spec_from_loader(name,loader,*,origin=None ,is_package=None ):\n ''\n if hasattr(loader,'get_filename'):\n if _bootstrap_external is None :\n raise NotImplementedError\n spec_from_file_location=_bootstrap_external.spec_from_file_location\n \n if is_package is None :\n return spec_from_file_location(name,loader=loader)\n search=[]if is_package else None\n return spec_from_file_location(name,loader=loader,\n submodule_search_locations=search)\n \n if is_package is None :\n if hasattr(loader,'is_package'):\n try :\n is_package=loader.is_package(name)\n except ImportError:\n is_package=None\n else :\n \n is_package=False\n \n return ModuleSpec(name,loader,origin=origin,is_package=is_package)\n \n \ndef _spec_from_module(module,loader=None ,origin=None ):\n\n try :\n spec=module.__spec__\n except AttributeError:\n pass\n else :\n if spec is not None :\n return spec\n \n name=module.__name__\n if loader is None :\n try :\n 
loader=module.__loader__\n except AttributeError:\n \n pass\n try :\n location=module.__file__\n except AttributeError:\n location=None\n if origin is None :\n if location is None :\n try :\n origin=loader._ORIGIN\n except AttributeError:\n origin=None\n else :\n origin=location\n try :\n cached=module.__cached__\n except AttributeError:\n cached=None\n try :\n submodule_search_locations=list(module.__path__)\n except AttributeError:\n submodule_search_locations=None\n \n spec=ModuleSpec(name,loader,origin=origin)\n spec._set_fileattr=False if location is None else True\n spec.cached=cached\n spec.submodule_search_locations=submodule_search_locations\n return spec\n \n \ndef _init_module_attrs(spec,module,*,override=False ):\n\n\n\n if (override or getattr(module,'__name__',None )is None ):\n try :\n module.__name__=spec.name\n except AttributeError:\n pass\n \n if override or getattr(module,'__loader__',None )is None :\n loader=spec.loader\n if loader is None :\n \n if spec.submodule_search_locations is not None :\n if _bootstrap_external is None :\n raise NotImplementedError\n _NamespaceLoader=_bootstrap_external._NamespaceLoader\n \n loader=_NamespaceLoader.__new__(_NamespaceLoader)\n loader._path=spec.submodule_search_locations\n spec.loader=loader\n \n \n \n \n \n \n \n \n \n \n module.__file__=None\n try :\n module.__loader__=loader\n except AttributeError:\n pass\n \n if override or getattr(module,'__package__',None )is None :\n try :\n module.__package__=spec.parent\n except AttributeError:\n pass\n \n try :\n module.__spec__=spec\n except AttributeError:\n pass\n \n if override or getattr(module,'__path__',None )is None :\n if spec.submodule_search_locations is not None :\n try :\n module.__path__=spec.submodule_search_locations\n except AttributeError:\n pass\n \n if spec.has_location:\n if override or getattr(module,'__file__',None )is None :\n try :\n module.__file__=spec.origin\n except AttributeError:\n pass\n \n if override or getattr(module,'__cached__',None )is None :\n if spec.cached is not None :\n try :\n module.__cached__=spec.cached\n except AttributeError:\n pass\n return module\n \n \ndef module_from_spec(spec):\n ''\n \n module=None\n if hasattr(spec.loader,'create_module'):\n \n \n module=spec.loader.create_module(spec)\n elif hasattr(spec.loader,'exec_module'):\n raise ImportError('loaders that define exec_module() '\n 'must also define create_module()')\n if module is None :\n module=_new_module(spec.name)\n _init_module_attrs(spec,module)\n return module\n \n \ndef _module_repr_from_spec(spec):\n ''\n \n name='?'if spec.name is None else spec.name\n if spec.origin is None :\n if spec.loader is None :\n return ''.format(name)\n else :\n return ''.format(name,spec.loader)\n else :\n if spec.has_location:\n return ''.format(name,spec.origin)\n else :\n return ''.format(spec.name,spec.origin)\n \n \n \ndef _exec(spec,module):\n ''\n name=spec.name\n with _ModuleLockManager(name):\n if sys.modules.get(name)is not module:\n msg='module {!r} not in sys.modules'.format(name)\n raise ImportError(msg,name=name)\n if spec.loader is None :\n if spec.submodule_search_locations is None :\n raise ImportError('missing loader',name=spec.name)\n \n _init_module_attrs(spec,module,override=True )\n return module\n _init_module_attrs(spec,module,override=True )\n if not hasattr(spec.loader,'exec_module'):\n \n \n \n spec.loader.load_module(name)\n else :\n spec.loader.exec_module(module)\n return sys.modules[name]\n \n \ndef _load_backward_compatible(spec):\n\n\n\n 
spec.loader.load_module(spec.name)\n \n module=sys.modules[spec.name]\n if getattr(module,'__loader__',None )is None :\n try :\n module.__loader__=spec.loader\n except AttributeError:\n pass\n if getattr(module,'__package__',None )is None :\n try :\n \n \n \n module.__package__=module.__name__\n if not hasattr(module,'__path__'):\n module.__package__=spec.name.rpartition('.')[0]\n except AttributeError:\n pass\n if getattr(module,'__spec__',None )is None :\n try :\n module.__spec__=spec\n except AttributeError:\n pass\n return module\n \ndef _load_unlocked(spec):\n\n if spec.loader is not None :\n \n if not hasattr(spec.loader,'exec_module'):\n return _load_backward_compatible(spec)\n \n module=module_from_spec(spec)\n with _installed_safely(module):\n if spec.loader is None :\n if spec.submodule_search_locations is None :\n raise ImportError('missing loader',name=spec.name)\n \n else :\n spec.loader.exec_module(module)\n \n \n \n \n return sys.modules[spec.name]\n \n \n \ndef _load(spec):\n ''\n\n\n\n\n\n\n \n with _ModuleLockManager(spec.name):\n return _load_unlocked(spec)\n \n \n \n \nclass BuiltinImporter:\n\n ''\n\n\n\n\n \n \n @staticmethod\n def module_repr(module):\n ''\n\n\n\n \n return ''.format(module.__name__)\n \n @classmethod\n def find_spec(cls,fullname,path=None ,target=None ):\n if path is not None :\n return None\n if _imp.is_builtin(fullname):\n return spec_from_loader(fullname,cls,origin='built-in')\n else :\n return None\n \n @classmethod\n def find_module(cls,fullname,path=None ):\n ''\n\n\n\n\n\n \n spec=cls.find_spec(fullname,path)\n return spec.loader if spec is not None else None\n \n @classmethod\n def create_module(self,spec):\n ''\n if spec.name not in sys.builtin_module_names:\n raise ImportError('{!r} is not a built-in module'.format(spec.name),\n name=spec.name)\n return _call_with_frames_removed(_imp.create_builtin,spec)\n \n @classmethod\n def exec_module(self,module):\n ''\n _call_with_frames_removed(_imp.exec_builtin,module)\n \n @classmethod\n @_requires_builtin\n def get_code(cls,fullname):\n ''\n return None\n \n @classmethod\n @_requires_builtin\n def get_source(cls,fullname):\n ''\n return None\n \n @classmethod\n @_requires_builtin\n def is_package(cls,fullname):\n ''\n return False\n \n load_module=classmethod(_load_module_shim)\n \n \nclass FrozenImporter:\n\n ''\n\n\n\n\n \n \n @staticmethod\n def module_repr(m):\n ''\n\n\n\n \n return ''.format(m.__name__)\n \n @classmethod\n def find_spec(cls,fullname,path=None ,target=None ):\n if _imp.is_frozen(fullname):\n return spec_from_loader(fullname,cls,origin='frozen')\n else :\n return None\n \n @classmethod\n def find_module(cls,fullname,path=None ):\n ''\n\n\n\n \n return cls if _imp.is_frozen(fullname)else None\n \n @classmethod\n def create_module(cls,spec):\n ''\n \n @staticmethod\n def exec_module(module):\n name=module.__spec__.name\n if not _imp.is_frozen(name):\n raise ImportError('{!r} is not a frozen module'.format(name),\n name=name)\n code=_call_with_frames_removed(_imp.get_frozen_object,name)\n exec(code,module.__dict__)\n \n @classmethod\n def load_module(cls,fullname):\n ''\n\n\n\n \n return _load_module_shim(cls,fullname)\n \n @classmethod\n @_requires_frozen\n def get_code(cls,fullname):\n ''\n return _imp.get_frozen_object(fullname)\n \n @classmethod\n @_requires_frozen\n def get_source(cls,fullname):\n ''\n return None\n \n @classmethod\n @_requires_frozen\n def is_package(cls,fullname):\n ''\n return _imp.is_frozen_package(fullname)\n \n \n \n \nclass _ImportLockContext:\n\n 
''\n \n def __enter__(self):\n ''\n _imp.acquire_lock()\n \n def __exit__(self,exc_type,exc_value,exc_traceback):\n ''\n _imp.release_lock()\n \n \ndef _resolve_name(name,package,level):\n ''\n bits=package.rsplit('.',level -1)\n if len(bits)= 0')\n if level >0:\n if not isinstance(package,str):\n raise TypeError('__package__ not set to a string')\n elif not package:\n raise ImportError('attempted relative import with no known parent '\n 'package')\n if not name and level ==0:\n raise ValueError('Empty module name')\n \n \n_ERR_MSG_PREFIX='No module named '\n_ERR_MSG=_ERR_MSG_PREFIX+'{!r}'\n\ndef _find_and_load_unlocked(name,import_):\n path=None\n parent=name.rpartition('.')[0]\n if parent:\n if parent not in sys.modules:\n _call_with_frames_removed(import_,parent)\n \n if name in sys.modules:\n return sys.modules[name]\n parent_module=sys.modules[parent]\n try :\n path=parent_module.__path__\n except AttributeError:\n msg=(_ERR_MSG+'; {!r} is not a package').format(name,parent)\n raise ModuleNotFoundError(msg,name=name)from None\n spec=_find_spec(name,path)\n if spec is None :\n raise ModuleNotFoundError(_ERR_MSG.format(name),name=name)\n else :\n module=_load_unlocked(spec)\n if parent:\n \n parent_module=sys.modules[parent]\n setattr(parent_module,name.rpartition('.')[2],module)\n return module\n \n \n_NEEDS_LOADING=object()\n\n\ndef _find_and_load(name,import_):\n ''\n with _ModuleLockManager(name):\n module=sys.modules.get(name,_NEEDS_LOADING)\n if module is _NEEDS_LOADING:\n return _find_and_load_unlocked(name,import_)\n \n if module is None :\n message=('import of {} halted; '\n 'None in sys.modules'.format(name))\n raise ModuleNotFoundError(message,name=name)\n \n _lock_unlock_module(name)\n return module\n \n \ndef _gcd_import(name,package=None ,level=0):\n ''\n\n\n\n\n\n\n \n _sanity_check(name,package,level)\n if level >0:\n name=_resolve_name(name,package,level)\n return _find_and_load(name,_gcd_import)\n \n \ndef _handle_fromlist(module,fromlist,import_,*,recursive=False ):\n ''\n\n\n\n\n\n \n \n \n if hasattr(module,'__path__'):\n for x in fromlist:\n if not isinstance(x,str):\n if recursive:\n where=module.__name__+'.__all__'\n else :\n where=\"``from list''\"\n raise TypeError(f\"Item in {where} must be str, \"\n f\"not {type(x).__name__}\")\n elif x =='*':\n if not recursive and hasattr(module,'__all__'):\n _handle_fromlist(module,module.__all__,import_,\n recursive=True )\n elif not hasattr(module,x):\n from_name='{}.{}'.format(module.__name__,x)\n try :\n _call_with_frames_removed(import_,from_name)\n except ModuleNotFoundError as exc:\n \n \n \n if (exc.name ==from_name and\n sys.modules.get(from_name,_NEEDS_LOADING)is not None ):\n continue\n raise\n return module\n \n \ndef _calc___package__(globals):\n ''\n\n\n\n\n \n package=globals.get('__package__')\n spec=globals.get('__spec__')\n if package is not None :\n if spec is not None and package !=spec.parent:\n _warnings.warn(\"__package__ != __spec__.parent \"\n f\"({package!r} != {spec.parent!r})\",\n ImportWarning,stacklevel=3)\n return package\n elif spec is not None :\n return spec.parent\n else :\n _warnings.warn(\"can't resolve package from __spec__ or __package__, \"\n \"falling back on __name__ and __path__\",\n ImportWarning,stacklevel=3)\n package=globals['__name__']\n if '__path__'not in globals:\n package=package.rpartition('.')[0]\n return package\n \n \ndef __import__(name,globals=None ,locals=None ,fromlist=(),level=0):\n ''\n\n\n\n\n\n\n\n\n \n if level ==0:\n module=_gcd_import(name)\n else :\n 
globals_=globals if globals is not None else {}\n package=_calc___package__(globals_)\n module=_gcd_import(name,package,level)\n if not fromlist:\n \n \n if level ==0:\n return _gcd_import(name.partition('.')[0])\n elif not name:\n return module\n else :\n \n \n cut_off=len(name)-len(name.partition('.')[0])\n \n \n return sys.modules[module.__name__[:len(module.__name__)-cut_off]]\n else :\n return _handle_fromlist(module,fromlist,_gcd_import)\n \n \ndef _builtin_from_name(name):\n spec=BuiltinImporter.find_spec(name)\n if spec is None :\n raise ImportError('no built-in module named '+name)\n return _load_unlocked(spec)\n \n \ndef _setup(sys_module,_imp_module):\n ''\n\n\n\n\n\n \n global _imp,sys\n _imp=_imp_module\n sys=sys_module\n \n \n module_type=type(sys)\n for name,module in sys.modules.items():\n if isinstance(module,module_type):\n if name in sys.builtin_module_names:\n loader=BuiltinImporter\n elif _imp.is_frozen(name):\n loader=FrozenImporter\n else :\n continue\n spec=_spec_from_module(module,loader)\n _init_module_attrs(spec,module)\n \n \n self_module=sys.modules[__name__]\n \n \n for builtin_name in ('_warnings',):\n if builtin_name not in sys.modules:\n builtin_module=_builtin_from_name(builtin_name)\n else :\n builtin_module=sys.modules[builtin_name]\n setattr(self_module,builtin_name,builtin_module)\n \n \ndef _install(sys_module,_imp_module):\n ''\n _setup(sys_module,_imp_module)\n \n sys.meta_path.append(BuiltinImporter)\n sys.meta_path.append(FrozenImporter)\n \n \ndef _install_external_importers():\n ''\n global _bootstrap_external\n import _frozen_importlib_external\n _bootstrap_external=_frozen_importlib_external\n _frozen_importlib_external._install(sys.modules[__name__])\n", ["_frozen_importlib_external", "_weakref"]], "importlib._bootstrap_external": [".py", "''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nimport _io\n\n_CASE_INSENSITIVE_PLATFORMS_STR_KEY='win',\n_CASE_INSENSITIVE_PLATFORMS_BYTES_KEY='cygwin','darwin'\n_CASE_INSENSITIVE_PLATFORMS=(_CASE_INSENSITIVE_PLATFORMS_BYTES_KEY\n+_CASE_INSENSITIVE_PLATFORMS_STR_KEY)\n\n\ndef _make_relax_case():\n if sys.platform.startswith(_CASE_INSENSITIVE_PLATFORMS):\n if sys.platform.startswith(_CASE_INSENSITIVE_PLATFORMS_STR_KEY):\n key='PYTHONCASEOK'\n else :\n key=b'PYTHONCASEOK'\n \n def _relax_case():\n ''\n return key in _os.environ\n else :\n def _relax_case():\n ''\n return False\n return _relax_case\n \n \ndef _pack_uint32(x):\n ''\n return (int(x)&0xFFFFFFFF).to_bytes(4,'little')\n \n \ndef _unpack_uint32(data):\n ''\n assert len(data)==4\n return int.from_bytes(data,'little')\n \ndef _unpack_uint16(data):\n ''\n assert len(data)==2\n return int.from_bytes(data,'little')\n \n \ndef _path_join(*path_parts):\n ''\n return path_sep.join([part.rstrip(path_separators)\n for part in path_parts if part])\n \n \ndef _path_split(path):\n ''\n if len(path_separators)==1:\n front,_,tail=path.rpartition(path_sep)\n return front,tail\n for x in reversed(path):\n if x in path_separators:\n front,tail=path.rsplit(x,maxsplit=1)\n return front,tail\n return '',path\n \n \ndef _path_stat(path):\n ''\n\n\n\n\n \n return _os.stat(path)\n \n \ndef _path_is_mode_type(path,mode):\n ''\n try :\n stat_info=_path_stat(path)\n except OSError:\n return False\n return (stat_info.st_mode&0o170000)==mode\n \n \ndef _path_isfile(path):\n ''\n return _path_is_mode_type(path,0o100000)\n \n \ndef _path_isdir(path):\n ''\n if not path:\n path=_os.getcwd()\n return _path_is_mode_type(path,0o040000)\n \n \ndef _path_isabs(path):\n 
''\n\n\n\n \n return path.startswith(path_separators)or path[1:3]in _pathseps_with_colon\n \n \ndef _write_atomic(path,data,mode=0o666):\n ''\n\n \n \n path_tmp='{}.{}'.format(path,id(path))\n fd=_os.open(path_tmp,\n _os.O_EXCL |_os.O_CREAT |_os.O_WRONLY,mode&0o666)\n try :\n \n \n with _io.FileIO(fd,'wb')as file:\n file.write(data)\n _os.replace(path_tmp,path)\n except OSError:\n try :\n _os.unlink(path_tmp)\n except OSError:\n pass\n raise\n \n \n_code_type=type(_write_atomic.__code__)\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nMAGIC_NUMBER=(3413).to_bytes(2,'little')+b'\\r\\n'\n_RAW_MAGIC_NUMBER=int.from_bytes(MAGIC_NUMBER,'little')\n\n_PYCACHE='__pycache__'\n_OPT='opt-'\n\nSOURCE_SUFFIXES=['.py']\n\nBYTECODE_SUFFIXES=['.pyc']\n\nDEBUG_BYTECODE_SUFFIXES=OPTIMIZED_BYTECODE_SUFFIXES=BYTECODE_SUFFIXES\n\ndef cache_from_source(path,debug_override=None ,*,optimization=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if debug_override is not None :\n _warnings.warn('the debug_override parameter is deprecated; use '\n \"'optimization' instead\",DeprecationWarning)\n if optimization is not None :\n message='debug_override or optimization must be set to None'\n raise TypeError(message)\n optimization=''if debug_override else 1\n path=_os.fspath(path)\n head,tail=_path_split(path)\n base,sep,rest=tail.rpartition('.')\n tag=sys.implementation.cache_tag\n if tag is None :\n raise NotImplementedError('sys.implementation.cache_tag is None')\n almost_filename=''.join([(base if base else rest),sep,tag])\n if optimization is None :\n if sys.flags.optimize ==0:\n optimization=''\n else :\n optimization=sys.flags.optimize\n optimization=str(optimization)\n if optimization !='':\n if not optimization.isalnum():\n raise ValueError('{!r} is not alphanumeric'.format(optimization))\n almost_filename='{}.{}{}'.format(almost_filename,_OPT,optimization)\n filename=almost_filename+BYTECODE_SUFFIXES[0]\n if sys.pycache_prefix is not None :\n \n \n \n \n \n \n \n \n if not _path_isabs(head):\n head=_path_join(_os.getcwd(),head)\n \n \n \n \n if head[1]==':'and head[0]not in path_separators:\n head=head[2:]\n \n \n \n return _path_join(\n sys.pycache_prefix,\n head.lstrip(path_separators),\n filename,\n )\n return _path_join(head,_PYCACHE,filename)\n \n \ndef source_from_cache(path):\n ''\n\n\n\n\n\n\n \n if sys.implementation.cache_tag is None :\n raise NotImplementedError('sys.implementation.cache_tag is None')\n path=_os.fspath(path)\n head,pycache_filename=_path_split(path)\n found_in_pycache_prefix=False\n if sys.pycache_prefix is not None :\n stripped_path=sys.pycache_prefix.rstrip(path_separators)\n if head.startswith(stripped_path+path_sep):\n head=head[len(stripped_path):]\n found_in_pycache_prefix=True\n if not found_in_pycache_prefix:\n head,pycache=_path_split(head)\n if pycache !=_PYCACHE:\n raise ValueError(f'{_PYCACHE} not bottom-level directory in '\n f'{path!r}')\n dot_count=pycache_filename.count('.')\n if dot_count not in {2,3}:\n raise ValueError(f'expected only 2 or 3 dots in {pycache_filename!r}')\n elif dot_count ==3:\n optimization=pycache_filename.rsplit('.',2)[-2]\n if not optimization.startswith(_OPT):\n raise ValueError(\"optimization portion of filename does not start \"\n f\"with {_OPT!r}\")\n opt_level=optimization[len(_OPT):]\n if not 
opt_level.isalnum():\n raise ValueError(f\"optimization level {optimization!r} is not an \"\n \"alphanumeric value\")\n base_filename=pycache_filename.partition('.')[0]\n return _path_join(head,base_filename+SOURCE_SUFFIXES[0])\n \n \ndef _get_sourcefile(bytecode_path):\n ''\n\n\n\n\n \n if len(bytecode_path)==0:\n return None\n rest,_,extension=bytecode_path.rpartition('.')\n if not rest or extension.lower()[-3:-1]!='py':\n return bytecode_path\n try :\n source_path=source_from_cache(bytecode_path)\n except (NotImplementedError,ValueError):\n source_path=bytecode_path[:-1]\n return source_path if _path_isfile(source_path)else bytecode_path\n \n \ndef _get_cached(filename):\n if filename.endswith(tuple(SOURCE_SUFFIXES)):\n try :\n return cache_from_source(filename)\n except NotImplementedError:\n pass\n elif filename.endswith(tuple(BYTECODE_SUFFIXES)):\n return filename\n else :\n return None\n \n \ndef _calc_mode(path):\n ''\n try :\n mode=_path_stat(path).st_mode\n except OSError:\n mode=0o666\n \n \n mode |=0o200\n return mode\n \n \ndef _check_name(method):\n ''\n\n\n\n\n\n \n def _check_name_wrapper(self,name=None ,*args,**kwargs):\n if name is None :\n name=self.name\n elif self.name !=name:\n raise ImportError('loader for %s cannot handle %s'%\n (self.name,name),name=name)\n return method(self,name,*args,**kwargs)\n try :\n _wrap=_bootstrap._wrap\n except NameError:\n \n def _wrap(new,old):\n for replace in ['__module__','__name__','__qualname__','__doc__']:\n if hasattr(old,replace):\n setattr(new,replace,getattr(old,replace))\n new.__dict__.update(old.__dict__)\n _wrap(_check_name_wrapper,method)\n return _check_name_wrapper\n \n \ndef _find_module_shim(self,fullname):\n ''\n\n\n\n\n \n \n \n \n loader,portions=self.find_loader(fullname)\n if loader is None and len(portions):\n msg='Not importing directory {}: missing __init__'\n _warnings.warn(msg.format(portions[0]),ImportWarning)\n return loader\n \n \ndef _classify_pyc(data,name,exc_details):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n magic=data[:4]\n if magic !=MAGIC_NUMBER:\n message=f'bad magic number in {name!r}: {magic!r}'\n _bootstrap._verbose_message('{}',message)\n raise ImportError(message,**exc_details)\n if len(data)<16:\n message=f'reached EOF while reading pyc header of {name!r}'\n _bootstrap._verbose_message('{}',message)\n raise EOFError(message)\n flags=_unpack_uint32(data[4:8])\n \n if flags&~0b11:\n message=f'invalid flags {flags!r} in {name!r}'\n raise ImportError(message,**exc_details)\n return flags\n \n \ndef _validate_timestamp_pyc(data,source_mtime,source_size,name,\nexc_details):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if _unpack_uint32(data[8:12])!=(source_mtime&0xFFFFFFFF):\n message=f'bytecode is stale for {name!r}'\n _bootstrap._verbose_message('{}',message)\n raise ImportError(message,**exc_details)\n if (source_size is not None and\n _unpack_uint32(data[12:16])!=(source_size&0xFFFFFFFF)):\n raise ImportError(f'bytecode is stale for {name!r}',**exc_details)\n \n \ndef _validate_hash_pyc(data,source_hash,name,exc_details):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if data[8:16]!=source_hash:\n raise ImportError(\n f'hash in bytecode doesn\\'t match hash of source {name!r}',\n **exc_details,\n )\n \n \ndef _compile_bytecode(data,name=None ,bytecode_path=None ,source_path=None ):\n ''\n code=marshal.loads(data)\n if isinstance(code,_code_type):\n _bootstrap._verbose_message('code object from {!r}',bytecode_path)\n if source_path is not None :\n _imp._fix_co_filename(code,source_path)\n return code\n else :\n 
raise ImportError('Non-code object in {!r}'.format(bytecode_path),\n name=name,path=bytecode_path)\n \n \ndef _code_to_timestamp_pyc(code,mtime=0,source_size=0):\n ''\n data=bytearray(MAGIC_NUMBER)\n data.extend(_pack_uint32(0))\n data.extend(_pack_uint32(mtime))\n data.extend(_pack_uint32(source_size))\n data.extend(marshal.dumps(code))\n return data\n \n \ndef _code_to_hash_pyc(code,source_hash,checked=True ):\n ''\n data=bytearray(MAGIC_NUMBER)\n flags=0b1 |checked <<1\n data.extend(_pack_uint32(flags))\n assert len(source_hash)==8\n data.extend(source_hash)\n data.extend(marshal.dumps(code))\n return data\n \n \ndef decode_source(source_bytes):\n ''\n\n\n \n import tokenize\n source_bytes_readline=_io.BytesIO(source_bytes).readline\n encoding=tokenize.detect_encoding(source_bytes_readline)\n newline_decoder=_io.IncrementalNewlineDecoder(None ,True )\n return newline_decoder.decode(source_bytes.decode(encoding[0]))\n \n \n \n \n_POPULATE=object()\n\n\ndef spec_from_file_location(name,location=None ,*,loader=None ,\nsubmodule_search_locations=_POPULATE):\n ''\n\n\n\n\n\n\n\n\n \n if location is None :\n \n \n \n location=''\n if hasattr(loader,'get_filename'):\n \n try :\n location=loader.get_filename(name)\n except ImportError:\n pass\n else :\n location=_os.fspath(location)\n \n \n \n \n \n \n \n spec=_bootstrap.ModuleSpec(name,loader,origin=location)\n spec._set_fileattr=True\n \n \n if loader is None :\n for loader_class,suffixes in _get_supported_file_loaders():\n if location.endswith(tuple(suffixes)):\n loader=loader_class(name,location)\n spec.loader=loader\n break\n else :\n return None\n \n \n if submodule_search_locations is _POPULATE:\n \n if hasattr(loader,'is_package'):\n try :\n is_package=loader.is_package(name)\n except ImportError:\n pass\n else :\n if is_package:\n spec.submodule_search_locations=[]\n else :\n spec.submodule_search_locations=submodule_search_locations\n if spec.submodule_search_locations ==[]:\n if location:\n dirname=_path_split(location)[0]\n spec.submodule_search_locations.append(dirname)\n \n return spec\n \n \n \n \nclass WindowsRegistryFinder:\n\n ''\n \n REGISTRY_KEY=(\n 'Software\\\\Python\\\\PythonCore\\\\{sys_version}'\n '\\\\Modules\\\\{fullname}')\n REGISTRY_KEY_DEBUG=(\n 'Software\\\\Python\\\\PythonCore\\\\{sys_version}'\n '\\\\Modules\\\\{fullname}\\\\Debug')\n DEBUG_BUILD=False\n \n @classmethod\n def _open_registry(cls,key):\n try :\n return _winreg.OpenKey(_winreg.HKEY_CURRENT_USER,key)\n except OSError:\n return _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE,key)\n \n @classmethod\n def _search_registry(cls,fullname):\n if cls.DEBUG_BUILD:\n registry_key=cls.REGISTRY_KEY_DEBUG\n else :\n registry_key=cls.REGISTRY_KEY\n key=registry_key.format(fullname=fullname,\n sys_version='%d.%d'%sys.version_info[:2])\n try :\n with cls._open_registry(key)as hkey:\n filepath=_winreg.QueryValue(hkey,'')\n except OSError:\n return None\n return filepath\n \n @classmethod\n def find_spec(cls,fullname,path=None ,target=None ):\n filepath=cls._search_registry(fullname)\n if filepath is None :\n return None\n try :\n _path_stat(filepath)\n except OSError:\n return None\n for loader,suffixes in _get_supported_file_loaders():\n if filepath.endswith(tuple(suffixes)):\n spec=_bootstrap.spec_from_loader(fullname,\n loader(fullname,filepath),\n origin=filepath)\n return spec\n \n @classmethod\n def find_module(cls,fullname,path=None ):\n ''\n\n\n\n \n spec=cls.find_spec(fullname,path)\n if spec is not None :\n return spec.loader\n else :\n return None\n \n \nclass 
_LoaderBasics:\n\n ''\n \n \n def is_package(self,fullname):\n ''\n \n filename=_path_split(self.get_filename(fullname))[1]\n filename_base=filename.rsplit('.',1)[0]\n tail_name=fullname.rpartition('.')[2]\n return filename_base =='__init__'and tail_name !='__init__'\n \n def create_module(self,spec):\n ''\n \n def exec_module(self,module):\n ''\n code=self.get_code(module.__name__)\n if code is None :\n raise ImportError('cannot load module {!r} when get_code() '\n 'returns None'.format(module.__name__))\n _bootstrap._call_with_frames_removed(exec,code,module.__dict__)\n \n def load_module(self,fullname):\n ''\n return _bootstrap._load_module_shim(self,fullname)\n \n \nclass SourceLoader(_LoaderBasics):\n\n def path_mtime(self,path):\n ''\n\n\n\n \n raise OSError\n \n def path_stats(self,path):\n ''\n\n\n\n\n\n\n\n\n\n \n return {'mtime':self.path_mtime(path)}\n \n def _cache_bytecode(self,source_path,cache_path,data):\n ''\n\n\n\n\n \n \n return self.set_data(cache_path,data)\n \n def set_data(self,path,data):\n ''\n\n\n \n \n \n def get_source(self,fullname):\n ''\n path=self.get_filename(fullname)\n try :\n source_bytes=self.get_data(path)\n except OSError as exc:\n raise ImportError('source not available through get_data()',\n name=fullname)from exc\n return decode_source(source_bytes)\n \n def source_to_code(self,data,path,*,_optimize=-1):\n ''\n\n\n \n return _bootstrap._call_with_frames_removed(compile,data,path,'exec',\n dont_inherit=True ,optimize=_optimize)\n \n def get_code(self,fullname):\n ''\n\n\n\n\n \n source_path=self.get_filename(fullname)\n source_mtime=None\n source_bytes=None\n source_hash=None\n hash_based=False\n check_source=True\n try :\n bytecode_path=cache_from_source(source_path)\n except NotImplementedError:\n bytecode_path=None\n else :\n try :\n st=self.path_stats(source_path)\n except OSError:\n pass\n else :\n source_mtime=int(st['mtime'])\n try :\n data=self.get_data(bytecode_path)\n except OSError:\n pass\n else :\n exc_details={\n 'name':fullname,\n 'path':bytecode_path,\n }\n try :\n flags=_classify_pyc(data,fullname,exc_details)\n bytes_data=memoryview(data)[16:]\n hash_based=flags&0b1 !=0\n if hash_based:\n check_source=flags&0b10 !=0\n if (_imp.check_hash_based_pycs !='never'and\n (check_source or\n _imp.check_hash_based_pycs =='always')):\n source_bytes=self.get_data(source_path)\n source_hash=_imp.source_hash(\n _RAW_MAGIC_NUMBER,\n source_bytes,\n )\n _validate_hash_pyc(data,source_hash,fullname,\n exc_details)\n else :\n _validate_timestamp_pyc(\n data,\n source_mtime,\n st['size'],\n fullname,\n exc_details,\n )\n except (ImportError,EOFError):\n pass\n else :\n _bootstrap._verbose_message('{} matches {}',bytecode_path,\n source_path)\n return _compile_bytecode(bytes_data,name=fullname,\n bytecode_path=bytecode_path,\n source_path=source_path)\n if source_bytes is None :\n source_bytes=self.get_data(source_path)\n code_object=self.source_to_code(source_bytes,source_path)\n _bootstrap._verbose_message('code object from {}',source_path)\n if (not sys.dont_write_bytecode and bytecode_path is not None and\n source_mtime is not None ):\n if hash_based:\n if source_hash is None :\n source_hash=_imp.source_hash(source_bytes)\n data=_code_to_hash_pyc(code_object,source_hash,check_source)\n else :\n data=_code_to_timestamp_pyc(code_object,source_mtime,\n len(source_bytes))\n try :\n self._cache_bytecode(source_path,bytecode_path,data)\n except NotImplementedError:\n pass\n return code_object\n \n \nclass FileLoader:\n\n ''\n \n \n def 
__init__(self,fullname,path):\n ''\n \n self.name=fullname\n self.path=path\n \n def __eq__(self,other):\n return (self.__class__ ==other.__class__ and\n self.__dict__ ==other.__dict__)\n \n def __hash__(self):\n return hash(self.name)^hash(self.path)\n \n @_check_name\n def load_module(self,fullname):\n ''\n\n\n\n \n \n \n \n return super(FileLoader,self).load_module(fullname)\n \n @_check_name\n def get_filename(self,fullname):\n ''\n return self.path\n \n def get_data(self,path):\n ''\n if isinstance(self,(SourceLoader,ExtensionFileLoader)):\n with _io.open_code(str(path))as file:\n return file.read()\n else :\n with _io.FileIO(path,'r')as file:\n return file.read()\n \n \n \n @_check_name\n def get_resource_reader(self,module):\n if self.is_package(module):\n return self\n return None\n \n def open_resource(self,resource):\n path=_path_join(_path_split(self.path)[0],resource)\n return _io.FileIO(path,'r')\n \n def resource_path(self,resource):\n if not self.is_resource(resource):\n raise FileNotFoundError\n path=_path_join(_path_split(self.path)[0],resource)\n return path\n \n def is_resource(self,name):\n if path_sep in name:\n return False\n path=_path_join(_path_split(self.path)[0],name)\n return _path_isfile(path)\n \n def contents(self):\n return iter(_os.listdir(_path_split(self.path)[0]))\n \n \nclass SourceFileLoader(FileLoader,SourceLoader):\n\n ''\n \n def path_stats(self,path):\n ''\n st=_path_stat(path)\n return {'mtime':st.st_mtime,'size':st.st_size}\n \n def _cache_bytecode(self,source_path,bytecode_path,data):\n \n mode=_calc_mode(source_path)\n return self.set_data(bytecode_path,data,_mode=mode)\n \n def set_data(self,path,data,*,_mode=0o666):\n ''\n parent,filename=_path_split(path)\n path_parts=[]\n \n while parent and not _path_isdir(parent):\n parent,part=_path_split(parent)\n path_parts.append(part)\n \n for part in reversed(path_parts):\n parent=_path_join(parent,part)\n try :\n _os.mkdir(parent)\n except FileExistsError:\n \n continue\n except OSError as exc:\n \n \n _bootstrap._verbose_message('could not create {!r}: {!r}',\n parent,exc)\n return\n try :\n _write_atomic(path,data,_mode)\n _bootstrap._verbose_message('created {!r}',path)\n except OSError as exc:\n \n _bootstrap._verbose_message('could not create {!r}: {!r}',path,\n exc)\n \n \nclass SourcelessFileLoader(FileLoader,_LoaderBasics):\n\n ''\n \n def get_code(self,fullname):\n path=self.get_filename(fullname)\n data=self.get_data(path)\n \n \n exc_details={\n 'name':fullname,\n 'path':path,\n }\n _classify_pyc(data,fullname,exc_details)\n return _compile_bytecode(\n memoryview(data)[16:],\n name=fullname,\n bytecode_path=path,\n )\n \n def get_source(self,fullname):\n ''\n return None\n \n \n \nEXTENSION_SUFFIXES=[]\n\n\nclass ExtensionFileLoader(FileLoader,_LoaderBasics):\n\n ''\n\n\n\n \n \n def __init__(self,name,path):\n self.name=name\n self.path=path\n \n def __eq__(self,other):\n return (self.__class__ ==other.__class__ and\n self.__dict__ ==other.__dict__)\n \n def __hash__(self):\n return hash(self.name)^hash(self.path)\n \n def create_module(self,spec):\n ''\n module=_bootstrap._call_with_frames_removed(\n _imp.create_dynamic,spec)\n _bootstrap._verbose_message('extension module {!r} loaded from {!r}',\n spec.name,self.path)\n return module\n \n def exec_module(self,module):\n ''\n _bootstrap._call_with_frames_removed(_imp.exec_dynamic,module)\n _bootstrap._verbose_message('extension module {!r} executed from {!r}',\n self.name,self.path)\n \n def is_package(self,fullname):\n ''\n 
file_name=_path_split(self.path)[1]\n return any(file_name =='__init__'+suffix\n for suffix in EXTENSION_SUFFIXES)\n \n def get_code(self,fullname):\n ''\n return None\n \n def get_source(self,fullname):\n ''\n return None\n \n @_check_name\n def get_filename(self,fullname):\n ''\n return self.path\n \n \nclass _NamespacePath:\n ''\n\n\n\n \n \n def __init__(self,name,path,path_finder):\n self._name=name\n self._path=path\n self._last_parent_path=tuple(self._get_parent_path())\n self._path_finder=path_finder\n \n def _find_parent_path_names(self):\n ''\n parent,dot,me=self._name.rpartition('.')\n if dot =='':\n \n return 'sys','path'\n \n \n return parent,'__path__'\n \n def _get_parent_path(self):\n parent_module_name,path_attr_name=self._find_parent_path_names()\n return getattr(sys.modules[parent_module_name],path_attr_name)\n \n def _recalculate(self):\n \n parent_path=tuple(self._get_parent_path())\n if parent_path !=self._last_parent_path:\n spec=self._path_finder(self._name,parent_path)\n \n \n if spec is not None and spec.loader is None :\n if spec.submodule_search_locations:\n self._path=spec.submodule_search_locations\n self._last_parent_path=parent_path\n return self._path\n \n def __iter__(self):\n return iter(self._recalculate())\n \n def __getitem__(self,index):\n return self._recalculate()[index]\n \n def __setitem__(self,index,path):\n self._path[index]=path\n \n def __len__(self):\n return len(self._recalculate())\n \n def __repr__(self):\n return '_NamespacePath({!r})'.format(self._path)\n \n def __contains__(self,item):\n return item in self._recalculate()\n \n def append(self,item):\n self._path.append(item)\n \n \n \nclass _NamespaceLoader:\n def __init__(self,name,path,path_finder):\n self._path=_NamespacePath(name,path,path_finder)\n \n @classmethod\n def module_repr(cls,module):\n ''\n\n\n\n \n return ''.format(module.__name__)\n \n def is_package(self,fullname):\n return True\n \n def get_source(self,fullname):\n return ''\n \n def get_code(self,fullname):\n return compile('','','exec',dont_inherit=True )\n \n def create_module(self,spec):\n ''\n \n def exec_module(self,module):\n pass\n \n def load_module(self,fullname):\n ''\n\n\n\n \n \n _bootstrap._verbose_message('namespace module loaded with path {!r}',\n self._path)\n return _bootstrap._load_module_shim(self,fullname)\n \n \n \n \nclass PathFinder:\n\n ''\n \n @classmethod\n def invalidate_caches(cls):\n ''\n \n for name,finder in list(sys.path_importer_cache.items()):\n if finder is None :\n del sys.path_importer_cache[name]\n elif hasattr(finder,'invalidate_caches'):\n finder.invalidate_caches()\n \n @classmethod\n def _path_hooks(cls,path):\n ''\n if sys.path_hooks is not None and not sys.path_hooks:\n _warnings.warn('sys.path_hooks is empty',ImportWarning)\n for hook in sys.path_hooks:\n try :\n return hook(path)\n except ImportError:\n continue\n else :\n return None\n \n @classmethod\n def _path_importer_cache(cls,path):\n ''\n\n\n\n\n \n if path =='':\n try :\n path=_os.getcwd()\n except FileNotFoundError:\n \n \n return None\n try :\n finder=sys.path_importer_cache[path]\n except KeyError:\n finder=cls._path_hooks(path)\n sys.path_importer_cache[path]=finder\n return finder\n \n @classmethod\n def _legacy_get_spec(cls,fullname,finder):\n \n \n if hasattr(finder,'find_loader'):\n loader,portions=finder.find_loader(fullname)\n else :\n loader=finder.find_module(fullname)\n portions=[]\n if loader is not None :\n return _bootstrap.spec_from_loader(fullname,loader)\n 
spec=_bootstrap.ModuleSpec(fullname,None )\n spec.submodule_search_locations=portions\n return spec\n \n @classmethod\n def _get_spec(cls,fullname,path,target=None ):\n ''\n \n \n namespace_path=[]\n for entry in path:\n if not isinstance(entry,(str,bytes)):\n continue\n finder=cls._path_importer_cache(entry)\n if finder is not None :\n if hasattr(finder,'find_spec'):\n spec=finder.find_spec(fullname,target)\n else :\n spec=cls._legacy_get_spec(fullname,finder)\n if spec is None :\n continue\n if spec.loader is not None :\n return spec\n portions=spec.submodule_search_locations\n if portions is None :\n raise ImportError('spec missing loader')\n \n \n \n \n namespace_path.extend(portions)\n else :\n spec=_bootstrap.ModuleSpec(fullname,None )\n spec.submodule_search_locations=namespace_path\n return spec\n \n @classmethod\n def find_spec(cls,fullname,path=None ,target=None ):\n ''\n\n\n \n if path is None :\n path=sys.path\n spec=cls._get_spec(fullname,path,target)\n if spec is None :\n return None\n elif spec.loader is None :\n namespace_path=spec.submodule_search_locations\n if namespace_path:\n \n \n spec.origin=None\n spec.submodule_search_locations=_NamespacePath(fullname,namespace_path,cls._get_spec)\n return spec\n else :\n return None\n else :\n return spec\n \n @classmethod\n def find_module(cls,fullname,path=None ):\n ''\n\n\n\n\n \n spec=cls.find_spec(fullname,path)\n if spec is None :\n return None\n return spec.loader\n \n @classmethod\n def find_distributions(cls,*args,**kwargs):\n ''\n\n\n\n\n\n\n \n from importlib.metadata import MetadataPathFinder\n return MetadataPathFinder.find_distributions(*args,**kwargs)\n \n \nclass FileFinder:\n\n ''\n\n\n\n\n \n \n def __init__(self,path,*loader_details):\n ''\n\n \n loaders=[]\n for loader,suffixes in loader_details:\n loaders.extend((suffix,loader)for suffix in suffixes)\n self._loaders=loaders\n \n self.path=path or '.'\n self._path_mtime=-1\n self._path_cache=set()\n self._relaxed_path_cache=set()\n \n def invalidate_caches(self):\n ''\n self._path_mtime=-1\n \n find_module=_find_module_shim\n \n def find_loader(self,fullname):\n ''\n\n\n\n\n \n spec=self.find_spec(fullname)\n if spec is None :\n return None ,[]\n return spec.loader,spec.submodule_search_locations or []\n \n def _get_spec(self,loader_class,fullname,path,smsl,target):\n loader=loader_class(fullname,path)\n return spec_from_file_location(fullname,path,loader=loader,\n submodule_search_locations=smsl)\n \n def find_spec(self,fullname,target=None ):\n ''\n\n\n \n is_namespace=False\n tail_module=fullname.rpartition('.')[2]\n try :\n mtime=_path_stat(self.path or _os.getcwd()).st_mtime\n except OSError:\n mtime=-1\n if mtime !=self._path_mtime:\n self._fill_cache()\n self._path_mtime=mtime\n \n if _relax_case():\n cache=self._relaxed_path_cache\n cache_module=tail_module.lower()\n else :\n cache=self._path_cache\n cache_module=tail_module\n \n if cache_module in cache:\n base_path=_path_join(self.path,tail_module)\n for suffix,loader_class in self._loaders:\n init_filename='__init__'+suffix\n full_path=_path_join(base_path,init_filename)\n if _path_isfile(full_path):\n return self._get_spec(loader_class,fullname,full_path,[base_path],target)\n else :\n \n \n is_namespace=_path_isdir(base_path)\n \n for suffix,loader_class in self._loaders:\n full_path=_path_join(self.path,tail_module+suffix)\n _bootstrap._verbose_message('trying {}',full_path,verbosity=2)\n if cache_module+suffix in cache:\n if _path_isfile(full_path):\n return 
self._get_spec(loader_class,fullname,full_path,\n None ,target)\n if is_namespace:\n _bootstrap._verbose_message('possible namespace for {}',base_path)\n spec=_bootstrap.ModuleSpec(fullname,None )\n spec.submodule_search_locations=[base_path]\n return spec\n return None\n \n def _fill_cache(self):\n ''\n path=self.path\n try :\n contents=_os.listdir(path or _os.getcwd())\n except (FileNotFoundError,PermissionError,NotADirectoryError):\n \n \n contents=[]\n \n \n if not sys.platform.startswith('win'):\n self._path_cache=set(contents)\n else :\n \n \n \n \n \n lower_suffix_contents=set()\n for item in contents:\n name,dot,suffix=item.partition('.')\n if dot:\n new_name='{}.{}'.format(name,suffix.lower())\n else :\n new_name=name\n lower_suffix_contents.add(new_name)\n self._path_cache=lower_suffix_contents\n if sys.platform.startswith(_CASE_INSENSITIVE_PLATFORMS):\n self._relaxed_path_cache={fn.lower()for fn in contents}\n \n @classmethod\n def path_hook(cls,*loader_details):\n ''\n\n\n\n\n\n\n \n def path_hook_for_FileFinder(path):\n ''\n if not _path_isdir(path):\n raise ImportError('only directories are supported',path=path)\n return cls(path,*loader_details)\n \n return path_hook_for_FileFinder\n \n def __repr__(self):\n return 'FileFinder({!r})'.format(self.path)\n \n \n \n \ndef _fix_up_module(ns,name,pathname,cpathname=None ):\n\n loader=ns.get('__loader__')\n spec=ns.get('__spec__')\n if not loader:\n if spec:\n loader=spec.loader\n elif pathname ==cpathname:\n loader=SourcelessFileLoader(name,pathname)\n else :\n loader=SourceFileLoader(name,pathname)\n if not spec:\n spec=spec_from_file_location(name,pathname,loader=loader)\n try :\n ns['__spec__']=spec\n ns['__loader__']=loader\n ns['__file__']=pathname\n ns['__cached__']=cpathname\n except Exception:\n \n pass\n \n \ndef _get_supported_file_loaders():\n ''\n\n\n \n extensions=ExtensionFileLoader,_imp.extension_suffixes()\n source=SourceFileLoader,SOURCE_SUFFIXES\n bytecode=SourcelessFileLoader,BYTECODE_SUFFIXES\n return [extensions,source,bytecode]\n \n \ndef _setup(_bootstrap_module):\n ''\n\n\n\n\n \n global sys,_imp,_bootstrap\n _bootstrap=_bootstrap_module\n sys=_bootstrap.sys\n _imp=_bootstrap._imp\n \n \n self_module=sys.modules[__name__]\n \n \n for builtin_name in ('_warnings','builtins','marshal'):\n if builtin_name not in sys.modules:\n builtin_module=_bootstrap._builtin_from_name(builtin_name)\n else :\n builtin_module=sys.modules[builtin_name]\n setattr(self_module,builtin_name,builtin_module)\n \n \n os_details=('posix',['/']),('nt',['\\\\','/'])\n for builtin_os,path_separators in os_details:\n \n assert all(len(sep)==1 for sep in path_separators)\n path_sep=path_separators[0]\n if builtin_os in sys.modules:\n os_module=sys.modules[builtin_os]\n break\n else :\n try :\n os_module=_bootstrap._builtin_from_name(builtin_os)\n break\n except ImportError:\n continue\n else :\n raise ImportError('importlib requires posix or nt')\n setattr(self_module,'_os',os_module)\n setattr(self_module,'path_sep',path_sep)\n setattr(self_module,'path_separators',''.join(path_separators))\n setattr(self_module,'_pathseps_with_colon',{f':{s}'for s in path_separators})\n \n \n \n \n import _thread\n setattr(self_module,'_thread',_thread)\n \n \n \n \n import _weakref\n setattr(self_module,'_weakref',_weakref)\n \n \n if builtin_os =='nt':\n winreg_module=_bootstrap._builtin_from_name('winreg')\n setattr(self_module,'_winreg',winreg_module)\n \n \n setattr(self_module,'_relax_case',_make_relax_case())\n 
EXTENSION_SUFFIXES.extend(_imp.extension_suffixes())\n if builtin_os =='nt':\n SOURCE_SUFFIXES.append('.pyw')\n if '_d.pyd'in EXTENSION_SUFFIXES:\n WindowsRegistryFinder.DEBUG_BUILD=True\n \n \ndef _install(_bootstrap_module):\n ''\n _setup(_bootstrap_module)\n supported_loaders=_get_supported_file_loaders()\n sys.path_hooks.extend([FileFinder.path_hook(*supported_loaders)])\n sys.meta_path.append(PathFinder)\n \n", ["_io", "_thread", "_weakref", "importlib.metadata", "tokenize"]], "importlib": [".py", "''\n__all__=['__import__','import_module','invalidate_caches','reload']\n\n\n\n\n\n\n\n\n\nimport _imp\nimport sys\n\ntry :\n import _frozen_importlib as _bootstrap\nexcept ImportError:\n from . import _bootstrap\n _bootstrap._setup(sys,_imp)\nelse :\n\n\n _bootstrap.__name__='importlib._bootstrap'\n _bootstrap.__package__='importlib'\n try :\n _bootstrap.__file__=__file__.replace('__init__.py','_bootstrap.py')\n except NameError:\n \n \n pass\n sys.modules['importlib._bootstrap']=_bootstrap\n \ntry :\n import _frozen_importlib_external as _bootstrap_external\nexcept ImportError:\n from . import _bootstrap_external\n _bootstrap_external._setup(_bootstrap)\n _bootstrap._bootstrap_external=_bootstrap_external\nelse :\n _bootstrap_external.__name__='importlib._bootstrap_external'\n _bootstrap_external.__package__='importlib'\n try :\n _bootstrap_external.__file__=__file__.replace('__init__.py','_bootstrap_external.py')\n except NameError:\n \n \n pass\n sys.modules['importlib._bootstrap_external']=_bootstrap_external\n \n \n_pack_uint32=_bootstrap_external._pack_uint32\n_unpack_uint32=_bootstrap_external._unpack_uint32\n\n\n\n\nimport types\nimport warnings\n\n\n\n\nfrom ._bootstrap import __import__\n\n\ndef invalidate_caches():\n ''\n \n for finder in sys.meta_path:\n if hasattr(finder,'invalidate_caches'):\n finder.invalidate_caches()\n \n \ndef find_loader(name,path=None ):\n ''\n\n\n\n\n\n \n warnings.warn('Deprecated since Python 3.4. 
'\n 'Use importlib.util.find_spec() instead.',\n DeprecationWarning,stacklevel=2)\n try :\n loader=sys.modules[name].__loader__\n if loader is None :\n raise ValueError('{}.__loader__ is None'.format(name))\n else :\n return loader\n except KeyError:\n pass\n except AttributeError:\n raise ValueError('{}.__loader__ is not set'.format(name))from None\n \n spec=_bootstrap._find_spec(name,path)\n \n if spec is None :\n return None\n if spec.loader is None :\n if spec.submodule_search_locations is None :\n raise ImportError('spec for {} missing loader'.format(name),\n name=name)\n raise ImportError('namespace packages do not have loaders',\n name=name)\n return spec.loader\n \n \ndef import_module(name,package=None ):\n ''\n\n\n\n\n\n \n level=0\n if name.startswith('.'):\n if not package:\n msg=(\"the 'package' argument is required to perform a relative \"\n \"import for {!r}\")\n raise TypeError(msg.format(name))\n for character in name:\n if character !='.':\n break\n level +=1\n return _bootstrap._gcd_import(name[level:],package,level)\n \n \n_RELOADING={}\n\n\ndef reload(module):\n ''\n\n\n\n \n if not module or not isinstance(module,types.ModuleType):\n raise TypeError(\"reload() argument must be a module\")\n try :\n name=module.__spec__.name\n except AttributeError:\n name=module.__name__\n \n if sys.modules.get(name)is not module:\n msg=\"module {} not in sys.modules\"\n raise ImportError(msg.format(name),name=name)\n if name in _RELOADING:\n return _RELOADING[name]\n _RELOADING[name]=module\n try :\n parent_name=name.rpartition('.')[0]\n if parent_name:\n try :\n parent=sys.modules[parent_name]\n except KeyError:\n msg=\"parent {!r} not in sys.modules\"\n raise ImportError(msg.format(parent_name),\n name=parent_name)from None\n else :\n pkgpath=parent.__path__\n else :\n pkgpath=None\n target=module\n spec=module.__spec__=_bootstrap._find_spec(name,pkgpath,target)\n if spec is None :\n raise ModuleNotFoundError(f\"spec not found for the module {name!r}\",name=name)\n _bootstrap._exec(spec,module)\n \n return sys.modules[name]\n finally :\n try :\n del _RELOADING[name]\n except KeyError:\n pass\n", ["_frozen_importlib", "_frozen_importlib_external", "_imp", "importlib", "importlib._bootstrap", "importlib._bootstrap_external", "sys", "types", "warnings"], 1], "logging.brython_handlers": [".py", "import logging\n\nfrom browser.ajax import ajax\n\n\nclass XMLHTTPHandler(logging.Handler):\n ''\n\n\n \n def __init__(self,url,method=\"GET\"):\n ''\n\n\n \n logging.Handler.__init__(self)\n method=method.upper()\n if method not in [\"GET\",\"POST\"]:\n raise ValueError(\"method must be GET or POST\")\n self.url=url\n self.method=method\n \n def mapLogRecord(self,record):\n ''\n\n\n\n \n return record.__dict__\n \n def emit(self,record):\n ''\n\n\n\n \n try :\n req=ajax.open(self.method,self.url,sync=False )\n req.send(self.mapLogRecord(record))\n except :\n self.handleError(record)\n", ["browser.ajax", "logging"]], "logging.config": [".py", "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\"\"\"\nConfiguration functions for the logging package for Python. The core package\nis based on PEP 282 and comments thereto in comp.lang.python, and influenced\nby Apache's log4j system.\n\nCopyright (C) 2001-2013 Vinay Sajip. 
All Rights Reserved.\n\nTo use, simply 'import logging' and log away!\n\"\"\"\n\nimport sys,logging,logging.handlers,socket,struct,traceback,re\nimport io\n\ntry :\n import _thread as thread\n import threading\nexcept ImportError:\n thread=None\n \nfrom socketserver import ThreadingTCPServer,StreamRequestHandler\n\n\nDEFAULT_LOGGING_CONFIG_PORT=9030\n\nif sys.platform ==\"win32\":\n RESET_ERROR=10054\nelse :\n RESET_ERROR=104\n \n \n \n \n \n \n_listener=None\n\ndef fileConfig(fname,defaults=None ,disable_existing_loggers=True ):\n ''\n\n\n\n\n\n\n \n import configparser\n \n cp=configparser.ConfigParser(defaults)\n if hasattr(fname,'readline'):\n cp.read_file(fname)\n else :\n cp.read(fname)\n \n formatters=_create_formatters(cp)\n \n \n logging._acquireLock()\n try :\n logging._handlers.clear()\n del logging._handlerList[:]\n \n handlers=_install_handlers(cp,formatters)\n _install_loggers(cp,handlers,disable_existing_loggers)\n finally :\n logging._releaseLock()\n \n \ndef _resolve(name):\n ''\n name=name.split('.')\n used=name.pop(0)\n found=__import__(used)\n for n in name:\n used=used+'.'+n\n try :\n found=getattr(found,n)\n except AttributeError:\n __import__(used)\n found=getattr(found,n)\n return found\n \ndef _strip_spaces(alist):\n return map(lambda x:x.strip(),alist)\n \ndef _create_formatters(cp):\n ''\n flist=cp[\"formatters\"][\"keys\"]\n if not len(flist):\n return {}\n flist=flist.split(\",\")\n flist=_strip_spaces(flist)\n formatters={}\n for form in flist:\n sectname=\"formatter_%s\"%form\n fs=cp.get(sectname,\"format\",raw=True ,fallback=None )\n dfs=cp.get(sectname,\"datefmt\",raw=True ,fallback=None )\n c=logging.Formatter\n class_name=cp[sectname].get(\"class\")\n if class_name:\n c=_resolve(class_name)\n f=c(fs,dfs)\n formatters[form]=f\n return formatters\n \n \ndef _install_handlers(cp,formatters):\n ''\n hlist=cp[\"handlers\"][\"keys\"]\n if not len(hlist):\n return {}\n hlist=hlist.split(\",\")\n hlist=_strip_spaces(hlist)\n handlers={}\n fixups=[]\n for hand in hlist:\n section=cp[\"handler_%s\"%hand]\n klass=section[\"class\"]\n fmt=section.get(\"formatter\",\"\")\n try :\n klass=eval(klass,vars(logging))\n except (AttributeError,NameError):\n klass=_resolve(klass)\n args=section[\"args\"]\n args=eval(args,vars(logging))\n h=klass(*args)\n if \"level\"in section:\n level=section[\"level\"]\n h.setLevel(logging._levelNames[level])\n if len(fmt):\n h.setFormatter(formatters[fmt])\n if issubclass(klass,logging.handlers.MemoryHandler):\n target=section.get(\"target\",\"\")\n if len(target):\n fixups.append((h,target))\n handlers[hand]=h\n \n for h,t in fixups:\n h.setTarget(handlers[t])\n return handlers\n \ndef _handle_existing_loggers(existing,child_loggers,disable_existing):\n ''\n\n\n\n\n\n\n\n\n \n root=logging.root\n for log in existing:\n logger=root.manager.loggerDict[log]\n if log in child_loggers:\n logger.level=logging.NOTSET\n logger.handlers=[]\n logger.propagate=True\n else :\n logger.disabled=disable_existing\n \ndef _install_loggers(cp,handlers,disable_existing):\n ''\n \n \n llist=cp[\"loggers\"][\"keys\"]\n llist=llist.split(\",\")\n llist=list(map(lambda x:x.strip(),llist))\n llist.remove(\"root\")\n section=cp[\"logger_root\"]\n root=logging.root\n log=root\n if \"level\"in section:\n level=section[\"level\"]\n log.setLevel(logging._levelNames[level])\n for h in root.handlers[:]:\n root.removeHandler(h)\n hlist=section[\"handlers\"]\n if len(hlist):\n hlist=hlist.split(\",\")\n hlist=_strip_spaces(hlist)\n for hand in hlist:\n 
log.addHandler(handlers[hand])\n \n \n \n \n \n \n \n \n \n \n existing=list(root.manager.loggerDict.keys())\n \n \n \n \n existing.sort()\n \n \n child_loggers=[]\n \n for log in llist:\n section=cp[\"logger_%s\"%log]\n qn=section[\"qualname\"]\n propagate=section.getint(\"propagate\",fallback=1)\n logger=logging.getLogger(qn)\n if qn in existing:\n i=existing.index(qn)+1\n prefixed=qn+\".\"\n pflen=len(prefixed)\n num_existing=len(existing)\n while i [a-z]+)://(?P.*)$')\n \n WORD_PATTERN=re.compile(r'^\\s*(\\w+)\\s*')\n DOT_PATTERN=re.compile(r'^\\.\\s*(\\w+)\\s*')\n INDEX_PATTERN=re.compile(r'^\\[\\s*(\\w+)\\s*\\]\\s*')\n DIGIT_PATTERN=re.compile(r'^\\d+$')\n \n value_converters={\n 'ext':'ext_convert',\n 'cfg':'cfg_convert',\n }\n \n \n importer=staticmethod(__import__)\n \n def __init__(self,config):\n self.config=ConvertingDict(config)\n self.config.configurator=self\n \n def resolve(self,s):\n ''\n\n\n \n name=s.split('.')\n used=name.pop(0)\n try :\n found=self.importer(used)\n for frag in name:\n used +='.'+frag\n try :\n found=getattr(found,frag)\n except AttributeError:\n self.importer(used)\n found=getattr(found,frag)\n return found\n except ImportError:\n e,tb=sys.exc_info()[1:]\n v=ValueError('Cannot resolve %r: %s'%(s,e))\n v.__cause__,v.__traceback__=e,tb\n raise v\n \n def ext_convert(self,value):\n ''\n return self.resolve(value)\n \n def cfg_convert(self,value):\n ''\n rest=value\n m=self.WORD_PATTERN.match(rest)\n if m is None :\n raise ValueError(\"Unable to convert %r\"%value)\n else :\n rest=rest[m.end():]\n d=self.config[m.groups()[0]]\n \n while rest:\n m=self.DOT_PATTERN.match(rest)\n if m:\n d=d[m.groups()[0]]\n else :\n m=self.INDEX_PATTERN.match(rest)\n if m:\n idx=m.groups()[0]\n if not self.DIGIT_PATTERN.match(idx):\n d=d[idx]\n else :\n try :\n n=int(idx)\n d=d[n]\n except TypeError:\n d=d[idx]\n if m:\n rest=rest[m.end():]\n else :\n raise ValueError('Unable to convert '\n '%r at %r'%(value,rest))\n \n return d\n \n def convert(self,value):\n ''\n\n\n\n \n if not isinstance(value,ConvertingDict)and isinstance(value,dict):\n value=ConvertingDict(value)\n value.configurator=self\n elif not isinstance(value,ConvertingList)and isinstance(value,list):\n value=ConvertingList(value)\n value.configurator=self\n elif not isinstance(value,ConvertingTuple)and\\\n isinstance(value,tuple):\n value=ConvertingTuple(value)\n value.configurator=self\n elif isinstance(value,str):\n m=self.CONVERT_PATTERN.match(value)\n if m:\n d=m.groupdict()\n prefix=d['prefix']\n converter=self.value_converters.get(prefix,None )\n if converter:\n suffix=d['suffix']\n converter=getattr(self,converter)\n value=converter(suffix)\n return value\n \n def configure_custom(self,config):\n ''\n c=config.pop('()')\n if not callable(c):\n c=self.resolve(c)\n props=config.pop('.',None )\n \n kwargs=dict([(k,config[k])for k in config if valid_ident(k)])\n result=c(**kwargs)\n if props:\n for name,value in props.items():\n setattr(result,name,value)\n return result\n \n def as_tuple(self,value):\n ''\n if isinstance(value,list):\n value=tuple(value)\n return value\n \nclass DictConfigurator(BaseConfigurator):\n ''\n\n\n \n \n def configure(self):\n ''\n \n config=self.config\n if 'version'not in config:\n raise ValueError(\"dictionary doesn't specify a version\")\n if config['version']!=1:\n raise ValueError(\"Unsupported version: %s\"%config['version'])\n incremental=config.pop('incremental',False )\n EMPTY_DICT={}\n logging._acquireLock()\n try :\n if incremental:\n 
handlers=config.get('handlers',EMPTY_DICT)\n for name in handlers:\n if name not in logging._handlers:\n raise ValueError('No handler found with '\n 'name %r'%name)\n else :\n try :\n handler=logging._handlers[name]\n handler_config=handlers[name]\n level=handler_config.get('level',None )\n if level:\n handler.setLevel(logging._checkLevel(level))\n except Exception as e:\n raise ValueError('Unable to configure handler '\n '%r: %s'%(name,e))\n loggers=config.get('loggers',EMPTY_DICT)\n for name in loggers:\n try :\n self.configure_logger(name,loggers[name],True )\n except Exception as e:\n raise ValueError('Unable to configure logger '\n '%r: %s'%(name,e))\n root=config.get('root',None )\n if root:\n try :\n self.configure_root(root,True )\n except Exception as e:\n raise ValueError('Unable to configure root '\n 'logger: %s'%e)\n else :\n disable_existing=config.pop('disable_existing_loggers',True )\n \n logging._handlers.clear()\n del logging._handlerList[:]\n \n \n formatters=config.get('formatters',EMPTY_DICT)\n for name in formatters:\n try :\n formatters[name]=self.configure_formatter(\n formatters[name])\n except Exception as e:\n raise ValueError('Unable to configure '\n 'formatter %r: %s'%(name,e))\n \n filters=config.get('filters',EMPTY_DICT)\n for name in filters:\n try :\n filters[name]=self.configure_filter(filters[name])\n except Exception as e:\n raise ValueError('Unable to configure '\n 'filter %r: %s'%(name,e))\n \n \n \n \n handlers=config.get('handlers',EMPTY_DICT)\n deferred=[]\n for name in sorted(handlers):\n try :\n handler=self.configure_handler(handlers[name])\n handler.name=name\n handlers[name]=handler\n except Exception as e:\n if 'target not configured yet'in str(e):\n deferred.append(name)\n else :\n raise ValueError('Unable to configure handler '\n '%r: %s'%(name,e))\n \n \n for name in deferred:\n try :\n handler=self.configure_handler(handlers[name])\n handler.name=name\n handlers[name]=handler\n except Exception as e:\n raise ValueError('Unable to configure handler '\n '%r: %s'%(name,e))\n \n \n \n \n \n \n \n \n \n \n \n root=logging.root\n existing=list(root.manager.loggerDict.keys())\n \n \n \n \n existing.sort()\n \n \n child_loggers=[]\n \n loggers=config.get('loggers',EMPTY_DICT)\n for name in loggers:\n if name in existing:\n i=existing.index(name)+1\n prefixed=name+\".\"\n pflen=len(prefixed)\n num_existing=len(existing)\n while i L\",chunk)[0]\n chunk=self.connection.recv(slen)\n while len(chunk)0:\n mode='a'\n BaseRotatingHandler.__init__(self,filename,mode,encoding,delay)\n self.maxBytes=maxBytes\n self.backupCount=backupCount\n \n def doRollover(self):\n ''\n\n \n if self.stream:\n self.stream.close()\n self.stream=None\n if self.backupCount >0:\n for i in range(self.backupCount -1,0,-1):\n sfn=self.rotation_filename(\"%s.%d\"%(self.baseFilename,i))\n dfn=self.rotation_filename(\"%s.%d\"%(self.baseFilename,\n i+1))\n if os.path.exists(sfn):\n if os.path.exists(dfn):\n os.remove(dfn)\n os.rename(sfn,dfn)\n dfn=self.rotation_filename(self.baseFilename+\".1\")\n if os.path.exists(dfn):\n os.remove(dfn)\n self.rotate(self.baseFilename,dfn)\n if not self.delay:\n self.stream=self._open()\n \n def shouldRollover(self,record):\n ''\n\n\n\n\n \n if self.stream is None :\n self.stream=self._open()\n if self.maxBytes >0:\n msg=\"%s\\n\"%self.format(record)\n self.stream.seek(0,2)\n if self.stream.tell()+len(msg)>=self.maxBytes:\n return 1\n return 0\n \nclass TimedRotatingFileHandler(BaseRotatingHandler):\n ''\n\n\n\n\n\n \n def 
__init__(self,filename,when='h',interval=1,backupCount=0,encoding=None ,delay=False ,utc=False ):\n BaseRotatingHandler.__init__(self,filename,'a',encoding,delay)\n self.when=when.upper()\n self.backupCount=backupCount\n self.utc=utc\n \n \n \n \n \n \n \n \n \n \n \n \n if self.when =='S':\n self.interval=1\n self.suffix=\"%Y-%m-%d_%H-%M-%S\"\n self.extMatch=r\"^\\d{4}-\\d{2}-\\d{2}_\\d{2}-\\d{2}-\\d{2}(\\.\\w+)?$\"\n elif self.when =='M':\n self.interval=60\n self.suffix=\"%Y-%m-%d_%H-%M\"\n self.extMatch=r\"^\\d{4}-\\d{2}-\\d{2}_\\d{2}-\\d{2}(\\.\\w+)?$\"\n elif self.when =='H':\n self.interval=60 *60\n self.suffix=\"%Y-%m-%d_%H\"\n self.extMatch=r\"^\\d{4}-\\d{2}-\\d{2}_\\d{2}(\\.\\w+)?$\"\n elif self.when =='D'or self.when =='MIDNIGHT':\n self.interval=60 *60 *24\n self.suffix=\"%Y-%m-%d\"\n self.extMatch=r\"^\\d{4}-\\d{2}-\\d{2}(\\.\\w+)?$\"\n elif self.when.startswith('W'):\n self.interval=60 *60 *24 *7\n if len(self.when)!=2:\n raise ValueError(\"You must specify a day for weekly rollover from 0 to 6 (0 is Monday): %s\"%self.when)\n if self.when[1]<'0'or self.when[1]>'6':\n raise ValueError(\"Invalid day specified for weekly rollover: %s\"%self.when)\n self.dayOfWeek=int(self.when[1])\n self.suffix=\"%Y-%m-%d\"\n self.extMatch=r\"^\\d{4}-\\d{2}-\\d{2}(\\.\\w+)?$\"\n else :\n raise ValueError(\"Invalid rollover interval specified: %s\"%self.when)\n \n self.extMatch=re.compile(self.extMatch,re.ASCII)\n self.interval=self.interval *interval\n if os.path.exists(filename):\n t=os.stat(filename)[ST_MTIME]\n else :\n t=int(time.time())\n self.rolloverAt=self.computeRollover(t)\n \n def computeRollover(self,currentTime):\n ''\n\n \n result=currentTime+self.interval\n \n \n \n \n \n \n \n if self.when =='MIDNIGHT'or self.when.startswith('W'):\n \n if self.utc:\n t=time.gmtime(currentTime)\n else :\n t=time.localtime(currentTime)\n currentHour=t[3]\n currentMinute=t[4]\n currentSecond=t[5]\n \n r=_MIDNIGHT -((currentHour *60+currentMinute)*60+\n currentSecond)\n result=currentTime+r\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n if self.when.startswith('W'):\n day=t[6]\n if day !=self.dayOfWeek:\n if day =self.rolloverAt:\n return 1\n return 0\n \n def getFilesToDelete(self):\n ''\n\n\n\n \n dirName,baseName=os.path.split(self.baseFilename)\n fileNames=os.listdir(dirName)\n result=[]\n prefix=baseName+\".\"\n plen=len(prefix)\n for fileName in fileNames:\n if fileName[:plen]==prefix:\n suffix=fileName[plen:]\n if self.extMatch.match(suffix):\n result.append(os.path.join(dirName,fileName))\n result.sort()\n if len(result)0:\n for s in self.getFilesToDelete():\n os.remove(s)\n if not self.delay:\n self.stream=self._open()\n newRolloverAt=self.computeRollover(currentTime)\n while newRolloverAt <=currentTime:\n newRolloverAt=newRolloverAt+self.interval\n \n if (self.when =='MIDNIGHT'or self.when.startswith('W'))and not self.utc:\n dstAtRollover=time.localtime(newRolloverAt)[-1]\n if dstNow !=dstAtRollover:\n if not dstNow:\n addend=-3600\n else :\n addend=3600\n newRolloverAt +=addend\n self.rolloverAt=newRolloverAt\n \nclass WatchedFileHandler(logging.FileHandler):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n def __init__(self,filename,mode='a',encoding=None ,delay=False ):\n logging.FileHandler.__init__(self,filename,mode,encoding,delay)\n self.dev,self.ino=-1,-1\n self._statstream()\n \n def _statstream(self):\n if self.stream:\n sres=os.fstat(self.stream.fileno())\n self.dev,self.ino=sres[ST_DEV],sres[ST_INO]\n \n def emit(self,record):\n ''\n\n\n\n\n\n \n \n \n \n \n try :\n \n 
sres=os.stat(self.baseFilename)\n except OSError as err:\n if err.errno ==errno.ENOENT:\n sres=None\n else :\n raise\n \n if not sres or sres[ST_DEV]!=self.dev or sres[ST_INO]!=self.ino:\n if self.stream is not None :\n \n self.stream.flush()\n self.stream.close()\n \n self.stream=self._open()\n self._statstream()\n logging.FileHandler.emit(self,record)\n \n \nclass SocketHandler(logging.Handler):\n ''\n\n\n\n\n\n\n\n\n\n \n \n def __init__(self,host,port):\n ''\n\n\n\n\n\n \n logging.Handler.__init__(self)\n self.host=host\n self.port=port\n self.sock=None\n self.closeOnError=False\n self.retryTime=None\n \n \n \n self.retryStart=1.0\n self.retryMax=30.0\n self.retryFactor=2.0\n \n def makeSocket(self,timeout=1):\n ''\n\n\n \n s=socket.socket(socket.AF_INET,socket.SOCK_STREAM)\n if hasattr(s,'settimeout'):\n s.settimeout(timeout)\n try :\n s.connect((self.host,self.port))\n return s\n except socket.error:\n s.close()\n raise\n \n def createSocket(self):\n ''\n\n\n\n \n now=time.time()\n \n \n \n if self.retryTime is None :\n attempt=True\n else :\n attempt=(now >=self.retryTime)\n if attempt:\n try :\n self.sock=self.makeSocket()\n self.retryTime=None\n except socket.error:\n \n if self.retryTime is None :\n self.retryPeriod=self.retryStart\n else :\n self.retryPeriod=self.retryPeriod *self.retryFactor\n if self.retryPeriod >self.retryMax:\n self.retryPeriod=self.retryMax\n self.retryTime=now+self.retryPeriod\n \n def send(self,s):\n ''\n\n\n\n\n \n if self.sock is None :\n self.createSocket()\n \n \n \n if self.sock:\n try :\n if hasattr(self.sock,\"sendall\"):\n self.sock.sendall(s)\n else :\n sentsofar=0\n left=len(s)\n while left >0:\n sent=self.sock.send(s[sentsofar:])\n sentsofar=sentsofar+sent\n left=left -sent\n except socket.error:\n self.sock.close()\n self.sock=None\n \n def makePickle(self,record):\n ''\n\n\n \n ei=record.exc_info\n if ei:\n \n dummy=self.format(record)\n \n \n \n d=dict(record.__dict__)\n d['msg']=record.getMessage()\n d['args']=None\n d['exc_info']=None\n s=pickle.dumps(d,1)\n slen=struct.pack(\">L\",len(s))\n return slen+s\n \n def handleError(self,record):\n ''\n\n\n\n\n\n \n if self.closeOnError and self.sock:\n self.sock.close()\n self.sock=None\n else :\n logging.Handler.handleError(self,record)\n \n def emit(self,record):\n ''\n\n\n\n\n\n\n \n try :\n s=self.makePickle(record)\n self.send(s)\n except (KeyboardInterrupt,SystemExit):\n raise\n except :\n self.handleError(record)\n \n def close(self):\n ''\n\n \n self.acquire()\n try :\n if self.sock:\n self.sock.close()\n self.sock=None\n logging.Handler.close(self)\n finally :\n self.release()\n \nclass DatagramHandler(SocketHandler):\n ''\n\n\n\n\n\n\n\n\n \n def __init__(self,host,port):\n ''\n\n \n SocketHandler.__init__(self,host,port)\n self.closeOnError=False\n \n def makeSocket(self):\n ''\n\n\n \n s=socket.socket(socket.AF_INET,socket.SOCK_DGRAM)\n return s\n \n def send(self,s):\n ''\n\n\n\n\n\n \n if self.sock is None :\n self.createSocket()\n self.sock.sendto(s,(self.host,self.port))\n \nclass SysLogHandler(logging.Handler):\n ''\n\n\n\n\n\n \n \n \n \n \n \n \n \n \n \n \n \n LOG_EMERG=0\n LOG_ALERT=1\n LOG_CRIT=2\n LOG_ERR=3\n LOG_WARNING=4\n LOG_NOTICE=5\n LOG_INFO=6\n LOG_DEBUG=7\n \n \n LOG_KERN=0\n LOG_USER=1\n LOG_MAIL=2\n LOG_DAEMON=3\n LOG_AUTH=4\n LOG_SYSLOG=5\n LOG_LPR=6\n LOG_NEWS=7\n LOG_UUCP=8\n LOG_CRON=9\n LOG_AUTHPRIV=10\n LOG_FTP=11\n \n \n LOG_LOCAL0=16\n LOG_LOCAL1=17\n LOG_LOCAL2=18\n LOG_LOCAL3=19\n LOG_LOCAL4=20\n LOG_LOCAL5=21\n LOG_LOCAL6=22\n LOG_LOCAL7=23\n \n 
priority_names={\n \"alert\":LOG_ALERT,\n \"crit\":LOG_CRIT,\n \"critical\":LOG_CRIT,\n \"debug\":LOG_DEBUG,\n \"emerg\":LOG_EMERG,\n \"err\":LOG_ERR,\n \"error\":LOG_ERR,\n \"info\":LOG_INFO,\n \"notice\":LOG_NOTICE,\n \"panic\":LOG_EMERG,\n \"warn\":LOG_WARNING,\n \"warning\":LOG_WARNING,\n }\n \n facility_names={\n \"auth\":LOG_AUTH,\n \"authpriv\":LOG_AUTHPRIV,\n \"cron\":LOG_CRON,\n \"daemon\":LOG_DAEMON,\n \"ftp\":LOG_FTP,\n \"kern\":LOG_KERN,\n \"lpr\":LOG_LPR,\n \"mail\":LOG_MAIL,\n \"news\":LOG_NEWS,\n \"security\":LOG_AUTH,\n \"syslog\":LOG_SYSLOG,\n \"user\":LOG_USER,\n \"uucp\":LOG_UUCP,\n \"local0\":LOG_LOCAL0,\n \"local1\":LOG_LOCAL1,\n \"local2\":LOG_LOCAL2,\n \"local3\":LOG_LOCAL3,\n \"local4\":LOG_LOCAL4,\n \"local5\":LOG_LOCAL5,\n \"local6\":LOG_LOCAL6,\n \"local7\":LOG_LOCAL7,\n }\n \n \n \n \n \n priority_map={\n \"DEBUG\":\"debug\",\n \"INFO\":\"info\",\n \"WARNING\":\"warning\",\n \"ERROR\":\"error\",\n \"CRITICAL\":\"critical\"\n }\n \n def __init__(self,address=('localhost',SYSLOG_UDP_PORT),\n facility=LOG_USER,socktype=None ):\n ''\n\n\n\n\n\n \n logging.Handler.__init__(self)\n \n self.address=address\n self.facility=facility\n self.socktype=socktype\n \n if isinstance(address,str):\n self.unixsocket=True\n self._connect_unixsocket(address)\n else :\n self.unixsocket=False\n if socktype is None :\n socktype=socket.SOCK_DGRAM\n self.socket=socket.socket(socket.AF_INET,socktype)\n if socktype ==socket.SOCK_STREAM:\n self.socket.connect(address)\n self.socktype=socktype\n self.formatter=None\n \n def _connect_unixsocket(self,address):\n use_socktype=self.socktype\n if use_socktype is None :\n use_socktype=socket.SOCK_DGRAM\n self.socket=socket.socket(socket.AF_UNIX,use_socktype)\n try :\n self.socket.connect(address)\n \n self.socktype=use_socktype\n except socket.error:\n self.socket.close()\n if self.socktype is not None :\n \n raise\n use_socktype=socket.SOCK_STREAM\n self.socket=socket.socket(socket.AF_UNIX,use_socktype)\n try :\n self.socket.connect(address)\n \n self.socktype=use_socktype\n except socket.error:\n self.socket.close()\n raise\n \n def encodePriority(self,facility,priority):\n ''\n\n\n\n\n \n if isinstance(facility,str):\n facility=self.facility_names[facility]\n if isinstance(priority,str):\n priority=self.priority_names[priority]\n return (facility <<3)|priority\n \n def close(self):\n ''\n\n \n self.acquire()\n try :\n self.socket.close()\n logging.Handler.close(self)\n finally :\n self.release()\n \n def mapPriority(self,levelName):\n ''\n\n\n\n\n\n \n return self.priority_map.get(levelName,\"warning\")\n \n ident=''\n append_nul=True\n \n def emit(self,record):\n ''\n\n\n\n\n \n msg=self.format(record)\n if self.ident:\n msg=self.ident+msg\n if self.append_nul:\n msg +='\\000'\n ''\n\n\n \n prio='<%d>'%self.encodePriority(self.facility,\n self.mapPriority(record.levelname))\n prio=prio.encode('utf-8')\n \n msg=msg.encode('utf-8')\n msg=prio+msg\n try :\n if self.unixsocket:\n try :\n self.socket.send(msg)\n except socket.error:\n self.socket.close()\n self._connect_unixsocket(self.address)\n self.socket.send(msg)\n elif self.socktype ==socket.SOCK_DGRAM:\n self.socket.sendto(msg,self.address)\n else :\n self.socket.sendall(msg)\n except (KeyboardInterrupt,SystemExit):\n raise\n except :\n self.handleError(record)\n \nclass SMTPHandler(logging.Handler):\n ''\n\n \n def __init__(self,mailhost,fromaddr,toaddrs,subject,\n credentials=None ,secure=None ,timeout=5.0):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n logging.Handler.__init__(self)\n if 
isinstance(mailhost,tuple):\n self.mailhost,self.mailport=mailhost\n else :\n self.mailhost,self.mailport=mailhost,None\n if isinstance(credentials,tuple):\n self.username,self.password=credentials\n else :\n self.username=None\n self.fromaddr=fromaddr\n if isinstance(toaddrs,str):\n toaddrs=[toaddrs]\n self.toaddrs=toaddrs\n self.subject=subject\n self.secure=secure\n self.timeout=timeout\n \n def getSubject(self,record):\n ''\n\n\n\n\n \n return self.subject\n \n def emit(self,record):\n ''\n\n\n\n \n try :\n import smtplib\n from email.utils import formatdate\n port=self.mailport\n if not port:\n port=smtplib.SMTP_PORT\n smtp=smtplib.SMTP(self.mailhost,port,timeout=self.timeout)\n msg=self.format(record)\n msg=\"From: %s\\r\\nTo: %s\\r\\nSubject: %s\\r\\nDate: %s\\r\\n\\r\\n%s\"%(\n self.fromaddr,\n \",\".join(self.toaddrs),\n self.getSubject(record),\n formatdate(),msg)\n if self.username:\n if self.secure is not None :\n smtp.ehlo()\n smtp.starttls(*self.secure)\n smtp.ehlo()\n smtp.login(self.username,self.password)\n smtp.sendmail(self.fromaddr,self.toaddrs,msg)\n smtp.quit()\n except (KeyboardInterrupt,SystemExit):\n raise\n except :\n self.handleError(record)\n \nclass NTEventLogHandler(logging.Handler):\n ''\n\n\n\n\n\n\n\n \n def __init__(self,appname,dllname=None ,logtype=\"Application\"):\n logging.Handler.__init__(self)\n try :\n import win32evtlogutil,win32evtlog\n self.appname=appname\n self._welu=win32evtlogutil\n if not dllname:\n dllname=os.path.split(self._welu.__file__)\n dllname=os.path.split(dllname[0])\n dllname=os.path.join(dllname[0],r'win32service.pyd')\n self.dllname=dllname\n self.logtype=logtype\n self._welu.AddSourceToRegistry(appname,dllname,logtype)\n self.deftype=win32evtlog.EVENTLOG_ERROR_TYPE\n self.typemap={\n logging.DEBUG:win32evtlog.EVENTLOG_INFORMATION_TYPE,\n logging.INFO:win32evtlog.EVENTLOG_INFORMATION_TYPE,\n logging.WARNING:win32evtlog.EVENTLOG_WARNING_TYPE,\n logging.ERROR:win32evtlog.EVENTLOG_ERROR_TYPE,\n logging.CRITICAL:win32evtlog.EVENTLOG_ERROR_TYPE,\n }\n except ImportError:\n print(\"The Python Win32 extensions for NT (service, event \"\\\n \"logging) appear not to be available.\")\n self._welu=None\n \n def getMessageID(self,record):\n ''\n\n\n\n\n\n \n return 1\n \n def getEventCategory(self,record):\n ''\n\n\n\n\n \n return 0\n \n def getEventType(self,record):\n ''\n\n\n\n\n\n\n\n\n \n return self.typemap.get(record.levelno,self.deftype)\n \n def emit(self,record):\n ''\n\n\n\n\n \n if self._welu:\n try :\n id=self.getMessageID(record)\n cat=self.getEventCategory(record)\n type=self.getEventType(record)\n msg=self.format(record)\n self._welu.ReportEvent(self.appname,id,cat,type,[msg])\n except (KeyboardInterrupt,SystemExit):\n raise\n except :\n self.handleError(record)\n \n def close(self):\n ''\n\n\n\n\n\n\n\n \n \n logging.Handler.close(self)\n \nclass HTTPHandler(logging.Handler):\n ''\n\n\n \n def __init__(self,host,url,method=\"GET\",secure=False ,credentials=None ):\n ''\n\n\n \n logging.Handler.__init__(self)\n method=method.upper()\n if method not in [\"GET\",\"POST\"]:\n raise ValueError(\"method must be GET or POST\")\n self.host=host\n self.url=url\n self.method=method\n self.secure=secure\n self.credentials=credentials\n \n def mapLogRecord(self,record):\n ''\n\n\n\n \n return record.__dict__\n \n def emit(self,record):\n ''\n\n\n\n \n try :\n import http.client,urllib.parse\n host=self.host\n if self.secure:\n h=http.client.HTTPSConnection(host)\n else :\n h=http.client.HTTPConnection(host)\n url=self.url\n 
data=urllib.parse.urlencode(self.mapLogRecord(record))\n if self.method ==\"GET\":\n if (url.find('?')>=0):\n sep='&'\n else :\n sep='?'\n url=url+\"%c%s\"%(sep,data)\n h.putrequest(self.method,url)\n \n \n i=host.find(\":\")\n if i >=0:\n host=host[:i]\n h.putheader(\"Host\",host)\n if self.method ==\"POST\":\n h.putheader(\"Content-type\",\n \"application/x-www-form-urlencoded\")\n h.putheader(\"Content-length\",str(len(data)))\n if self.credentials:\n import base64\n s=('u%s:%s'%self.credentials).encode('utf-8')\n s='Basic '+base64.b64encode(s).strip()\n h.putheader('Authorization',s)\n h.endheaders()\n if self.method ==\"POST\":\n h.send(data.encode('utf-8'))\n h.getresponse()\n except (KeyboardInterrupt,SystemExit):\n raise\n except :\n self.handleError(record)\n \nclass BufferingHandler(logging.Handler):\n ''\n\n\n\n \n def __init__(self,capacity):\n ''\n\n \n logging.Handler.__init__(self)\n self.capacity=capacity\n self.buffer=[]\n \n def shouldFlush(self,record):\n ''\n\n\n\n\n \n return (len(self.buffer)>=self.capacity)\n \n def emit(self,record):\n ''\n\n\n\n\n \n self.buffer.append(record)\n if self.shouldFlush(record):\n self.flush()\n \n def flush(self):\n ''\n\n\n\n \n self.acquire()\n try :\n self.buffer=[]\n finally :\n self.release()\n \n def close(self):\n ''\n\n\n\n \n self.flush()\n logging.Handler.close(self)\n \nclass MemoryHandler(BufferingHandler):\n ''\n\n\n\n \n def __init__(self,capacity,flushLevel=logging.ERROR,target=None ):\n ''\n\n\n\n\n\n \n BufferingHandler.__init__(self,capacity)\n self.flushLevel=flushLevel\n self.target=target\n \n def shouldFlush(self,record):\n ''\n\n \n return (len(self.buffer)>=self.capacity)or\\\n (record.levelno >=self.flushLevel)\n \n def setTarget(self,target):\n ''\n\n \n self.target=target\n \n def flush(self):\n ''\n\n\n\n\n\n \n self.acquire()\n try :\n if self.target:\n for record in self.buffer:\n self.target.handle(record)\n self.buffer=[]\n finally :\n self.release()\n \n def close(self):\n ''\n\n \n self.flush()\n self.acquire()\n try :\n self.target=None\n BufferingHandler.close(self)\n finally :\n self.release()\n \n \nclass QueueHandler(logging.Handler):\n ''\n\n\n\n\n\n\n\n \n \n def __init__(self,queue):\n ''\n\n \n logging.Handler.__init__(self)\n self.queue=queue\n \n def enqueue(self,record):\n ''\n\n\n\n\n\n \n self.queue.put_nowait(record)\n \n def prepare(self,record):\n ''\n\n\n\n\n\n\n\n\n\n\n \n \n \n \n \n \n \n self.format(record)\n record.msg=record.message\n record.args=None\n record.exc_info=None\n return record\n \n def emit(self,record):\n ''\n\n\n\n \n try :\n self.enqueue(self.prepare(record))\n except (KeyboardInterrupt,SystemExit):\n raise\n except :\n self.handleError(record)\n \nif threading:\n class QueueListener(object):\n ''\n\n\n\n \n _sentinel=None\n \n def __init__(self,queue,*handlers):\n ''\n\n\n \n self.queue=queue\n self.handlers=handlers\n self._stop=threading.Event()\n self._thread=None\n \n def dequeue(self,block):\n ''\n\n\n\n\n \n return self.queue.get(block)\n \n def start(self):\n ''\n\n\n\n\n \n self._thread=t=threading.Thread(target=self._monitor)\n t.setDaemon(True )\n t.start()\n \n def prepare(self,record):\n ''\n\n\n\n\n\n \n return record\n \n def handle(self,record):\n ''\n\n\n\n\n \n record=self.prepare(record)\n for handler in self.handlers:\n handler.handle(record)\n \n def _monitor(self):\n ''\n\n\n\n\n\n \n q=self.queue\n has_task_done=hasattr(q,'task_done')\n while not self._stop.isSet():\n try :\n record=self.dequeue(True )\n if record is self._sentinel:\n 
break\n self.handle(record)\n if has_task_done:\n q.task_done()\n except queue.Empty:\n pass\n \n while True :\n try :\n record=self.dequeue(False )\n if record is self._sentinel:\n break\n self.handle(record)\n if has_task_done:\n q.task_done()\n except queue.Empty:\n break\n \n def enqueue_sentinel(self):\n ''\n\n\n\n\n\n \n self.queue.put_nowait(self._sentinel)\n \n def stop(self):\n ''\n\n\n\n\n\n \n self._stop.set()\n self.enqueue_sentinel()\n self._thread.join()\n self._thread=None\n", ["base64", "codecs", "email.utils", "errno", "http.client", "logging", "logging.brython_handlers", "os", "pickle", "queue", "re", "smtplib", "socket", "stat", "struct", "threading", "time", "urllib.parse", "win32evtlog", "win32evtlogutil"]], "logging": [".py", "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\"\"\"\nLogging package for Python. Based on PEP 282 and comments thereto in\ncomp.lang.python.\n\nCopyright (C) 2001-2013 Vinay Sajip. All Rights Reserved.\n\nTo use, simply 'import logging' and log away!\n\"\"\"\n\nimport sys,os,time,io,traceback,warnings,weakref\nfrom string import Template\nfrom browser import console\n\n__all__=['BASIC_FORMAT','BufferingFormatter','CRITICAL','DEBUG','ERROR',\n'FATAL','FileHandler','Filter','Formatter','Handler','INFO',\n'LogRecord','Logger','LoggerAdapter','NOTSET','NullHandler',\n'StreamHandler','ConsoleHandler','WARN','WARNING','addLevelName','basicConfig',\n'captureWarnings','critical','debug','disable','error',\n'exception','fatal','getLevelName','getLogger','getLoggerClass',\n'info','log','makeLogRecord','setLoggerClass','warn','warning',\n'getLogRecordFactory','setLogRecordFactory','lastResort']\n\ntry :\n import threading\nexcept ImportError:\n threading=None\n \n__author__=\"Vinay Sajip \"\n__status__=\"production\"\n__version__=\"0.5.1.2\"\n__date__=\"07 February 2010\"\n\n\n\n\n\n\n\n\n\nif hasattr(sys,'frozen'):\n _srcfile=\"logging%s__init__%s\"%(os.sep,__file__[-4:])\nelse :\n _srcfile=__file__\n_srcfile=os.path.normcase(_srcfile)\n\n\nif hasattr(sys,'_getframe'):\n currentframe=lambda :sys._getframe(3)\nelse :\n def currentframe():\n ''\n try :\n raise Exception\n except :\n return sys.exc_info()[2].tb_frame.f_back\n \n \n \n \n \n \n \n \n \n \n \n_startTime=time.time()\n\n\n\n\n\nraiseExceptions=True\n\n\n\n\nlogThreads=True\n\n\n\n\nlogMultiprocessing=True\n\n\n\n\nlogProcesses=True\n\n\n\n\n\n\n\n\n\n\n\n\nCRITICAL=50\nFATAL=CRITICAL\nERROR=40\nWARNING=30\nWARN=WARNING\nINFO=20\nDEBUG=10\nNOTSET=0\n\n_levelNames={\nCRITICAL:'CRITICAL',\nERROR:'ERROR',\nWARNING:'WARNING',\nINFO:'INFO',\nDEBUG:'DEBUG',\nNOTSET:'NOTSET',\n'CRITICAL':CRITICAL,\n'ERROR':ERROR,\n'WARN':WARNING,\n'WARNING':WARNING,\n'INFO':INFO,\n'DEBUG':DEBUG,\n'NOTSET':NOTSET,\n}\n\ndef getLevelName(level):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n return _levelNames.get(level,(\"Level %s\"%level))\n \ndef addLevelName(level,levelName):\n ''\n\n\n\n \n _acquireLock()\n try :\n _levelNames[level]=levelName\n _levelNames[levelName]=level\n finally :\n _releaseLock()\n \ndef _checkLevel(level):\n if isinstance(level,int):\n rv=level\n elif str(level)==level:\n if level not in _levelNames:\n raise ValueError(\"Unknown level: %r\"%level)\n rv=_levelNames[level]\n else :\n raise TypeError(\"Level not an integer or a valid string: %r\"%level)\n return rv\n \n \n \n \n \n \n \n \n \n \n \n \n \nif threading:\n _lock=threading.RLock()\nelse :\n _lock=None\n \n \ndef _acquireLock():\n ''\n\n\n\n \n if _lock:\n _lock.acquire()\n \ndef _releaseLock():\n ''\n\n \n if _lock:\n _lock.release()\n \n \n \n \n \nclass 
LogRecord(object):\n ''\n\n\n\n\n\n\n\n\n\n \n def __init__(self,name,level,pathname,lineno,\n msg,args,exc_info,func=None ,sinfo=None ,**kwargs):\n ''\n\n \n ct=time.time()\n self.name=name\n self.msg=msg\n \n \n \n \n \n \n \n \n \n \n \n \n \n if args and len(args)==1 and isinstance(args[0],dict)and args[0]:\n args=args[0]\n self.args=args\n self.levelname=getLevelName(level)\n self.levelno=level\n self.pathname=pathname\n try :\n self.filename=os.path.basename(pathname)\n self.module=os.path.splitext(self.filename)[0]\n except (TypeError,ValueError,AttributeError):\n self.filename=pathname\n self.module=\"Unknown module\"\n self.exc_info=exc_info\n self.exc_text=None\n self.stack_info=sinfo\n self.lineno=lineno\n self.funcName=func\n self.created=ct\n self.msecs=(ct -int(ct))*1000\n self.relativeCreated=(self.created -_startTime)*1000\n if logThreads and threading:\n self.thread=threading.get_ident()\n self.threadName=threading.current_thread().name\n else :\n self.thread=None\n self.threadName=None\n if not logMultiprocessing:\n self.processName=None\n else :\n self.processName='MainProcess'\n mp=sys.modules.get('multiprocessing')\n if mp is not None :\n \n \n \n \n try :\n self.processName=mp.current_process().name\n except Exception:\n pass\n if logProcesses and hasattr(os,'getpid'):\n self.process=os.getpid()\n else :\n self.process=None\n \n def __str__(self):\n return ''%(self.name,self.levelno,\n self.pathname,self.lineno,self.msg)\n \n def getMessage(self):\n ''\n\n\n\n\n \n msg=str(self.msg)\n if self.args:\n msg=msg %self.args\n return msg\n \n \n \n \n_logRecordFactory=LogRecord\n\ndef setLogRecordFactory(factory):\n ''\n\n\n\n\n \n global _logRecordFactory\n _logRecordFactory=factory\n \ndef getLogRecordFactory():\n ''\n\n \n \n return _logRecordFactory\n \ndef makeLogRecord(dict):\n ''\n\n\n\n\n \n rv=_logRecordFactory(None ,None ,\"\",0,\"\",(),None ,None )\n rv.__dict__.update(dict)\n return rv\n \n \n \n \n \nclass PercentStyle(object):\n\n default_format='%(message)s'\n asctime_format='%(asctime)s'\n asctime_search='%(asctime)'\n \n def __init__(self,fmt):\n self._fmt=fmt or self.default_format\n \n def usesTime(self):\n return self._fmt.find(self.asctime_search)>=0\n \n def format(self,record):\n return self._fmt %record.__dict__\n \nclass StrFormatStyle(PercentStyle):\n default_format='{message}'\n asctime_format='{asctime}'\n asctime_search='{asctime'\n \n def format(self,record):\n return self._fmt.format(**record.__dict__)\n \n \nclass StringTemplateStyle(PercentStyle):\n default_format='${message}'\n asctime_format='${asctime}'\n asctime_search='${asctime}'\n \n def __init__(self,fmt):\n self._fmt=fmt or self.default_format\n self._tpl=Template(self._fmt)\n \n def usesTime(self):\n fmt=self._fmt\n return fmt.find('$asctime')>=0 or fmt.find(self.asctime_format)>=0\n \n def format(self,record):\n return self._tpl.substitute(**record.__dict__)\n \n_STYLES={\n'%':PercentStyle,\n'{':StrFormatStyle,\n'$':StringTemplateStyle\n}\n\nclass Formatter(object):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n converter=time.localtime\n \n def __init__(self,fmt=None ,datefmt=None ,style='%'):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n if style not in _STYLES:\n raise ValueError('Style must be one of: %s'%','.join(\n _STYLES.keys()))\n self._style=_STYLES[style](fmt)\n self._fmt=self._style._fmt\n self.datefmt=datefmt\n \n default_time_format='%Y-%m-%d %H:%M:%S'\n default_msec_format='%s,%03d'\n \n def formatTime(self,record,datefmt=None ):\n 
''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n ct=self.converter(record.created)\n if datefmt:\n s=time.strftime(datefmt,ct)\n else :\n t=time.strftime(self.default_time_format,ct)\n s=self.default_msec_format %(t,record.msecs)\n return s\n \n def formatException(self,ei):\n ''\n\n\n\n\n \n sio=io.StringIO()\n tb=ei[2]\n \n \n \n traceback.print_exc(file=sio)\n s=sio.getvalue()\n sio.close()\n if s[-1:]==\"\\n\":\n s=s[:-1]\n return s\n \n def usesTime(self):\n ''\n\n \n return self._style.usesTime()\n \n def formatMessage(self,record):\n return self._style.format(record)\n \n def formatStack(self,stack_info):\n ''\n\n\n\n\n\n\n\n\n \n return stack_info\n \n def format(self,record):\n ''\n\n\n\n\n\n\n\n\n\n\n \n record.message=record.getMessage()\n if self.usesTime():\n record.asctime=self.formatTime(record,self.datefmt)\n s=self.formatMessage(record)\n if record.exc_info:\n \n \n if not record.exc_text:\n record.exc_text=self.formatException(record.exc_info)\n if record.exc_text:\n if s[-1:]!=\"\\n\":\n s=s+\"\\n\"\n s=s+record.exc_text\n if record.stack_info:\n if s[-1:]!=\"\\n\":\n s=s+\"\\n\"\n s=s+self.formatStack(record.stack_info)\n return s\n \n \n \n \n_defaultFormatter=Formatter()\n\nclass BufferingFormatter(object):\n ''\n\n \n def __init__(self,linefmt=None ):\n ''\n\n\n \n if linefmt:\n self.linefmt=linefmt\n else :\n self.linefmt=_defaultFormatter\n \n def formatHeader(self,records):\n ''\n\n \n return \"\"\n \n def formatFooter(self,records):\n ''\n\n \n return \"\"\n \n def format(self,records):\n ''\n\n \n rv=\"\"\n if len(records)>0:\n rv=rv+self.formatHeader(records)\n for record in records:\n rv=rv+self.linefmt.format(record)\n rv=rv+self.formatFooter(records)\n return rv\n \n \n \n \n \nclass Filter(object):\n ''\n\n\n\n\n\n\n\n\n \n def __init__(self,name=''):\n ''\n\n\n\n\n\n \n self.name=name\n self.nlen=len(name)\n \n def filter(self,record):\n ''\n\n\n\n\n \n if self.nlen ==0:\n return True\n elif self.name ==record.name:\n return True\n elif record.name.find(self.name,0,self.nlen)!=0:\n return False\n return (record.name[self.nlen]==\".\")\n \nclass Filterer(object):\n ''\n\n\n \n def __init__(self):\n ''\n\n \n self.filters=[]\n \n def addFilter(self,filter):\n ''\n\n \n if not (filter in self.filters):\n self.filters.append(filter)\n \n def removeFilter(self,filter):\n ''\n\n \n if filter in self.filters:\n self.filters.remove(filter)\n \n def filter(self,record):\n ''\n\n\n\n\n\n\n\n\n\n \n rv=True\n for f in self.filters:\n if hasattr(f,'filter'):\n result=f.filter(record)\n else :\n result=f(record)\n if not result:\n rv=False\n break\n return rv\n \n \n \n \n \n_handlers=weakref.WeakValueDictionary()\n_handlerList=[]\n\ndef _removeHandlerRef(wr):\n ''\n\n \n \n \n \n if (_acquireLock is not None and _handlerList is not None and\n _releaseLock is not None ):\n _acquireLock()\n try :\n if wr in _handlerList:\n _handlerList.remove(wr)\n finally :\n _releaseLock()\n \ndef _addHandlerRef(handler):\n ''\n\n \n _acquireLock()\n try :\n _handlerList.append(weakref.ref(handler,_removeHandlerRef))\n finally :\n _releaseLock()\n \nclass Handler(Filterer):\n ''\n\n\n\n\n\n\n \n def __init__(self,level=NOTSET):\n ''\n\n\n \n Filterer.__init__(self)\n self._name=None\n self.level=_checkLevel(level)\n self.formatter=None\n \n _addHandlerRef(self)\n self.createLock()\n \n def get_name(self):\n return self._name\n \n def set_name(self,name):\n _acquireLock()\n try :\n if self._name in _handlers:\n del _handlers[self._name]\n self._name=name\n if name:\n _handlers[name]=self\n 
finally :\n _releaseLock()\n \n name=property(get_name,set_name)\n \n def createLock(self):\n ''\n\n \n if threading:\n self.lock=threading.RLock()\n else :\n self.lock=None\n \n def acquire(self):\n ''\n\n \n if self.lock:\n self.lock.acquire()\n \n def release(self):\n ''\n\n \n if self.lock:\n self.lock.release()\n \n def setLevel(self,level):\n ''\n\n \n self.level=_checkLevel(level)\n \n def format(self,record):\n ''\n\n\n\n\n \n if self.formatter:\n fmt=self.formatter\n else :\n fmt=_defaultFormatter\n return fmt.format(record)\n \n def emit(self,record):\n ''\n\n\n\n\n \n raise NotImplementedError('emit must be implemented '\n 'by Handler subclasses')\n \n def handle(self,record):\n ''\n\n\n\n\n\n\n \n rv=self.filter(record)\n if rv:\n self.acquire()\n try :\n self.emit(record)\n finally :\n self.release()\n return rv\n \n def setFormatter(self,fmt):\n ''\n\n \n self.formatter=fmt\n \n def flush(self):\n ''\n\n\n\n\n \n pass\n \n def close(self):\n ''\n\n\n\n\n\n\n \n \n _acquireLock()\n try :\n if self._name and self._name in _handlers:\n del _handlers[self._name]\n finally :\n _releaseLock()\n \n def handleError(self,record):\n ''\n\n\n\n\n\n\n\n\n\n \n if raiseExceptions and sys.stderr:\n try :\n traceback.print_exc(file=sys.stderr)\n sys.stderr.write('Logged from file %s, line %s\\n'%(\n record.filename,record.lineno))\n except IOError:\n pass\n \nclass StreamHandler(Handler):\n ''\n\n\n\n \n \n terminator='\\n'\n \n def __init__(self,stream=None ):\n ''\n\n\n\n \n Handler.__init__(self)\n if stream is None :\n stream=sys.stderr\n self.stream=stream\n \n def flush(self):\n ''\n\n \n self.acquire()\n try :\n if self.stream and hasattr(self.stream,\"flush\"):\n self.stream.flush()\n finally :\n self.release()\n \n def emit(self,record):\n ''\n\n\n\n\n\n\n\n\n \n try :\n msg=self.format(record)\n stream=self.stream\n stream.write(msg)\n stream.write(self.terminator)\n self.flush()\n except (KeyboardInterrupt,SystemExit):\n raise\n except :\n self.handleError(record)\n \nclass FileHandler(StreamHandler):\n ''\n\n \n def __init__(self,filename,mode='a',encoding=None ,delay=False ):\n ''\n\n \n \n \n self.baseFilename=os.path.abspath(filename)\n self.mode=mode\n self.encoding=encoding\n self.delay=delay\n if delay:\n \n \n Handler.__init__(self)\n self.stream=None\n else :\n StreamHandler.__init__(self,self._open())\n \n def close(self):\n ''\n\n \n self.acquire()\n try :\n if self.stream:\n self.flush()\n if hasattr(self.stream,\"close\"):\n self.stream.close()\n StreamHandler.close(self)\n self.stream=None\n finally :\n self.release()\n \n def _open(self):\n ''\n\n\n \n return open(self.baseFilename,self.mode,encoding=self.encoding)\n \n def emit(self,record):\n ''\n\n\n\n\n \n if self.stream is None :\n self.stream=self._open()\n StreamHandler.emit(self,record)\n \nclass _StderrHandler(StreamHandler):\n ''\n\n\n\n \n def __init__(self,level=NOTSET):\n ''\n\n \n Handler.__init__(self,level)\n \n @property\n def stream(self):\n return sys.stderr\n \n \nclass ConsoleHandler(Handler):\n ''\n\n\n \n \n def emit(self,record):\n ''\n\n\n\n\n\n\n \n try :\n msg=self.format(record)\n console.log(msg)\n except :\n self.handleError(record)\n \n_defaultLastResort=ConsoleHandler(WARNING)\nlastResort=_defaultLastResort\n\n\n\n\n\nclass PlaceHolder(object):\n ''\n\n\n\n \n def __init__(self,alogger):\n ''\n\n \n self.loggerMap={alogger:None }\n \n def append(self,alogger):\n ''\n\n \n if alogger not in self.loggerMap:\n self.loggerMap[alogger]=None\n \n \n \n \n_loggerClass=None\n\ndef 
setLoggerClass(klass):\n ''\n\n\n\n \n if klass !=Logger:\n if not issubclass(klass,Logger):\n raise TypeError(\"logger not derived from logging.Logger: \"\n +klass.__name__)\n global _loggerClass\n _loggerClass=klass\n \ndef getLoggerClass():\n ''\n\n \n \n return _loggerClass\n \nclass Manager(object):\n ''\n\n\n \n def __init__(self,rootnode):\n ''\n\n \n self.root=rootnode\n self.disable=0\n self.emittedNoHandlerWarning=False\n self.loggerDict={}\n self.loggerClass=None\n self.logRecordFactory=None\n \n def getLogger(self,name):\n ''\n\n\n\n\n\n\n\n\n \n rv=None\n if not isinstance(name,str):\n raise TypeError('A logger name must be a string')\n _acquireLock()\n try :\n if name in self.loggerDict:\n rv=self.loggerDict[name]\n if isinstance(rv,PlaceHolder):\n ph=rv\n rv=(self.loggerClass or _loggerClass)(name)\n rv.manager=self\n self.loggerDict[name]=rv\n self._fixupChildren(ph,rv)\n self._fixupParents(rv)\n else :\n rv=(self.loggerClass or _loggerClass)(name)\n rv.manager=self\n self.loggerDict[name]=rv\n self._fixupParents(rv)\n finally :\n _releaseLock()\n return rv\n \n def setLoggerClass(self,klass):\n ''\n\n \n if klass !=Logger:\n if not issubclass(klass,Logger):\n raise TypeError(\"logger not derived from logging.Logger: \"\n +klass.__name__)\n self.loggerClass=klass\n \n def setLogRecordFactory(self,factory):\n ''\n\n\n \n self.logRecordFactory=factory\n \n def _fixupParents(self,alogger):\n ''\n\n\n \n name=alogger.name\n i=name.rfind(\".\")\n rv=None\n while (i >0)and not rv:\n substr=name[:i]\n if substr not in self.loggerDict:\n self.loggerDict[substr]=PlaceHolder(alogger)\n else :\n obj=self.loggerDict[substr]\n if isinstance(obj,Logger):\n rv=obj\n else :\n assert isinstance(obj,PlaceHolder)\n obj.append(alogger)\n i=name.rfind(\".\",0,i -1)\n if not rv:\n rv=self.root\n alogger.parent=rv\n \n def _fixupChildren(self,ph,alogger):\n ''\n\n\n \n name=alogger.name\n namelen=len(name)\n for c in ph.loggerMap.keys():\n \n if c.parent.name[:namelen]!=name:\n alogger.parent=c.parent\n c.parent=alogger\n \n \n \n \n \nclass Logger(Filterer):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n def __init__(self,name,level=NOTSET):\n ''\n\n \n Filterer.__init__(self)\n self.name=name\n self.level=_checkLevel(level)\n self.parent=None\n self.propagate=True\n self.handlers=[]\n self.disabled=False\n \n def setLevel(self,level):\n ''\n\n \n self.level=_checkLevel(level)\n \n def debug(self,msg,*args,**kwargs):\n ''\n\n\n\n\n\n\n \n if self.isEnabledFor(DEBUG):\n self._log(DEBUG,msg,args,**kwargs)\n \n def info(self,msg,*args,**kwargs):\n ''\n\n\n\n\n\n\n \n if self.isEnabledFor(INFO):\n self._log(INFO,msg,args,**kwargs)\n \n def warning(self,msg,*args,**kwargs):\n ''\n\n\n\n\n\n\n \n if self.isEnabledFor(WARNING):\n self._log(WARNING,msg,args,**kwargs)\n \n def warn(self,msg,*args,**kwargs):\n warnings.warn(\"The 'warn' method is deprecated, \"\n \"use 'warning' instead\",DeprecationWarning,2)\n self.warning(msg,*args,**kwargs)\n \n def error(self,msg,*args,**kwargs):\n ''\n\n\n\n\n\n\n \n if self.isEnabledFor(ERROR):\n self._log(ERROR,msg,args,**kwargs)\n \n def exception(self,msg,*args,**kwargs):\n ''\n\n \n kwargs['exc_info']=True\n self.error(msg,*args,**kwargs)\n \n def critical(self,msg,*args,**kwargs):\n ''\n\n\n\n\n\n\n \n if self.isEnabledFor(CRITICAL):\n self._log(CRITICAL,msg,args,**kwargs)\n \n fatal=critical\n \n def log(self,level,msg,*args,**kwargs):\n ''\n\n\n\n\n\n\n \n if not isinstance(level,int):\n if raiseExceptions:\n raise TypeError(\"level must be an integer\")\n else :\n 
return\n if self.isEnabledFor(level):\n self._log(level,msg,args,**kwargs)\n \n def findCaller(self,stack_info=False ):\n ''\n\n\n \n f=currentframe()\n \n \n if f is not None :\n f=f.f_back\n rv=\"(unknown file)\",0,\"(unknown function)\",None\n while hasattr(f,\"f_code\"):\n co=f.f_code\n filename=os.path.normcase(co.co_filename)\n if filename ==_srcfile:\n f=f.f_back\n continue\n sinfo=None\n if stack_info:\n sio=io.StringIO()\n sio.write('Stack (most recent call last):\\n')\n traceback.print_stack(f,file=sio)\n sinfo=sio.getvalue()\n if sinfo[-1]=='\\n':\n sinfo=sinfo[:-1]\n sio.close()\n rv=(co.co_filename,f.f_lineno,co.co_name,sinfo)\n break\n return rv\n \n def makeRecord(self,name,level,fn,lno,msg,args,exc_info,\n func=None ,extra=None ,sinfo=None ):\n ''\n\n\n \n rv=_logRecordFactory(name,level,fn,lno,msg,args,exc_info,func,\n sinfo)\n if extra is not None :\n for key in extra:\n if (key in [\"message\",\"asctime\"])or (key in rv.__dict__):\n raise KeyError(\"Attempt to overwrite %r in LogRecord\"%key)\n rv.__dict__[key]=extra[key]\n return rv\n \n def _log(self,level,msg,args,exc_info=None ,extra=None ,stack_info=False ):\n ''\n\n\n \n sinfo=None\n if _srcfile:\n \n \n \n try :\n fn,lno,func,sinfo=self.findCaller(stack_info)\n except ValueError:\n fn,lno,func=\"(unknown file)\",0,\"(unknown function)\"\n else :\n fn,lno,func=\"(unknown file)\",0,\"(unknown function)\"\n if exc_info:\n if not isinstance(exc_info,tuple):\n exc_info=sys.exc_info()\n record=self.makeRecord(self.name,level,fn,lno,msg,args,\n exc_info,func,extra,sinfo)\n self.handle(record)\n \n def handle(self,record):\n ''\n\n\n\n\n \n if (not self.disabled)and self.filter(record):\n self.callHandlers(record)\n \n def addHandler(self,hdlr):\n ''\n\n \n _acquireLock()\n try :\n if not (hdlr in self.handlers):\n self.handlers.append(hdlr)\n finally :\n _releaseLock()\n \n def removeHandler(self,hdlr):\n ''\n\n \n _acquireLock()\n try :\n if hdlr in self.handlers:\n self.handlers.remove(hdlr)\n finally :\n _releaseLock()\n \n def hasHandlers(self):\n ''\n\n\n\n\n\n\n\n \n c=self\n rv=False\n while c:\n if c.handlers:\n rv=True\n break\n if not c.propagate:\n break\n else :\n c=c.parent\n return rv\n \n def callHandlers(self,record):\n ''\n\n\n\n\n\n\n\n \n c=self\n found=0\n while c:\n for hdlr in c.handlers:\n found=found+1\n if record.levelno >=hdlr.level:\n hdlr.handle(record)\n if not c.propagate:\n c=None\n else :\n c=c.parent\n if (found ==0):\n if lastResort:\n if record.levelno >=lastResort.level:\n lastResort.handle(record)\n elif raiseExceptions and not self.manager.emittedNoHandlerWarning:\n sys.stderr.write(\"No handlers could be found for logger\"\n \" \\\"%s\\\"\\n\"%self.name)\n self.manager.emittedNoHandlerWarning=True\n \n def getEffectiveLevel(self):\n ''\n\n\n\n\n \n logger=self\n while logger:\n if logger.level:\n return logger.level\n logger=logger.parent\n return NOTSET\n \n def isEnabledFor(self,level):\n ''\n\n \n if self.manager.disable >=level:\n return False\n return level >=self.getEffectiveLevel()\n \n def getChild(self,suffix):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n if self.root is not self:\n suffix='.'.join((self.name,suffix))\n return self.manager.getLogger(suffix)\n \nclass RootLogger(Logger):\n ''\n\n\n\n \n def __init__(self,level):\n ''\n\n \n Logger.__init__(self,\"root\",level)\n \n_loggerClass=Logger\n\nclass LoggerAdapter(object):\n ''\n\n\n \n \n def __init__(self,logger,extra):\n ''\n\n\n\n\n\n\n\n\n \n self.logger=logger\n self.extra=extra\n \n def process(self,msg,kwargs):\n 
''\n\n\n\n\n\n\n\n \n kwargs[\"extra\"]=self.extra\n return msg,kwargs\n \n \n \n \n def debug(self,msg,*args,**kwargs):\n ''\n\n \n self.log(DEBUG,msg,*args,**kwargs)\n \n def info(self,msg,*args,**kwargs):\n ''\n\n \n self.log(INFO,msg,*args,**kwargs)\n \n def warning(self,msg,*args,**kwargs):\n ''\n\n \n self.log(WARNING,msg,*args,**kwargs)\n \n def warn(self,msg,*args,**kwargs):\n warnings.warn(\"The 'warn' method is deprecated, \"\n \"use 'warning' instead\",DeprecationWarning,2)\n self.warning(msg,*args,**kwargs)\n \n def error(self,msg,*args,**kwargs):\n ''\n\n \n self.log(ERROR,msg,*args,**kwargs)\n \n def exception(self,msg,*args,**kwargs):\n ''\n\n \n kwargs[\"exc_info\"]=True\n self.log(ERROR,msg,*args,**kwargs)\n \n def critical(self,msg,*args,**kwargs):\n ''\n\n \n self.log(CRITICAL,msg,*args,**kwargs)\n \n def log(self,level,msg,*args,**kwargs):\n ''\n\n\n \n if self.isEnabledFor(level):\n msg,kwargs=self.process(msg,kwargs)\n self.logger._log(level,msg,args,**kwargs)\n \n def isEnabledFor(self,level):\n ''\n\n \n if self.logger.manager.disable >=level:\n return False\n return level >=self.getEffectiveLevel()\n \n def setLevel(self,level):\n ''\n\n \n self.logger.setLevel(level)\n \n def getEffectiveLevel(self):\n ''\n\n \n return self.logger.getEffectiveLevel()\n \n def hasHandlers(self):\n ''\n\n \n return self.logger.hasHandlers()\n \nroot=RootLogger(WARNING)\nLogger.root=root\nLogger.manager=Manager(Logger.root)\n\n\n\n\n\nBASIC_FORMAT=\"%(levelname)s:%(name)s:%(message)s\"\n\ndef basicConfig(**kwargs):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n \n _acquireLock()\n try :\n if len(root.handlers)==0:\n handlers=kwargs.get(\"handlers\")\n if handlers is None :\n if \"stream\"in kwargs and \"filename\"in kwargs:\n raise ValueError(\"'stream' and 'filename' should not be \"\n \"specified together\")\n else :\n if \"stream\"in kwargs or \"filename\"in kwargs:\n raise ValueError(\"'stream' or 'filename' should not be \"\n \"specified together with 'handlers'\")\n if handlers is None :\n filename=kwargs.get(\"filename\")\n if filename:\n mode=kwargs.get(\"filemode\",'a')\n h=FileHandler(filename,mode)\n else :\n stream=kwargs.get(\"stream\")\n if stream:\n h=StreamHandler(stream)\n else :\n h=ConsoleHandler()\n handlers=[h]\n fs=kwargs.get(\"format\",BASIC_FORMAT)\n dfs=kwargs.get(\"datefmt\",None )\n style=kwargs.get(\"style\",'%')\n fmt=Formatter(fs,dfs,style)\n for h in handlers:\n if h.formatter is None :\n h.setFormatter(fmt)\n root.addHandler(h)\n level=kwargs.get(\"level\")\n if level is not None :\n root.setLevel(level)\n finally :\n _releaseLock()\n \n \n \n \n \n \ndef getLogger(name=None ):\n ''\n\n\n\n \n if name:\n return Logger.manager.getLogger(name)\n else :\n return root\n \ndef critical(msg,*args,**kwargs):\n ''\n\n\n\n \n if len(root.handlers)==0:\n basicConfig()\n root.critical(msg,*args,**kwargs)\n \nfatal=critical\n\ndef error(msg,*args,**kwargs):\n ''\n\n\n\n \n if len(root.handlers)==0:\n basicConfig()\n root.error(msg,*args,**kwargs)\n \ndef exception(msg,*args,**kwargs):\n ''\n\n\n\n \n kwargs['exc_info']=True\n error(msg,*args,**kwargs)\n \ndef warning(msg,*args,**kwargs):\n ''\n\n\n\n \n if len(root.handlers)==0:\n basicConfig()\n root.warning(msg,*args,**kwargs)\n \ndef warn(msg,*args,**kwargs):\n warnings.warn(\"The 'warn' function is deprecated, \"\n \"use 'warning' instead\",DeprecationWarning,2)\n warning(msg,*args,**kwargs)\n \ndef info(msg,*args,**kwargs):\n ''\n\n\n\n \n if 
len(root.handlers)==0:\n basicConfig()\n root.info(msg,*args,**kwargs)\n \ndef debug(msg,*args,**kwargs):\n ''\n\n\n\n \n if len(root.handlers)==0:\n basicConfig()\n root.debug(msg,*args,**kwargs)\n \ndef log(level,msg,*args,**kwargs):\n ''\n\n\n\n \n if len(root.handlers)==0:\n basicConfig()\n root.log(level,msg,*args,**kwargs)\n \ndef disable(level):\n ''\n\n \n root.manager.disable=level\n \ndef shutdown(handlerList=_handlerList):\n ''\n\n\n\n\n \n for wr in reversed(handlerList[:]):\n \n \n try :\n h=wr()\n if h:\n try :\n h.acquire()\n h.flush()\n h.close()\n except (IOError,ValueError):\n \n \n \n \n pass\n finally :\n h.release()\n except :\n if raiseExceptions:\n raise\n \n \n \nimport atexit\natexit.register(shutdown)\n\n\n\nclass NullHandler(Handler):\n ''\n\n\n\n\n\n\n\n \n def handle(self,record):\n ''\n \n def emit(self,record):\n ''\n \n def createLock(self):\n self.lock=None\n \n \n \n_warnings_showwarning=None\n\ndef _showwarning(message,category,filename,lineno,file=None ,line=None ):\n ''\n\n\n\n\n\n \n if file is not None :\n if _warnings_showwarning is not None :\n _warnings_showwarning(message,category,filename,lineno,file,line)\n else :\n s=warnings.formatwarning(message,category,filename,lineno,line)\n logger=getLogger(\"py.warnings\")\n if not logger.handlers:\n logger.addHandler(NullHandler())\n logger.warning(\"%s\",s)\n \ndef captureWarnings(capture):\n ''\n\n\n\n \n global _warnings_showwarning\n if capture:\n if _warnings_showwarning is None :\n _warnings_showwarning=warnings.showwarning\n warnings.showwarning=_showwarning\n else :\n if _warnings_showwarning is not None :\n warnings.showwarning=_warnings_showwarning\n _warnings_showwarning=None\n", ["atexit", "browser", "io", "os", "string", "sys", "threading", "time", "traceback", "warnings", "weakref"], 1], "multiprocessing.connection": [".py", "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n__all__=['Client','Listener','Pipe']\n\nfrom queue import Queue\n\n\nfamilies=[None ]\n\n\nclass Listener(object):\n\n def __init__(self,address=None ,family=None ,backlog=1):\n self._backlog_queue=Queue(backlog)\n \n def accept(self):\n return Connection(*self._backlog_queue.get())\n \n def close(self):\n self._backlog_queue=None\n \n address=property(lambda self:self._backlog_queue)\n \n def __enter__(self):\n return self\n \n def __exit__(self,exc_type,exc_value,exc_tb):\n self.close()\n \n \ndef Client(address):\n _in,_out=Queue(),Queue()\n address.put((_out,_in))\n return Connection(_in,_out)\n \n \ndef Pipe(duplex=True ):\n a,b=Queue(),Queue()\n return Connection(a,b),Connection(b,a)\n \n \nclass Connection(object):\n\n def __init__(self,_in,_out):\n self._out=_out\n self._in=_in\n self.send=self.send_bytes=_out.put\n self.recv=self.recv_bytes=_in.get\n \n def poll(self,timeout=0.0):\n if self._in.qsize()>0:\n return True\n if timeout <=0.0:\n return False\n self._in.not_empty.acquire()\n self._in.not_empty.wait(timeout)\n self._in.not_empty.release()\n return self._in.qsize()>0\n \n def close(self):\n pass\n \n def __enter__(self):\n return self\n \n def __exit__(self,exc_type,exc_value,exc_tb):\n self.close()\n", ["queue"]], "multiprocessing.pool": [".py", "\n\n\n\n\n\n\n\n\n__all__=['Pool']\n\n\n\n\n\nimport threading\nimport queue\nimport itertools\nimport collections\nimport time\n\nfrom multiprocessing import Process,cpu_count,TimeoutError\nfrom multiprocessing.util import Finalize,debug\n\n\n\n\n\nRUN=0\nCLOSE=1\nTERMINATE=2\n\n\n\n\n\njob_counter=itertools.count()\n\ndef mapstar(args):\n 
return list(map(*args))\n \ndef starmapstar(args):\n return list(itertools.starmap(args[0],args[1]))\n \n \n \n \n \nclass MaybeEncodingError(Exception):\n ''\n \n \n def __init__(self,exc,value):\n self.exc=repr(exc)\n self.value=repr(value)\n super(MaybeEncodingError,self).__init__(self.exc,self.value)\n \n def __str__(self):\n return \"Error sending result: '%s'. Reason: '%s'\"%(self.value,\n self.exc)\n \n def __repr__(self):\n return \"\"%str(self)\n \n \ndef worker(inqueue,outqueue,initializer=None ,initargs=(),maxtasks=None ):\n assert maxtasks is None or (type(maxtasks)==int and maxtasks >0)\n put=outqueue.put\n get=inqueue.get\n if hasattr(inqueue,'_writer'):\n inqueue._writer.close()\n outqueue._reader.close()\n \n if initializer is not None :\n initializer(*initargs)\n \n completed=0\n while maxtasks is None or (maxtasks and completed 1\n task_batches=Pool._get_tasks(func,iterable,chunksize)\n result=IMapIterator(self._cache)\n self._taskqueue.put((((result._job,i,mapstar,(x,),{})\n for i,x in enumerate(task_batches)),result._set_length))\n return (item for chunk in result for item in chunk)\n \n def imap_unordered(self,func,iterable,chunksize=1):\n ''\n\n \n if self._state !=RUN:\n raise ValueError(\"Pool not running\")\n if chunksize ==1:\n result=IMapUnorderedIterator(self._cache)\n self._taskqueue.put((((result._job,i,func,(x,),{})\n for i,x in enumerate(iterable)),result._set_length))\n return result\n else :\n assert chunksize >1\n task_batches=Pool._get_tasks(func,iterable,chunksize)\n result=IMapUnorderedIterator(self._cache)\n self._taskqueue.put((((result._job,i,mapstar,(x,),{})\n for i,x in enumerate(task_batches)),result._set_length))\n return (item for chunk in result for item in chunk)\n \n def apply_async(self,func,args=(),kwds={},callback=None ,\n error_callback=None ):\n ''\n\n \n if self._state !=RUN:\n raise ValueError(\"Pool not running\")\n result=ApplyResult(self._cache,callback,error_callback)\n self._taskqueue.put(([(result._job,None ,func,args,kwds)],None ))\n return result\n \n def map_async(self,func,iterable,chunksize=None ,callback=None ,\n error_callback=None ):\n ''\n\n \n return self._map_async(func,iterable,mapstar,chunksize,callback,\n error_callback)\n \n def _map_async(self,func,iterable,mapper,chunksize=None ,callback=None ,\n error_callback=None ):\n ''\n\n \n if self._state !=RUN:\n raise ValueError(\"Pool not running\")\n if not hasattr(iterable,'__len__'):\n iterable=list(iterable)\n \n if chunksize is None :\n chunksize,extra=divmod(len(iterable),len(self._pool)*4)\n if extra:\n chunksize +=1\n if len(iterable)==0:\n chunksize=0\n \n task_batches=Pool._get_tasks(func,iterable,chunksize)\n result=MapResult(self._cache,chunksize,len(iterable),callback,\n error_callback=error_callback)\n self._taskqueue.put((((result._job,i,mapper,(x,),{})\n for i,x in enumerate(task_batches)),None ))\n return result\n \n @staticmethod\n def _handle_workers(pool):\n thread=threading.current_thread()\n \n \n \n while thread._state ==RUN or (pool._cache and thread._state !=TERMINATE):\n pool._maintain_pool()\n time.sleep(0.1)\n \n pool._taskqueue.put(None )\n debug('worker handler exiting')\n \n @staticmethod\n def _handle_tasks(taskqueue,put,outqueue,pool):\n thread=threading.current_thread()\n \n for taskseq,set_length in iter(taskqueue.get,None ):\n i=-1\n for i,task in enumerate(taskseq):\n if thread._state:\n debug('task handler found thread._state != RUN')\n break\n try :\n put(task)\n except IOError:\n debug('could not put task on queue')\n break\n 
else :\n if set_length:\n debug('doing set_length()')\n set_length(i+1)\n continue\n break\n else :\n debug('task handler got sentinel')\n \n \n try :\n \n debug('task handler sending sentinel to result handler')\n outqueue.put(None )\n \n \n debug('task handler sending sentinel to workers')\n for p in pool:\n put(None )\n except IOError:\n debug('task handler got IOError when sending sentinels')\n \n debug('task handler exiting')\n \n @staticmethod\n def _handle_results(outqueue,get,cache):\n thread=threading.current_thread()\n \n while 1:\n try :\n task=get()\n except (IOError,EOFError):\n debug('result handler got EOFError/IOError -- exiting')\n return\n \n if thread._state:\n assert thread._state ==TERMINATE\n debug('result handler found thread._state=TERMINATE')\n break\n \n if task is None :\n debug('result handler got sentinel')\n break\n \n job,i,obj=task\n try :\n cache[job]._set(i,obj)\n except KeyError:\n pass\n \n while cache and thread._state !=TERMINATE:\n try :\n task=get()\n except (IOError,EOFError):\n debug('result handler got EOFError/IOError -- exiting')\n return\n \n if task is None :\n debug('result handler ignoring extra sentinel')\n continue\n job,i,obj=task\n try :\n cache[job]._set(i,obj)\n except KeyError:\n pass\n \n if hasattr(outqueue,'_reader'):\n debug('ensuring that outqueue is not full')\n \n \n \n try :\n for i in range(10):\n if not outqueue._reader.poll():\n break\n get()\n except (IOError,EOFError):\n pass\n \n debug('result handler exiting: len(cache)=%s, thread._state=%s',\n len(cache),thread._state)\n \n @staticmethod\n def _get_tasks(func,it,size):\n it=iter(it)\n while 1:\n x=tuple(itertools.islice(it,size))\n if not x:\n return\n yield (func,x)\n \n def __reduce__(self):\n raise NotImplementedError(\n 'pool objects cannot be passed between processes or pickled'\n )\n \n def close(self):\n debug('closing pool')\n if self._state ==RUN:\n self._state=CLOSE\n self._worker_handler._state=CLOSE\n \n def terminate(self):\n debug('terminating pool')\n self._state=TERMINATE\n self._worker_handler._state=TERMINATE\n self._terminate()\n \n def join(self):\n debug('joining pool')\n assert self._state in (CLOSE,TERMINATE)\n self._worker_handler.join()\n self._task_handler.join()\n self._result_handler.join()\n for p in self._pool:\n p.join()\n \n @staticmethod\n def _help_stuff_finish(inqueue,task_handler,size):\n \n debug('removing tasks from inqueue until task handler finished')\n inqueue._rlock.acquire()\n while task_handler.is_alive()and inqueue._reader.poll():\n inqueue._reader.recv()\n time.sleep(0)\n \n @classmethod\n def _terminate_pool(cls,taskqueue,inqueue,outqueue,pool,\n worker_handler,task_handler,result_handler,cache):\n \n debug('finalizing pool')\n \n worker_handler._state=TERMINATE\n task_handler._state=TERMINATE\n \n debug('helping task handler/workers to finish')\n cls._help_stuff_finish(inqueue,task_handler,len(pool))\n \n assert result_handler.is_alive()or len(cache)==0\n \n result_handler._state=TERMINATE\n outqueue.put(None )\n \n \n \n debug('joining worker handler')\n if threading.current_thread()is not worker_handler:\n worker_handler.join()\n \n \n if pool and hasattr(pool[0],'terminate'):\n debug('terminating workers')\n for p in pool:\n if p.exitcode is None :\n p.terminate()\n \n debug('joining task handler')\n if threading.current_thread()is not task_handler:\n task_handler.join()\n \n debug('joining result handler')\n if threading.current_thread()is not result_handler:\n result_handler.join()\n \n if pool and 
hasattr(pool[0],'terminate'):\n debug('joining pool workers')\n for p in pool:\n if p.is_alive():\n \n debug('cleaning up worker %d'%p.pid)\n p.join()\n \n def __enter__(self):\n return self\n \n def __exit__(self,exc_type,exc_val,exc_tb):\n self.terminate()\n \n \n \n \n \nclass ApplyResult(object):\n\n def __init__(self,cache,callback,error_callback):\n self._event=threading.Event()\n self._job=next(job_counter)\n self._cache=cache\n self._callback=callback\n self._error_callback=error_callback\n cache[self._job]=self\n \n def ready(self):\n return self._event.is_set()\n \n def successful(self):\n assert self.ready()\n return self._success\n \n def wait(self,timeout=None ):\n self._event.wait(timeout)\n \n def get(self,timeout=None ):\n self.wait(timeout)\n if not self.ready():\n raise TimeoutError\n if self._success:\n return self._value\n else :\n raise self._value\n \n def _set(self,i,obj):\n self._success,self._value=obj\n if self._callback and self._success:\n self._callback(self._value)\n if self._error_callback and not self._success:\n self._error_callback(self._value)\n self._event.set()\n del self._cache[self._job]\n \nAsyncResult=ApplyResult\n\n\n\n\n\nclass MapResult(ApplyResult):\n\n def __init__(self,cache,chunksize,length,callback,error_callback):\n ApplyResult.__init__(self,cache,callback,\n error_callback=error_callback)\n self._success=True\n self._value=[None ]*length\n self._chunksize=chunksize\n if chunksize <=0:\n self._number_left=0\n self._event.set()\n del cache[self._job]\n else :\n self._number_left=length //chunksize+bool(length %chunksize)\n \n def _set(self,i,success_result):\n success,result=success_result\n if success:\n self._value[i *self._chunksize:(i+1)*self._chunksize]=result\n self._number_left -=1\n if self._number_left ==0:\n if self._callback:\n self._callback(self._value)\n del self._cache[self._job]\n self._event.set()\n else :\n self._success=False\n self._value=result\n if self._error_callback:\n self._error_callback(self._value)\n del self._cache[self._job]\n self._event.set()\n \n \n \n \n \nclass IMapIterator(object):\n\n def __init__(self,cache):\n self._cond=threading.Condition(threading.Lock())\n self._job=next(job_counter)\n self._cache=cache\n self._items=collections.deque()\n self._index=0\n self._length=None\n self._unsorted={}\n cache[self._job]=self\n \n def __iter__(self):\n return self\n \n def next(self,timeout=None ):\n self._cond.acquire()\n try :\n try :\n item=self._items.popleft()\n except IndexError:\n if self._index ==self._length:\n raise StopIteration\n self._cond.wait(timeout)\n try :\n item=self._items.popleft()\n except IndexError:\n if self._index ==self._length:\n raise StopIteration\n raise TimeoutError\n finally :\n self._cond.release()\n \n success,value=item\n if success:\n return value\n raise value\n \n __next__=next\n \n def _set(self,i,obj):\n self._cond.acquire()\n try :\n if self._index ==i:\n self._items.append(obj)\n self._index +=1\n while self._index in self._unsorted:\n obj=self._unsorted.pop(self._index)\n self._items.append(obj)\n self._index +=1\n self._cond.notify()\n else :\n self._unsorted[i]=obj\n \n if self._index ==self._length:\n del self._cache[self._job]\n finally :\n self._cond.release()\n \n def _set_length(self,length):\n self._cond.acquire()\n try :\n self._length=length\n if self._index ==self._length:\n self._cond.notify()\n del self._cache[self._job]\n finally :\n self._cond.release()\n \n \n \n \n \nclass IMapUnorderedIterator(IMapIterator):\n\n def _set(self,i,obj):\n 
self._cond.acquire()\n try :\n self._items.append(obj)\n self._index +=1\n self._cond.notify()\n if self._index ==self._length:\n del self._cache[self._job]\n finally :\n self._cond.release()\n \n \n \n \n \nclass ThreadPool(Pool):\n\n from .dummy import Process\n \n def __init__(self,processes=None ,initializer=None ,initargs=()):\n Pool.__init__(self,processes,initializer,initargs)\n \n def _setup_queues(self):\n self._inqueue=queue.Queue()\n self._outqueue=queue.Queue()\n self._quick_put=self._inqueue.put\n self._quick_get=self._outqueue.get\n \n @staticmethod\n def _help_stuff_finish(inqueue,task_handler,size):\n \n inqueue.not_empty.acquire()\n try :\n inqueue.queue.clear()\n inqueue.queue.extend([None ]*size)\n inqueue.not_empty.notify_all()\n finally :\n inqueue.not_empty.release()\n", ["collections", "itertools", "multiprocessing", "multiprocessing.Process", "multiprocessing.dummy", "multiprocessing.queues", "multiprocessing.util", "queue", "threading", "time"]], "multiprocessing.process": [".py", "\n\n\n\n\n\n\n\n\n__all__=['Process','current_process','active_children']\n\n\n\n\n\nimport os\nimport sys\nimport signal\nimport itertools\nfrom _weakrefset import WeakSet\n\n\nfrom _multiprocessing import Process\n\n\n\n\ntry :\n ORIGINAL_DIR=os.path.abspath(os.getcwd())\nexcept OSError:\n ORIGINAL_DIR=None\n \n \n \n \n \ndef current_process():\n ''\n\n \n return _current_process\n \ndef active_children():\n ''\n\n \n _cleanup()\n return list(_current_process._children)\n \n \n \n \n \ndef _cleanup():\n\n for p in list(_current_process._children):\n if p._popen.poll()is not None :\n _current_process._children.discard(p)\n \n \n \n \n \n \n \n \n \n \n \n \nclass AuthenticationString(bytes):\n def __reduce__(self):\n from .forking import Popen\n if not Popen.thread_is_spawning():\n raise TypeError(\n 'Pickling an AuthenticationString object is '\n 'disallowed for security reasons'\n )\n return AuthenticationString,(bytes(self),)\n \n \n \n \n \nclass _MainProcess(Process):\n\n def __init__(self):\n self._identity=()\n self._daemonic=False\n self._name='MainProcess'\n self._parent_pid=None\n self._popen=None\n self._counter=itertools.count(1)\n self._children=set()\n self._authkey=AuthenticationString(os.urandom(32))\n self._tempdir=None\n \n_current_process=_MainProcess()\ndel _MainProcess\n\n\n\n\n\n_exitcode_to_name={}\n\nfor name,signum in list(signal.__dict__.items()):\n if name[:3]=='SIG'and '_'not in name:\n _exitcode_to_name[-signum]=name\n \n \n_dangling=WeakSet()\n", ["_multiprocessing", "_weakrefset", "itertools", "multiprocessing.forking", "os", "signal", "sys"]], "multiprocessing.util": [".py", "\n\n\n\n\n\n\n\n\nimport sys\nimport functools\nimport os\nimport itertools\nimport weakref\nimport atexit\nimport threading\n\nfrom subprocess import _args_from_interpreter_flags\n\nfrom multiprocessing.process import current_process,active_children\n\n__all__=[\n'sub_debug','debug','info','sub_warning','get_logger',\n'log_to_stderr','get_temp_dir','register_after_fork',\n'is_exiting','Finalize','ForkAwareThreadLock','ForkAwareLocal',\n'SUBDEBUG','SUBWARNING',\n]\n\n\n\n\n\nNOTSET=0\nSUBDEBUG=5\nDEBUG=10\nINFO=20\nSUBWARNING=25\n\nLOGGER_NAME='multiprocessing'\nDEFAULT_LOGGING_FORMAT='[%(levelname)s/%(processName)s] %(message)s'\n\n_logger=None\n_log_to_stderr=False\n\ndef sub_debug(msg,*args):\n if _logger:\n _logger.log(SUBDEBUG,msg,*args)\n \ndef debug(msg,*args):\n if _logger:\n _logger.log(DEBUG,msg,*args)\n \ndef info(msg,*args):\n if _logger:\n _logger.log(INFO,msg,*args)\n 
\ndef sub_warning(msg,*args):\n if _logger:\n _logger.log(SUBWARNING,msg,*args)\n \ndef get_logger():\n ''\n\n \n global _logger\n import logging\n \n logging._acquireLock()\n try :\n if not _logger:\n \n _logger=logging.getLogger(LOGGER_NAME)\n _logger.propagate=0\n logging.addLevelName(SUBDEBUG,'SUBDEBUG')\n logging.addLevelName(SUBWARNING,'SUBWARNING')\n \n \n if hasattr(atexit,'unregister'):\n atexit.unregister(_exit_function)\n atexit.register(_exit_function)\n else :\n atexit._exithandlers.remove((_exit_function,(),{}))\n atexit._exithandlers.append((_exit_function,(),{}))\n \n finally :\n logging._releaseLock()\n \n return _logger\n \ndef log_to_stderr(level=None ):\n ''\n\n \n global _log_to_stderr\n import logging\n \n logger=get_logger()\n formatter=logging.Formatter(DEFAULT_LOGGING_FORMAT)\n handler=logging.StreamHandler()\n handler.setFormatter(formatter)\n logger.addHandler(handler)\n \n if level:\n logger.setLevel(level)\n _log_to_stderr=True\n return _logger\n \n \n \n \n \ndef get_temp_dir():\n\n if current_process()._tempdir is None :\n import shutil,tempfile\n tempdir=tempfile.mkdtemp(prefix='pymp-')\n info('created temp directory %s',tempdir)\n Finalize(None ,shutil.rmtree,args=[tempdir],exitpriority=-100)\n current_process()._tempdir=tempdir\n return current_process()._tempdir\n \n \n \n \n \n_afterfork_registry=weakref.WeakValueDictionary()\n_afterfork_counter=itertools.count()\n\ndef _run_after_forkers():\n items=list(_afterfork_registry.items())\n items.sort()\n for (index,ident,func),obj in items:\n try :\n func(obj)\n except Exception as e:\n info('after forker raised exception %s',e)\n \ndef register_after_fork(obj,func):\n _afterfork_registry[(next(_afterfork_counter),id(obj),func)]=obj\n \n \n \n \n \n_finalizer_registry={}\n_finalizer_counter=itertools.count()\n\n\nclass Finalize(object):\n ''\n\n \n def __init__(self,obj,callback,args=(),kwargs=None ,exitpriority=None ):\n assert exitpriority is None or type(exitpriority)is int\n \n if obj is not None :\n self._weakref=weakref.ref(obj,self)\n else :\n assert exitpriority is not None\n \n self._callback=callback\n self._args=args\n self._kwargs=kwargs or {}\n self._key=(exitpriority,next(_finalizer_counter))\n self._pid=os.getpid()\n \n _finalizer_registry[self._key]=self\n \n def __call__(self,wr=None ,\n \n \n _finalizer_registry=_finalizer_registry,\n sub_debug=sub_debug,getpid=os.getpid):\n ''\n\n \n try :\n del _finalizer_registry[self._key]\n except KeyError:\n sub_debug('finalizer no longer registered')\n else :\n if self._pid !=getpid():\n sub_debug('finalizer ignored because different process')\n res=None\n else :\n sub_debug('finalizer calling %s with args %s and kwargs %s',\n self._callback,self._args,self._kwargs)\n res=self._callback(*self._args,**self._kwargs)\n self._weakref=self._callback=self._args=\\\n self._kwargs=self._key=None\n return res\n \n def cancel(self):\n ''\n\n \n try :\n del _finalizer_registry[self._key]\n except KeyError:\n pass\n else :\n self._weakref=self._callback=self._args=\\\n self._kwargs=self._key=None\n \n def still_active(self):\n ''\n\n \n return self._key in _finalizer_registry\n \n def __repr__(self):\n try :\n obj=self._weakref()\n except (AttributeError,TypeError):\n obj=None\n \n if obj is None :\n return ''\n \n x=''\n \n \ndef _run_finalizers(minpriority=None ):\n ''\n\n\n\n\n \n if _finalizer_registry is None :\n \n \n \n return\n \n if minpriority is None :\n f=lambda p:p[0][0]is not None\n else :\n f=lambda p:p[0][0]is not None and p[0][0]>=minpriority\n 
\n items=[x for x in list(_finalizer_registry.items())if f(x)]\n items.sort(reverse=True )\n \n for key,finalizer in items:\n sub_debug('calling %s',finalizer)\n try :\n finalizer()\n except Exception:\n import traceback\n traceback.print_exc()\n \n if minpriority is None :\n _finalizer_registry.clear()\n \n \n \n \n \ndef is_exiting():\n ''\n\n \n return _exiting or _exiting is None\n \n_exiting=False\n\ndef _exit_function(info=info,debug=debug,_run_finalizers=_run_finalizers,\nactive_children=active_children,\ncurrent_process=current_process):\n\n\n\n\n global _exiting\n \n if not _exiting:\n _exiting=True\n \n info('process shutting down')\n debug('running all \"atexit\" finalizers with priority >= 0')\n _run_finalizers(0)\n \n if current_process()is not None :\n \n \n \n \n \n \n \n \n \n \n \n \n \n for p in active_children():\n if p._daemonic:\n info('calling terminate() for daemon %s',p.name)\n p._popen.terminate()\n \n for p in active_children():\n info('calling join() for process %s',p.name)\n p.join()\n \n debug('running the remaining \"atexit\" finalizers')\n _run_finalizers()\n \natexit.register(_exit_function)\n\n\n\n\n\nclass ForkAwareThreadLock(object):\n def __init__(self):\n self._reset()\n register_after_fork(self,ForkAwareThreadLock._reset)\n \n def _reset(self):\n self._lock=threading.Lock()\n self.acquire=self._lock.acquire\n self.release=self._lock.release\n \nclass ForkAwareLocal(threading.local):\n def __init__(self):\n register_after_fork(self,lambda obj:obj.__dict__.clear())\n def __reduce__(self):\n return type(self),()\n", ["atexit", "functools", "itertools", "logging", "multiprocessing.process", "os", "shutil", "subprocess", "sys", "tempfile", "threading", "traceback", "weakref"]], "multiprocessing": [".py", "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n__version__='0.70a1'\n\n__all__=[\n'Process','current_process','active_children','freeze_support',\n'Manager','Pipe','cpu_count','log_to_stderr','get_logger',\n'allow_connection_pickling','BufferTooShort','TimeoutError',\n'Lock','RLock','Semaphore','BoundedSemaphore','Condition',\n'Event','Barrier','Queue','SimpleQueue','JoinableQueue','Pool',\n'Value','Array','RawValue','RawArray','SUBDEBUG','SUBWARNING',\n]\n\n__author__='R. 
Oudkerk (r.m.oudkerk@gmail.com)'\n\n\n\n\n\nimport os\nimport sys\n\nfrom multiprocessing.process import Process,current_process,active_children\nfrom multiprocessing.util import SUBDEBUG,SUBWARNING\n\n\n\n\n\nclass ProcessError(Exception):\n pass\n \nclass BufferTooShort(ProcessError):\n pass\n \nclass TimeoutError(ProcessError):\n pass\n \nclass AuthenticationError(ProcessError):\n pass\n \nimport _multiprocessing\n\n\n\n\n\ndef Manager():\n ''\n\n\n\n\n \n from multiprocessing.managers import SyncManager\n m=SyncManager()\n m.start()\n return m\n \n \n \n \n \n \n \n \n \ndef cpu_count():\n ''\n\n \n if sys.platform =='win32':\n try :\n num=int(os.environ['NUMBER_OF_PROCESSORS'])\n except (ValueError,KeyError):\n num=0\n elif 'bsd'in sys.platform or sys.platform =='darwin':\n comm='/sbin/sysctl -n hw.ncpu'\n if sys.platform =='darwin':\n comm='/usr'+comm\n try :\n with os.popen(comm)as p:\n num=int(p.read())\n except ValueError:\n num=0\n else :\n try :\n num=os.sysconf('SC_NPROCESSORS_ONLN')\n except (ValueError,OSError,AttributeError):\n num=0\n \n if num >=1:\n return num\n else :\n raise NotImplementedError('cannot determine number of cpus')\n \ndef freeze_support():\n ''\n\n\n \n if sys.platform =='win32'and getattr(sys,'frozen',False ):\n from multiprocessing.forking import freeze_support\n freeze_support()\n \ndef get_logger():\n ''\n\n \n from multiprocessing.util import get_logger\n return get_logger()\n \ndef log_to_stderr(level=None ):\n ''\n\n \n from multiprocessing.util import log_to_stderr\n return log_to_stderr(level)\n \n \n \n \n \n \n \n \n \n \n \n \n \n \ndef Lock():\n ''\n\n \n from multiprocessing.synchronize import Lock\n return Lock()\n \ndef RLock():\n ''\n\n \n from multiprocessing.synchronize import RLock\n return RLock()\n \ndef Condition(lock=None ):\n ''\n\n \n from multiprocessing.synchronize import Condition\n return Condition(lock)\n \ndef Semaphore(value=1):\n ''\n\n \n from multiprocessing.synchronize import Semaphore\n return Semaphore(value)\n \ndef BoundedSemaphore(value=1):\n ''\n\n \n from multiprocessing.synchronize import BoundedSemaphore\n return BoundedSemaphore(value)\n \ndef Event():\n ''\n\n \n from multiprocessing.synchronize import Event\n return Event()\n \ndef Barrier(parties,action=None ,timeout=None ):\n ''\n\n \n from multiprocessing.synchronize import Barrier\n return Barrier(parties,action,timeout)\n \ndef Queue(maxsize=0):\n ''\n\n \n from multiprocessing.queues import Queue\n return Queue(maxsize)\n \ndef JoinableQueue(maxsize=0):\n ''\n\n \n from multiprocessing.queues import JoinableQueue\n return JoinableQueue(maxsize)\n \ndef SimpleQueue():\n ''\n\n \n from multiprocessing.queues import SimpleQueue\n return SimpleQueue()\n \ndef Pool(processes=None ,initializer=None ,initargs=(),maxtasksperchild=None ):\n ''\n\n \n from multiprocessing.pool import Pool\n return Pool(processes,initializer,initargs,maxtasksperchild)\n \ndef RawValue(typecode_or_type,*args):\n ''\n\n \n from multiprocessing.sharedctypes import RawValue\n return RawValue(typecode_or_type,*args)\n \ndef RawArray(typecode_or_type,size_or_initializer):\n ''\n\n \n from multiprocessing.sharedctypes import RawArray\n return RawArray(typecode_or_type,size_or_initializer)\n \ndef Value(typecode_or_type,*args,lock=True ):\n ''\n\n \n from multiprocessing.sharedctypes import Value\n return Value(typecode_or_type,*args,lock=lock)\n \ndef Array(typecode_or_type,size_or_initializer,*,lock=True ):\n ''\n\n \n from multiprocessing.sharedctypes import Array\n return 
Array(typecode_or_type,size_or_initializer,lock=lock)\n \n \n \n \n \nif sys.platform =='win32':\n\n def set_executable(executable):\n ''\n\n\n\n \n from multiprocessing.forking import set_executable\n set_executable(executable)\n \n __all__ +=['set_executable']\n", ["_multiprocessing", "multiprocessing.forking", "multiprocessing.managers", "multiprocessing.pool", "multiprocessing.process", "multiprocessing.queues", "multiprocessing.sharedctypes", "multiprocessing.synchronize", "multiprocessing.util", "os", "sys"], 1], "multiprocessing.dummy.connection": [".py", "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n__all__=['Client','Listener','Pipe']\n\nfrom queue import Queue\n\n\nfamilies=[None ]\n\n\nclass Listener(object):\n\n def __init__(self,address=None ,family=None ,backlog=1):\n self._backlog_queue=Queue(backlog)\n \n def accept(self):\n return Connection(*self._backlog_queue.get())\n \n def close(self):\n self._backlog_queue=None\n \n address=property(lambda self:self._backlog_queue)\n \n def __enter__(self):\n return self\n \n def __exit__(self,exc_type,exc_value,exc_tb):\n self.close()\n \n \ndef Client(address):\n _in,_out=Queue(),Queue()\n address.put((_out,_in))\n return Connection(_in,_out)\n \n \ndef Pipe(duplex=True ):\n a,b=Queue(),Queue()\n return Connection(a,b),Connection(b,a)\n \n \nclass Connection(object):\n\n def __init__(self,_in,_out):\n self._out=_out\n self._in=_in\n self.send=self.send_bytes=_out.put\n self.recv=self.recv_bytes=_in.get\n \n def poll(self,timeout=0.0):\n if self._in.qsize()>0:\n return True\n if timeout <=0.0:\n return False\n self._in.not_empty.acquire()\n self._in.not_empty.wait(timeout)\n self._in.not_empty.release()\n return self._in.qsize()>0\n \n def close(self):\n pass\n \n def __enter__(self):\n return self\n \n def __exit__(self,exc_type,exc_value,exc_tb):\n self.close()\n", ["queue"]], "multiprocessing.dummy": [".py", "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n__all__=[\n'Process','current_process','active_children','freeze_support',\n'Lock','RLock','Semaphore','BoundedSemaphore','Condition',\n'Event','Barrier','Queue','Manager','Pipe','Pool','JoinableQueue'\n]\n\n\n\n\n\nimport threading\nimport sys\nimport weakref\n\n\n\nfrom multiprocessing.dummy.connection import Pipe\nfrom threading import Lock,RLock,Semaphore,BoundedSemaphore\nfrom threading import Event,Condition,Barrier\nfrom queue import Queue\n\n\n\n\n\nclass DummyProcess(threading.Thread):\n\n def __init__(self,group=None ,target=None ,name=None ,args=(),kwargs={}):\n threading.Thread.__init__(self,group,target,name,args,kwargs)\n self._pid=None\n self._children=weakref.WeakKeyDictionary()\n self._start_called=False\n self._parent=current_process()\n \n def start(self):\n assert self._parent is current_process()\n self._start_called=True\n if hasattr(self._parent,'_children'):\n self._parent._children[self]=None\n threading.Thread.start(self)\n \n @property\n def exitcode(self):\n if self._start_called and not self.is_alive():\n return 0\n else :\n return None\n \n \n \n \n \nProcess=DummyProcess\ncurrent_process=threading.current_thread\ncurrent_process()._children=weakref.WeakKeyDictionary()\n\ndef active_children():\n children=current_process()._children\n for p in list(children):\n if not p.is_alive():\n children.pop(p,None )\n return list(children)\n \ndef freeze_support():\n pass\n \n \n \n \n \nclass Namespace(object):\n def __init__(self,**kwds):\n self.__dict__.update(kwds)\n def __repr__(self):\n 
items=list(self.__dict__.items())\n temp=[]\n for name,value in items:\n if not name.startswith('_'):\n temp.append('%s=%r'%(name,value))\n temp.sort()\n return 'Namespace(%s)'%str.join(', ',temp)\n \ndict=dict\nlist=list\n\n\n\n\n\nclass Value(object):\n def __init__(self,typecode,value,lock=True ):\n self._typecode=typecode\n self._value=value\n def _get(self):\n return self._value\n def _set(self,value):\n self._value=value\n value=property(_get,_set)\n def __repr__(self):\n return '<%r(%r, %r)>'%(type(self).__name__,self._typecode,self._value)\n \ndef Manager():\n return sys.modules[__name__]\n \ndef shutdown():\n pass\n \ndef Pool(processes=None ,initializer=None ,initargs=()):\n from multiprocessing.pool import ThreadPool\n return ThreadPool(processes,initializer,initargs)\n \nJoinableQueue=Queue\n", ["multiprocessing.dummy.connection", "multiprocessing.pool", "queue", "sys", "threading", "weakref"], 1], "pydoc_data.topics": [".py", "\n\ntopics={'assert':'\\nThe ``assert`` statement\\n************************\\n\\nAssert statements are a convenient way to insert debugging assertions\\ninto a program:\\n\\n assert_stmt ::= \"assert\" expression [\",\" expression]\\n\\nThe simple form, ``assert expression``, is equivalent to\\n\\n if __debug__:\\n if not expression: raise AssertionError\\n\\nThe extended form, ``assert expression1, expression2``, is equivalent\\nto\\n\\n if __debug__:\\n if not expression1: raise AssertionError(expression2)\\n\\nThese equivalences assume that ``__debug__`` and ``AssertionError``\\nrefer to the built-in variables with those names. In the current\\nimplementation, the built-in variable ``__debug__`` is ``True`` under\\nnormal circumstances, ``False`` when optimization is requested\\n(command line option -O). The current code generator emits no code\\nfor an assert statement when optimization is requested at compile\\ntime. Note that it is unnecessary to include the source code for the\\nexpression that failed in the error message; it will be displayed as\\npart of the stack trace.\\n\\nAssignments to ``__debug__`` are illegal. The value for the built-in\\nvariable is determined when the interpreter starts.\\n',\n'assignment':'\\nAssignment statements\\n*********************\\n\\nAssignment statements are used to (re)bind names to values and to\\nmodify attributes or items of mutable objects:\\n\\n assignment_stmt ::= (target_list \"=\")+ (expression_list | yield_expression)\\n target_list ::= target (\",\" target)* [\",\"]\\n target ::= identifier\\n | \"(\" target_list \")\"\\n | \"[\" target_list \"]\"\\n | attributeref\\n | subscription\\n | slicing\\n | \"*\" target\\n\\n(See section *Primaries* for the syntax definitions for the last three\\nsymbols.)\\n\\nAn assignment statement evaluates the expression list (remember that\\nthis can be a single expression or a comma-separated list, the latter\\nyielding a tuple) and assigns the single resulting object to each of\\nthe target lists, from left to right.\\n\\nAssignment is defined recursively depending on the form of the target\\n(list). When a target is part of a mutable object (an attribute\\nreference, subscription or slicing), the mutable object must\\nultimately perform the assignment and decide about its validity, and\\nmay raise an exception if the assignment is unacceptable. 
The rules\\nobserved by various types and the exceptions raised are given with the\\ndefinition of the object types (see section *The standard type\\nhierarchy*).\\n\\nAssignment of an object to a target list, optionally enclosed in\\nparentheses or square brackets, is recursively defined as follows.\\n\\n* If the target list is a single target: The object is assigned to\\n that target.\\n\\n* If the target list is a comma-separated list of targets: The object\\n must be an iterable with the same number of items as there are\\n targets in the target list, and the items are assigned, from left to\\n right, to the corresponding targets.\\n\\n * If the target list contains one target prefixed with an asterisk,\\n called a \"starred\" target: The object must be a sequence with at\\n least as many items as there are targets in the target list, minus\\n one. The first items of the sequence are assigned, from left to\\n right, to the targets before the starred target. The final items\\n of the sequence are assigned to the targets after the starred\\n target. A list of the remaining items in the sequence is then\\n assigned to the starred target (the list can be empty).\\n\\n * Else: The object must be a sequence with the same number of items\\n as there are targets in the target list, and the items are\\n assigned, from left to right, to the corresponding targets.\\n\\nAssignment of an object to a single target is recursively defined as\\nfollows.\\n\\n* If the target is an identifier (name):\\n\\n * If the name does not occur in a ``global`` or ``nonlocal``\\n statement in the current code block: the name is bound to the\\n object in the current local namespace.\\n\\n * Otherwise: the name is bound to the object in the global namespace\\n or the outer namespace determined by ``nonlocal``, respectively.\\n\\n The name is rebound if it was already bound. This may cause the\\n reference count for the object previously bound to the name to reach\\n zero, causing the object to be deallocated and its destructor (if it\\n has one) to be called.\\n\\n* If the target is a target list enclosed in parentheses or in square\\n brackets: The object must be an iterable with the same number of\\n items as there are targets in the target list, and its items are\\n assigned, from left to right, to the corresponding targets.\\n\\n* If the target is an attribute reference: The primary expression in\\n the reference is evaluated. It should yield an object with\\n assignable attributes; if this is not the case, ``TypeError`` is\\n raised. That object is then asked to assign the assigned object to\\n the given attribute; if it cannot perform the assignment, it raises\\n an exception (usually but not necessarily ``AttributeError``).\\n\\n Note: If the object is a class instance and the attribute reference\\n occurs on both sides of the assignment operator, the RHS expression,\\n ``a.x`` can access either an instance attribute or (if no instance\\n attribute exists) a class attribute. 
The LHS target ``a.x`` is\\n always set as an instance attribute, creating it if necessary.\\n Thus, the two occurrences of ``a.x`` do not necessarily refer to the\\n same attribute: if the RHS expression refers to a class attribute,\\n the LHS creates a new instance attribute as the target of the\\n assignment:\\n\\n class Cls:\\n x = 3 # class variable\\n inst = Cls()\\n inst.x = inst.x + 1 # writes inst.x as 4 leaving Cls.x as 3\\n\\n This description does not necessarily apply to descriptor\\n attributes, such as properties created with ``property()``.\\n\\n* If the target is a subscription: The primary expression in the\\n reference is evaluated. It should yield either a mutable sequence\\n object (such as a list) or a mapping object (such as a dictionary).\\n Next, the subscript expression is evaluated.\\n\\n If the primary is a mutable sequence object (such as a list), the\\n subscript must yield an integer. If it is negative, the sequence\\'s\\n length is added to it. The resulting value must be a nonnegative\\n integer less than the sequence\\'s length, and the sequence is asked\\n to assign the assigned object to its item with that index. If the\\n index is out of range, ``IndexError`` is raised (assignment to a\\n subscripted sequence cannot add new items to a list).\\n\\n If the primary is a mapping object (such as a dictionary), the\\n subscript must have a type compatible with the mapping\\'s key type,\\n and the mapping is then asked to create a key/datum pair which maps\\n the subscript to the assigned object. This can either replace an\\n existing key/value pair with the same key value, or insert a new\\n key/value pair (if no key with the same value existed).\\n\\n For user-defined objects, the ``__setitem__()`` method is called\\n with appropriate arguments.\\n\\n* If the target is a slicing: The primary expression in the reference\\n is evaluated. It should yield a mutable sequence object (such as a\\n list). The assigned object should be a sequence object of the same\\n type. Next, the lower and upper bound expressions are evaluated,\\n insofar they are present; defaults are zero and the sequence\\'s\\n length. The bounds should evaluate to integers. If either bound is\\n negative, the sequence\\'s length is added to it. The resulting\\n bounds are clipped to lie between zero and the sequence\\'s length,\\n inclusive. Finally, the sequence object is asked to replace the\\n slice with the items of the assigned sequence. The length of the\\n slice may be different from the length of the assigned sequence,\\n thus changing the length of the target sequence, if the object\\n allows it.\\n\\n**CPython implementation detail:** In the current implementation, the\\nsyntax for targets is taken to be the same as for expressions, and\\ninvalid syntax is rejected during the code generation phase, causing\\nless detailed error messages.\\n\\nWARNING: Although the definition of assignment implies that overlaps\\nbetween the left-hand side and the right-hand side are \\'safe\\' (for\\nexample ``a, b = b, a`` swaps two variables), overlaps *within* the\\ncollection of assigned-to variables are not safe! 
For instance, the\\nfollowing program prints ``[0, 2]``:\\n\\n x = [0, 1]\\n i = 0\\n i, x[i] = 1, 2\\n print(x)\\n\\nSee also:\\n\\n **PEP 3132** - Extended Iterable Unpacking\\n The specification for the ``*target`` feature.\\n\\n\\nAugmented assignment statements\\n===============================\\n\\nAugmented assignment is the combination, in a single statement, of a\\nbinary operation and an assignment statement:\\n\\n augmented_assignment_stmt ::= augtarget augop (expression_list | yield_expression)\\n augtarget ::= identifier | attributeref | subscription | slicing\\n augop ::= \"+=\" | \"-=\" | \"*=\" | \"/=\" | \"//=\" | \"%=\" | \"**=\"\\n | \">>=\" | \"<<=\" | \"&=\" | \"^=\" | \"|=\"\\n\\n(See section *Primaries* for the syntax definitions for the last three\\nsymbols.)\\n\\nAn augmented assignment evaluates the target (which, unlike normal\\nassignment statements, cannot be an unpacking) and the expression\\nlist, performs the binary operation specific to the type of assignment\\non the two operands, and assigns the result to the original target.\\nThe target is only evaluated once.\\n\\nAn augmented assignment expression like ``x += 1`` can be rewritten as\\n``x = x + 1`` to achieve a similar, but not exactly equal effect. In\\nthe augmented version, ``x`` is only evaluated once. Also, when\\npossible, the actual operation is performed *in-place*, meaning that\\nrather than creating a new object and assigning that to the target,\\nthe old object is modified instead.\\n\\nWith the exception of assigning to tuples and multiple targets in a\\nsingle statement, the assignment done by augmented assignment\\nstatements is handled the same way as normal assignments. Similarly,\\nwith the exception of the possible *in-place* behavior, the binary\\noperation performed by augmented assignment is the same as the normal\\nbinary operations.\\n\\nFor targets which are attribute references, the same *caveat about\\nclass and instance attributes* applies as for regular assignments.\\n',\n'atom-identifiers':'\\nIdentifiers (Names)\\n*******************\\n\\nAn identifier occurring as an atom is a name. See section\\n*Identifiers and keywords* for lexical definition and section *Naming\\nand binding* for documentation of naming and binding.\\n\\nWhen the name is bound to an object, evaluation of the atom yields\\nthat object. When a name is not bound, an attempt to evaluate it\\nraises a ``NameError`` exception.\\n\\n**Private name mangling:** When an identifier that textually occurs in\\na class definition begins with two or more underscore characters and\\ndoes not end in two or more underscores, it is considered a *private\\nname* of that class. Private names are transformed to a longer form\\nbefore code is generated for them. The transformation inserts the\\nclass name in front of the name, with leading underscores removed, and\\na single underscore inserted in front of the class name. For example,\\nthe identifier ``__spam`` occurring in a class named ``Ham`` will be\\ntransformed to ``_Ham__spam``. This transformation is independent of\\nthe syntactical context in which the identifier is used. If the\\ntransformed name is extremely long (longer than 255 characters),\\nimplementation defined truncation may happen. 
If the class name\\nconsists only of underscores, no transformation is done.\\n',\n'atom-literals':\"\\nLiterals\\n********\\n\\nPython supports string and bytes literals and various numeric\\nliterals:\\n\\n literal ::= stringliteral | bytesliteral\\n | integer | floatnumber | imagnumber\\n\\nEvaluation of a literal yields an object of the given type (string,\\nbytes, integer, floating point number, complex number) with the given\\nvalue. The value may be approximated in the case of floating point\\nand imaginary (complex) literals. See section *Literals* for details.\\n\\nAll literals correspond to immutable data types, and hence the\\nobject's identity is less important than its value. Multiple\\nevaluations of literals with the same value (either the same\\noccurrence in the program text or a different occurrence) may obtain\\nthe same object or a different object with the same value.\\n\",\n'attribute-access':'\\nCustomizing attribute access\\n****************************\\n\\nThe following methods can be defined to customize the meaning of\\nattribute access (use of, assignment to, or deletion of ``x.name``)\\nfor class instances.\\n\\nobject.__getattr__(self, name)\\n\\n Called when an attribute lookup has not found the attribute in the\\n usual places (i.e. it is not an instance attribute nor is it found\\n in the class tree for ``self``). ``name`` is the attribute name.\\n This method should return the (computed) attribute value or raise\\n an ``AttributeError`` exception.\\n\\n Note that if the attribute is found through the normal mechanism,\\n ``__getattr__()`` is not called. (This is an intentional asymmetry\\n between ``__getattr__()`` and ``__setattr__()``.) This is done both\\n for efficiency reasons and because otherwise ``__getattr__()``\\n would have no way to access other attributes of the instance. Note\\n that at least for instance variables, you can fake total control by\\n not inserting any values in the instance attribute dictionary (but\\n instead inserting them in another object). See the\\n ``__getattribute__()`` method below for a way to actually get total\\n control over attribute access.\\n\\nobject.__getattribute__(self, name)\\n\\n Called unconditionally to implement attribute accesses for\\n instances of the class. If the class also defines\\n ``__getattr__()``, the latter will not be called unless\\n ``__getattribute__()`` either calls it explicitly or raises an\\n ``AttributeError``. This method should return the (computed)\\n attribute value or raise an ``AttributeError`` exception. In order\\n to avoid infinite recursion in this method, its implementation\\n should always call the base class method with the same name to\\n access any attributes it needs, for example,\\n ``object.__getattribute__(self, name)``.\\n\\n Note: This method may still be bypassed when looking up special methods\\n as the result of implicit invocation via language syntax or\\n built-in functions. See *Special method lookup*.\\n\\nobject.__setattr__(self, name, value)\\n\\n Called when an attribute assignment is attempted. This is called\\n instead of the normal mechanism (i.e. store the value in the\\n instance dictionary). 
*name* is the attribute name, *value* is the\\n value to be assigned to it.\\n\\n If ``__setattr__()`` wants to assign to an instance attribute, it\\n should call the base class method with the same name, for example,\\n ``object.__setattr__(self, name, value)``.\\n\\nobject.__delattr__(self, name)\\n\\n Like ``__setattr__()`` but for attribute deletion instead of\\n assignment. This should only be implemented if ``del obj.name`` is\\n meaningful for the object.\\n\\nobject.__dir__(self)\\n\\n Called when ``dir()`` is called on the object. A sequence must be\\n returned. ``dir()`` converts the returned sequence to a list and\\n sorts it.\\n\\n\\nImplementing Descriptors\\n========================\\n\\nThe following methods only apply when an instance of the class\\ncontaining the method (a so-called *descriptor* class) appears in an\\n*owner* class (the descriptor must be in either the owner\\'s class\\ndictionary or in the class dictionary for one of its parents). In the\\nexamples below, \"the attribute\" refers to the attribute whose name is\\nthe key of the property in the owner class\\' ``__dict__``.\\n\\nobject.__get__(self, instance, owner)\\n\\n Called to get the attribute of the owner class (class attribute\\n access) or of an instance of that class (instance attribute\\n access). *owner* is always the owner class, while *instance* is the\\n instance that the attribute was accessed through, or ``None`` when\\n the attribute is accessed through the *owner*. This method should\\n return the (computed) attribute value or raise an\\n ``AttributeError`` exception.\\n\\nobject.__set__(self, instance, value)\\n\\n Called to set the attribute on an instance *instance* of the owner\\n class to a new value, *value*.\\n\\nobject.__delete__(self, instance)\\n\\n Called to delete the attribute on an instance *instance* of the\\n owner class.\\n\\n\\nInvoking Descriptors\\n====================\\n\\nIn general, a descriptor is an object attribute with \"binding\\nbehavior\", one whose attribute access has been overridden by methods\\nin the descriptor protocol: ``__get__()``, ``__set__()``, and\\n``__delete__()``. If any of those methods are defined for an object,\\nit is said to be a descriptor.\\n\\nThe default behavior for attribute access is to get, set, or delete\\nthe attribute from an object\\'s dictionary. For instance, ``a.x`` has a\\nlookup chain starting with ``a.__dict__[\\'x\\']``, then\\n``type(a).__dict__[\\'x\\']``, and continuing through the base classes of\\n``type(a)`` excluding metaclasses.\\n\\nHowever, if the looked-up value is an object defining one of the\\ndescriptor methods, then Python may override the default behavior and\\ninvoke the descriptor method instead. 
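
The following minimal sketch (class names are illustrative) shows that override in action: because ``x`` in the class dictionary is a data descriptor, looking up ``c.x`` goes through ``__get__()`` and assigning to ``c.x`` goes through ``__set__()``:

    class Verbose:
        # Data descriptor: defines both __get__() and __set__().
        def __get__(self, instance, owner):
            print("__get__ called")
            return instance.__dict__.get("_x", 0)
        def __set__(self, instance, value):
            print("__set__ called")
            instance.__dict__["_x"] = value

    class C:
        x = Verbose()       # found in type(c).__dict__['x']

    c = C()
    c.x = 10                # prints "__set__ called"
    print(c.x)              # prints "__get__ called", then 10
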
Where this occurs in the\\nprecedence chain depends on which descriptor methods were defined and\\nhow they were called.\\n\\nThe starting point for descriptor invocation is a binding, ``a.x``.\\nHow the arguments are assembled depends on ``a``:\\n\\nDirect Call\\n The simplest and least common call is when user code directly\\n invokes a descriptor method: ``x.__get__(a)``.\\n\\nInstance Binding\\n If binding to an object instance, ``a.x`` is transformed into the\\n call: ``type(a).__dict__[\\'x\\'].__get__(a, type(a))``.\\n\\nClass Binding\\n If binding to a class, ``A.x`` is transformed into the call:\\n ``A.__dict__[\\'x\\'].__get__(None, A)``.\\n\\nSuper Binding\\n If ``a`` is an instance of ``super``, then the binding ``super(B,\\n obj).m()`` searches ``obj.__class__.__mro__`` for the base class\\n ``A`` immediately preceding ``B`` and then invokes the descriptor\\n with the call: ``A.__dict__[\\'m\\'].__get__(obj, obj.__class__)``.\\n\\nFor instance bindings, the precedence of descriptor invocation depends\\non the which descriptor methods are defined. A descriptor can define\\nany combination of ``__get__()``, ``__set__()`` and ``__delete__()``.\\nIf it does not define ``__get__()``, then accessing the attribute will\\nreturn the descriptor object itself unless there is a value in the\\nobject\\'s instance dictionary. If the descriptor defines ``__set__()``\\nand/or ``__delete__()``, it is a data descriptor; if it defines\\nneither, it is a non-data descriptor. Normally, data descriptors\\ndefine both ``__get__()`` and ``__set__()``, while non-data\\ndescriptors have just the ``__get__()`` method. Data descriptors with\\n``__set__()`` and ``__get__()`` defined always override a redefinition\\nin an instance dictionary. In contrast, non-data descriptors can be\\noverridden by instances.\\n\\nPython methods (including ``staticmethod()`` and ``classmethod()``)\\nare implemented as non-data descriptors. Accordingly, instances can\\nredefine and override methods. This allows individual instances to\\nacquire behaviors that differ from other instances of the same class.\\n\\nThe ``property()`` function is implemented as a data descriptor.\\nAccordingly, instances cannot override the behavior of a property.\\n\\n\\n__slots__\\n=========\\n\\nBy default, instances of classes have a dictionary for attribute\\nstorage. This wastes space for objects having very few instance\\nvariables. The space consumption can become acute when creating large\\nnumbers of instances.\\n\\nThe default can be overridden by defining *__slots__* in a class\\ndefinition. The *__slots__* declaration takes a sequence of instance\\nvariables and reserves just enough space in each instance to hold a\\nvalue for each variable. Space is saved because *__dict__* is not\\ncreated for each instance.\\n\\nobject.__slots__\\n\\n This class variable can be assigned a string, iterable, or sequence\\n of strings with variable names used by instances. If defined in a\\n class, *__slots__* reserves space for the declared variables and\\n prevents the automatic creation of *__dict__* and *__weakref__* for\\n each instance.\\n\\n\\nNotes on using *__slots__*\\n--------------------------\\n\\n* When inheriting from a class without *__slots__*, the *__dict__*\\n attribute of that class will always be accessible, so a *__slots__*\\n definition in the subclass is meaningless.\\n\\n* Without a *__dict__* variable, instances cannot be assigned new\\n variables not listed in the *__slots__* definition. 
Attempts to\\n assign to an unlisted variable name raises ``AttributeError``. If\\n dynamic assignment of new variables is desired, then add\\n ``\\'__dict__\\'`` to the sequence of strings in the *__slots__*\\n declaration.\\n\\n* Without a *__weakref__* variable for each instance, classes defining\\n *__slots__* do not support weak references to its instances. If weak\\n reference support is needed, then add ``\\'__weakref__\\'`` to the\\n sequence of strings in the *__slots__* declaration.\\n\\n* *__slots__* are implemented at the class level by creating\\n descriptors (*Implementing Descriptors*) for each variable name. As\\n a result, class attributes cannot be used to set default values for\\n instance variables defined by *__slots__*; otherwise, the class\\n attribute would overwrite the descriptor assignment.\\n\\n* The action of a *__slots__* declaration is limited to the class\\n where it is defined. As a result, subclasses will have a *__dict__*\\n unless they also define *__slots__* (which must only contain names\\n of any *additional* slots).\\n\\n* If a class defines a slot also defined in a base class, the instance\\n variable defined by the base class slot is inaccessible (except by\\n retrieving its descriptor directly from the base class). This\\n renders the meaning of the program undefined. In the future, a\\n check may be added to prevent this.\\n\\n* Nonempty *__slots__* does not work for classes derived from\\n \"variable-length\" built-in types such as ``int``, ``str`` and\\n ``tuple``.\\n\\n* Any non-string iterable may be assigned to *__slots__*. Mappings may\\n also be used; however, in the future, special meaning may be\\n assigned to the values corresponding to each key.\\n\\n* *__class__* assignment works only if both classes have the same\\n *__slots__*.\\n',\n'attribute-references':'\\nAttribute references\\n********************\\n\\nAn attribute reference is a primary followed by a period and a name:\\n\\n attributeref ::= primary \".\" identifier\\n\\nThe primary must evaluate to an object of a type that supports\\nattribute references, which most objects do. This object is then\\nasked to produce the attribute whose name is the identifier (which can\\nbe customized by overriding the ``__getattr__()`` method). If this\\nattribute is not available, the exception ``AttributeError`` is\\nraised. Otherwise, the type and value of the object produced is\\ndetermined by the object. 
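
For instance, in the following sketch (the class is illustrative) the attribute is produced by ``__getattr__()``, so each evaluation of the reference can yield a fresh object:

    class Lazy:
        def __getattr__(self, name):
            # Called only when normal attribute lookup fails.
            if name == "items":
                return []              # a new list on every lookup
            raise AttributeError(name)

    obj = Lazy()
    print(obj.items is obj.items)      # False: two evaluations, two objects
    # obj.missing                      # would raise AttributeError
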
Multiple evaluations of the same attribute\\nreference may yield different objects.\\n',\n'augassign':'\\nAugmented assignment statements\\n*******************************\\n\\nAugmented assignment is the combination, in a single statement, of a\\nbinary operation and an assignment statement:\\n\\n augmented_assignment_stmt ::= augtarget augop (expression_list | yield_expression)\\n augtarget ::= identifier | attributeref | subscription | slicing\\n augop ::= \"+=\" | \"-=\" | \"*=\" | \"/=\" | \"//=\" | \"%=\" | \"**=\"\\n | \">>=\" | \"<<=\" | \"&=\" | \"^=\" | \"|=\"\\n\\n(See section *Primaries* for the syntax definitions for the last three\\nsymbols.)\\n\\nAn augmented assignment evaluates the target (which, unlike normal\\nassignment statements, cannot be an unpacking) and the expression\\nlist, performs the binary operation specific to the type of assignment\\non the two operands, and assigns the result to the original target.\\nThe target is only evaluated once.\\n\\nAn augmented assignment expression like ``x += 1`` can be rewritten as\\n``x = x + 1`` to achieve a similar, but not exactly equal effect. In\\nthe augmented version, ``x`` is only evaluated once. Also, when\\npossible, the actual operation is performed *in-place*, meaning that\\nrather than creating a new object and assigning that to the target,\\nthe old object is modified instead.\\n\\nWith the exception of assigning to tuples and multiple targets in a\\nsingle statement, the assignment done by augmented assignment\\nstatements is handled the same way as normal assignments. Similarly,\\nwith the exception of the possible *in-place* behavior, the binary\\noperation performed by augmented assignment is the same as the normal\\nbinary operations.\\n\\nFor targets which are attribute references, the same *caveat about\\nclass and instance attributes* applies as for regular assignments.\\n',\n'binary':'\\nBinary arithmetic operations\\n****************************\\n\\nThe binary arithmetic operations have the conventional priority\\nlevels. Note that some of these operations also apply to certain non-\\nnumeric types. Apart from the power operator, there are only two\\nlevels, one for multiplicative operators and one for additive\\noperators:\\n\\n m_expr ::= u_expr | m_expr \"*\" u_expr | m_expr \"//\" u_expr | m_expr \"/\" u_expr\\n | m_expr \"%\" u_expr\\n a_expr ::= m_expr | a_expr \"+\" m_expr | a_expr \"-\" m_expr\\n\\nThe ``*`` (multiplication) operator yields the product of its\\narguments. The arguments must either both be numbers, or one argument\\nmust be an integer and the other must be a sequence. In the former\\ncase, the numbers are converted to a common type and then multiplied\\ntogether. In the latter case, sequence repetition is performed; a\\nnegative repetition factor yields an empty sequence.\\n\\nThe ``/`` (division) and ``//`` (floor division) operators yield the\\nquotient of their arguments. The numeric arguments are first\\nconverted to a common type. Integer division yields a float, while\\nfloor division of integers results in an integer; the result is that\\nof mathematical division with the \\'floor\\' function applied to the\\nresult. Division by zero raises the ``ZeroDivisionError`` exception.\\n\\nThe ``%`` (modulo) operator yields the remainder from the division of\\nthe first argument by the second. The numeric arguments are first\\nconverted to a common type. A zero right argument raises the\\n``ZeroDivisionError`` exception. 
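
A brief illustrative sketch of the operators just described:

    >>> 7 / 2          # true division yields a float
    3.5
    >>> 7 // 2         # floor division of integers yields an integer
    3
    >>> -7 // 2        # floored, not truncated towards zero
    -4
    >>> 7 % 2          # remainder of the division
    1
    >>> 7 // 0
    Traceback (most recent call last):
      ...
    ZeroDivisionError: integer division or modulo by zero
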
The arguments may be floating point\\nnumbers, e.g., ``3.14%0.7`` equals ``0.34`` (since ``3.14`` equals\\n``4*0.7 + 0.34``.) The modulo operator always yields a result with\\nthe same sign as its second operand (or zero); the absolute value of\\nthe result is strictly smaller than the absolute value of the second\\noperand [1].\\n\\nThe floor division and modulo operators are connected by the following\\nidentity: ``x == (x//y)*y + (x%y)``. Floor division and modulo are\\nalso connected with the built-in function ``divmod()``: ``divmod(x, y)\\n== (x//y, x%y)``. [2].\\n\\nIn addition to performing the modulo operation on numbers, the ``%``\\noperator is also overloaded by string objects to perform old-style\\nstring formatting (also known as interpolation). The syntax for\\nstring formatting is described in the Python Library Reference,\\nsection *printf-style String Formatting*.\\n\\nThe floor division operator, the modulo operator, and the ``divmod()``\\nfunction are not defined for complex numbers. Instead, convert to a\\nfloating point number using the ``abs()`` function if appropriate.\\n\\nThe ``+`` (addition) operator yields the sum of its arguments. The\\narguments must either both be numbers or both sequences of the same\\ntype. In the former case, the numbers are converted to a common type\\nand then added together. In the latter case, the sequences are\\nconcatenated.\\n\\nThe ``-`` (subtraction) operator yields the difference of its\\narguments. The numeric arguments are first converted to a common\\ntype.\\n',\n'bitwise':'\\nBinary bitwise operations\\n*************************\\n\\nEach of the three bitwise operations has a different priority level:\\n\\n and_expr ::= shift_expr | and_expr \"&\" shift_expr\\n xor_expr ::= and_expr | xor_expr \"^\" and_expr\\n or_expr ::= xor_expr | or_expr \"|\" xor_expr\\n\\nThe ``&`` operator yields the bitwise AND of its arguments, which must\\nbe integers.\\n\\nThe ``^`` operator yields the bitwise XOR (exclusive OR) of its\\narguments, which must be integers.\\n\\nThe ``|`` operator yields the bitwise (inclusive) OR of its arguments,\\nwhich must be integers.\\n',\n'bltin-code-objects':'\\nCode Objects\\n************\\n\\nCode objects are used by the implementation to represent \"pseudo-\\ncompiled\" executable Python code such as a function body. They differ\\nfrom function objects because they don\\'t contain a reference to their\\nglobal execution environment. Code objects are returned by the built-\\nin ``compile()`` function and can be extracted from function objects\\nthrough their ``__code__`` attribute. See also the ``code`` module.\\n\\nA code object can be executed or evaluated by passing it (instead of a\\nsource string) to the ``exec()`` or ``eval()`` built-in functions.\\n\\nSee *The standard type hierarchy* for more information.\\n',\n'bltin-ellipsis-object':'\\nThe Ellipsis Object\\n*******************\\n\\nThis object is commonly used by slicing (see *Slicings*). It supports\\nno special operations. There is exactly one ellipsis object, named\\n``Ellipsis`` (a built-in name). ``type(Ellipsis)()`` produces the\\n``Ellipsis`` singleton.\\n\\nIt is written as ``Ellipsis`` or ``...``.\\n',\n'bltin-null-object':\"\\nThe Null Object\\n***************\\n\\nThis object is returned by functions that don't explicitly return a\\nvalue. It supports no special operations. There is exactly one null\\nobject, named ``None`` (a built-in name). 
``type(None)()`` produces\\nthe same singleton.\\n\\nIt is written as ``None``.\\n\",\n'bltin-type-objects':\"\\nType Objects\\n************\\n\\nType objects represent the various object types. An object's type is\\naccessed by the built-in function ``type()``. There are no special\\noperations on types. The standard module ``types`` defines names for\\nall standard built-in types.\\n\\nTypes are written like this: ````.\\n\",\n'booleans':'\\nBoolean operations\\n******************\\n\\n or_test ::= and_test | or_test \"or\" and_test\\n and_test ::= not_test | and_test \"and\" not_test\\n not_test ::= comparison | \"not\" not_test\\n\\nIn the context of Boolean operations, and also when expressions are\\nused by control flow statements, the following values are interpreted\\nas false: ``False``, ``None``, numeric zero of all types, and empty\\nstrings and containers (including strings, tuples, lists,\\ndictionaries, sets and frozensets). All other values are interpreted\\nas true. User-defined objects can customize their truth value by\\nproviding a ``__bool__()`` method.\\n\\nThe operator ``not`` yields ``True`` if its argument is false,\\n``False`` otherwise.\\n\\nThe expression ``x and y`` first evaluates *x*; if *x* is false, its\\nvalue is returned; otherwise, *y* is evaluated and the resulting value\\nis returned.\\n\\nThe expression ``x or y`` first evaluates *x*; if *x* is true, its\\nvalue is returned; otherwise, *y* is evaluated and the resulting value\\nis returned.\\n\\n(Note that neither ``and`` nor ``or`` restrict the value and type they\\nreturn to ``False`` and ``True``, but rather return the last evaluated\\nargument. This is sometimes useful, e.g., if ``s`` is a string that\\nshould be replaced by a default value if it is empty, the expression\\n``s or \\'foo\\'`` yields the desired value. 
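
For example (an illustrative sketch of the short-circuit behaviour just described):

    >>> '' or 'foo'         # empty string is false, so 'foo' is returned
    'foo'
    >>> 'bar' or 'foo'      # 'bar' is true; 'foo' is never evaluated
    'bar'
    >>> 0 and print('x')    # 0 is false, so print() is never called
    0
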
Because ``not`` has to\\ninvent a value anyway, it does not bother to return a value of the\\nsame type as its argument, so e.g., ``not \\'foo\\'`` yields ``False``,\\nnot ``\\'\\'``.)\\n',\n'break':'\\nThe ``break`` statement\\n***********************\\n\\n break_stmt ::= \"break\"\\n\\n``break`` may only occur syntactically nested in a ``for`` or\\n``while`` loop, but not nested in a function or class definition\\nwithin that loop.\\n\\nIt terminates the nearest enclosing loop, skipping the optional\\n``else`` clause if the loop has one.\\n\\nIf a ``for`` loop is terminated by ``break``, the loop control target\\nkeeps its current value.\\n\\nWhen ``break`` passes control out of a ``try`` statement with a\\n``finally`` clause, that ``finally`` clause is executed before really\\nleaving the loop.\\n',\n'callable-types':'\\nEmulating callable objects\\n**************************\\n\\nobject.__call__(self[, args...])\\n\\n Called when the instance is \"called\" as a function; if this method\\n is defined, ``x(arg1, arg2, ...)`` is a shorthand for\\n ``x.__call__(arg1, arg2, ...)``.\\n',\n'calls':'\\nCalls\\n*****\\n\\nA call calls a callable object (e.g., a *function*) with a possibly\\nempty series of *arguments*:\\n\\n call ::= primary \"(\" [argument_list [\",\"] | comprehension] \")\"\\n argument_list ::= positional_arguments [\",\" keyword_arguments]\\n [\",\" \"*\" expression] [\",\" keyword_arguments]\\n [\",\" \"**\" expression]\\n | keyword_arguments [\",\" \"*\" expression]\\n [\",\" keyword_arguments] [\",\" \"**\" expression]\\n | \"*\" expression [\",\" keyword_arguments] [\",\" \"**\" expression]\\n | \"**\" expression\\n positional_arguments ::= expression (\",\" expression)*\\n keyword_arguments ::= keyword_item (\",\" keyword_item)*\\n keyword_item ::= identifier \"=\" expression\\n\\nA trailing comma may be present after the positional and keyword\\narguments but does not affect the semantics.\\n\\nThe primary must evaluate to a callable object (user-defined\\nfunctions, built-in functions, methods of built-in objects, class\\nobjects, methods of class instances, and all objects having a\\n``__call__()`` method are callable). All argument expressions are\\nevaluated before the call is attempted. Please refer to section\\n*Function definitions* for the syntax of formal *parameter* lists.\\n\\nIf keyword arguments are present, they are first converted to\\npositional arguments, as follows. First, a list of unfilled slots is\\ncreated for the formal parameters. If there are N positional\\narguments, they are placed in the first N slots. Next, for each\\nkeyword argument, the identifier is used to determine the\\ncorresponding slot (if the identifier is the same as the first formal\\nparameter name, the first slot is used, and so on). If the slot is\\nalready filled, a ``TypeError`` exception is raised. Otherwise, the\\nvalue of the argument is placed in the slot, filling it (even if the\\nexpression is ``None``, it fills the slot). When all arguments have\\nbeen processed, the slots that are still unfilled are filled with the\\ncorresponding default value from the function definition. (Default\\nvalues are calculated, once, when the function is defined; thus, a\\nmutable object such as a list or dictionary used as default value will\\nbe shared by all calls that don\\'t specify an argument value for the\\ncorresponding slot; this should usually be avoided.) If there are any\\nunfilled slots for which no default value is specified, a\\n``TypeError`` exception is raised. 
Otherwise, the list of filled\\nslots is used as the argument list for the call.\\n\\n**CPython implementation detail:** An implementation may provide\\nbuilt-in functions whose positional parameters do not have names, even\\nif they are \\'named\\' for the purpose of documentation, and which\\ntherefore cannot be supplied by keyword. In CPython, this is the case\\nfor functions implemented in C that use ``PyArg_ParseTuple()`` to\\nparse their arguments.\\n\\nIf there are more positional arguments than there are formal parameter\\nslots, a ``TypeError`` exception is raised, unless a formal parameter\\nusing the syntax ``*identifier`` is present; in this case, that formal\\nparameter receives a tuple containing the excess positional arguments\\n(or an empty tuple if there were no excess positional arguments).\\n\\nIf any keyword argument does not correspond to a formal parameter\\nname, a ``TypeError`` exception is raised, unless a formal parameter\\nusing the syntax ``**identifier`` is present; in this case, that\\nformal parameter receives a dictionary containing the excess keyword\\narguments (using the keywords as keys and the argument values as\\ncorresponding values), or a (new) empty dictionary if there were no\\nexcess keyword arguments.\\n\\nIf the syntax ``*expression`` appears in the function call,\\n``expression`` must evaluate to an iterable. Elements from this\\niterable are treated as if they were additional positional arguments;\\nif there are positional arguments *x1*, ..., *xN*, and ``expression``\\nevaluates to a sequence *y1*, ..., *yM*, this is equivalent to a call\\nwith M+N positional arguments *x1*, ..., *xN*, *y1*, ..., *yM*.\\n\\nA consequence of this is that although the ``*expression`` syntax may\\nappear *after* some keyword arguments, it is processed *before* the\\nkeyword arguments (and the ``**expression`` argument, if any -- see\\nbelow). So:\\n\\n >>> def f(a, b):\\n ... print(a, b)\\n ...\\n >>> f(b=1, *(2,))\\n 2 1\\n >>> f(a=1, *(2,))\\n Traceback (most recent call last):\\n File \"\", line 1, in ?\\n TypeError: f() got multiple values for keyword argument \\'a\\'\\n >>> f(1, *(2,))\\n 1 2\\n\\nIt is unusual for both keyword arguments and the ``*expression``\\nsyntax to be used in the same call, so in practice this confusion does\\nnot arise.\\n\\nIf the syntax ``**expression`` appears in the function call,\\n``expression`` must evaluate to a mapping, the contents of which are\\ntreated as additional keyword arguments. In the case of a keyword\\nappearing in both ``expression`` and as an explicit keyword argument,\\na ``TypeError`` exception is raised.\\n\\nFormal parameters using the syntax ``*identifier`` or ``**identifier``\\ncannot be used as positional argument slots or as keyword argument\\nnames.\\n\\nA call always returns some value, possibly ``None``, unless it raises\\nan exception. How this value is computed depends on the type of the\\ncallable object.\\n\\nIf it is---\\n\\na user-defined function:\\n The code block for the function is executed, passing it the\\n argument list. The first thing the code block will do is bind the\\n formal parameters to the arguments; this is described in section\\n *Function definitions*. 
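
As a short sketch of the binding just mentioned (the function ``g`` is illustrative), excess positional arguments end up in the ``*identifier`` tuple and excess keyword arguments in the ``**identifier`` dictionary:

    >>> def g(a, *args, **kwargs):
    ...     return a, args, kwargs
    ...
    >>> g(1, 2, 3, x=4)
    (1, (2, 3), {'x': 4})
    >>> g(1)
    (1, (), {})
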
When the code block executes a ``return``\\n statement, this specifies the return value of the function call.\\n\\na built-in function or method:\\n The result is up to the interpreter; see *Built-in Functions* for\\n the descriptions of built-in functions and methods.\\n\\na class object:\\n A new instance of that class is returned.\\n\\na class instance method:\\n The corresponding user-defined function is called, with an argument\\n list that is one longer than the argument list of the call: the\\n instance becomes the first argument.\\n\\na class instance:\\n The class must define a ``__call__()`` method; the effect is then\\n the same as if that method was called.\\n',\n'class':'\\nClass definitions\\n*****************\\n\\nA class definition defines a class object (see section *The standard\\ntype hierarchy*):\\n\\n classdef ::= [decorators] \"class\" classname [inheritance] \":\" suite\\n inheritance ::= \"(\" [parameter_list] \")\"\\n classname ::= identifier\\n\\nA class definition is an executable statement. The inheritance list\\nusually gives a list of base classes (see *Customizing class creation*\\nfor more advanced uses), so each item in the list should evaluate to a\\nclass object which allows subclassing. Classes without an inheritance\\nlist inherit, by default, from the base class ``object``; hence,\\n\\n class Foo:\\n pass\\n\\nis equivalent to\\n\\n class Foo(object):\\n pass\\n\\nThe class\\'s suite is then executed in a new execution frame (see\\n*Naming and binding*), using a newly created local namespace and the\\noriginal global namespace. (Usually, the suite contains mostly\\nfunction definitions.) When the class\\'s suite finishes execution, its\\nexecution frame is discarded but its local namespace is saved. [4] A\\nclass object is then created using the inheritance list for the base\\nclasses and the saved local namespace for the attribute dictionary.\\nThe class name is bound to this class object in the original local\\nnamespace.\\n\\nClass creation can be customized heavily using *metaclasses*.\\n\\nClasses can also be decorated: just like when decorating functions,\\n\\n @f1(arg)\\n @f2\\n class Foo: pass\\n\\nis equivalent to\\n\\n class Foo: pass\\n Foo = f1(arg)(f2(Foo))\\n\\nThe evaluation rules for the decorator expressions are the same as for\\nfunction decorators. The result must be a class object, which is then\\nbound to the class name.\\n\\n**Programmer\\'s note:** Variables defined in the class definition are\\nclass attributes; they are shared by instances. Instance attributes\\ncan be set in a method with ``self.name = value``. Both class and\\ninstance attributes are accessible through the notation\\n\"``self.name``\", and an instance attribute hides a class attribute\\nwith the same name when accessed in this way. Class attributes can be\\nused as defaults for instance attributes, but using mutable values\\nthere can lead to unexpected results. 
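
For example (an illustrative sketch of the pitfall just mentioned), a mutable class attribute is shared by every instance that has not shadowed it:

    class Bag:
        items = []                  # class attribute, shared by all instances

        def add(self, x):
            self.items.append(x)    # mutates the shared list

    a = Bag()
    b = Bag()
    a.add(1)
    print(b.items)                  # [1] -- b sees a's addition
    a.items = [2]                   # assignment creates an instance attribute on a
    print(Bag.items)                # still [1]
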
*Descriptors* can be used to\\ncreate instance variables with different implementation details.\\n\\nSee also:\\n\\n **PEP 3115** - Metaclasses in Python 3 **PEP 3129** - Class\\n Decorators\\n\\n-[ Footnotes ]-\\n\\n[1] The exception is propagated to the invocation stack unless there\\n is a ``finally`` clause which happens to raise another exception.\\n That new exception causes the old one to be lost.\\n\\n[2] Currently, control \"flows off the end\" except in the case of an\\n exception or the execution of a ``return``, ``continue``, or\\n ``break`` statement.\\n\\n[3] A string literal appearing as the first statement in the function\\n body is transformed into the function\\'s ``__doc__`` attribute and\\n therefore the function\\'s *docstring*.\\n\\n[4] A string literal appearing as the first statement in the class\\n body is transformed into the namespace\\'s ``__doc__`` item and\\n therefore the class\\'s *docstring*.\\n',\n'comparisons':'\\nComparisons\\n***********\\n\\nUnlike C, all comparison operations in Python have the same priority,\\nwhich is lower than that of any arithmetic, shifting or bitwise\\noperation. Also unlike C, expressions like ``a < b < c`` have the\\ninterpretation that is conventional in mathematics:\\n\\n comparison ::= or_expr ( comp_operator or_expr )*\\n comp_operator ::= \"<\" | \">\" | \"==\" | \">=\" | \"<=\" | \"!=\"\\n | \"is\" [\"not\"] | [\"not\"] \"in\"\\n\\nComparisons yield boolean values: ``True`` or ``False``.\\n\\nComparisons can be chained arbitrarily, e.g., ``x < y <= z`` is\\nequivalent to ``x < y and y <= z``, except that ``y`` is evaluated\\nonly once (but in both cases ``z`` is not evaluated at all when ``x <\\ny`` is found to be false).\\n\\nFormally, if *a*, *b*, *c*, ..., *y*, *z* are expressions and *op1*,\\n*op2*, ..., *opN* are comparison operators, then ``a op1 b op2 c ... y\\nopN z`` is equivalent to ``a op1 b and b op2 c and ... y opN z``,\\nexcept that each expression is evaluated at most once.\\n\\nNote that ``a op1 b op2 c`` doesn\\'t imply any kind of comparison\\nbetween *a* and *c*, so that, e.g., ``x < y > z`` is perfectly legal\\n(though perhaps not pretty).\\n\\nThe operators ``<``, ``>``, ``==``, ``>=``, ``<=``, and ``!=`` compare\\nthe values of two objects. The objects need not have the same type.\\nIf both are numbers, they are converted to a common type. Otherwise,\\nthe ``==`` and ``!=`` operators *always* consider objects of different\\ntypes to be unequal, while the ``<``, ``>``, ``>=`` and ``<=``\\noperators raise a ``TypeError`` when comparing objects of different\\ntypes that do not implement these operators for the given pair of\\ntypes. You can control comparison behavior of objects of non-built-in\\ntypes by defining rich comparison methods like ``__gt__()``, described\\nin section *Basic customization*.\\n\\nComparison of objects of the same type depends on the type:\\n\\n* Numbers are compared arithmetically.\\n\\n* The values ``float(\\'NaN\\')`` and ``Decimal(\\'NaN\\')`` are special. The\\n are identical to themselves, ``x is x`` but are not equal to\\n themselves, ``x != x``. Additionally, comparing any value to a\\n not-a-number value will return ``False``. 
For example, both ``3 <\\n float(\\'NaN\\')`` and ``float(\\'NaN\\') < 3`` will return ``False``.\\n\\n* Bytes objects are compared lexicographically using the numeric\\n values of their elements.\\n\\n* Strings are compared lexicographically using the numeric equivalents\\n (the result of the built-in function ``ord()``) of their characters.\\n [3] String and bytes object can\\'t be compared!\\n\\n* Tuples and lists are compared lexicographically using comparison of\\n corresponding elements. This means that to compare equal, each\\n element must compare equal and the two sequences must be of the same\\n type and have the same length.\\n\\n If not equal, the sequences are ordered the same as their first\\n differing elements. For example, ``[1,2,x] <= [1,2,y]`` has the\\n same value as ``x <= y``. If the corresponding element does not\\n exist, the shorter sequence is ordered first (for example, ``[1,2] <\\n [1,2,3]``).\\n\\n* Mappings (dictionaries) compare equal if and only if they have the\\n same ``(key, value)`` pairs. Order comparisons ``(\\'<\\', \\'<=\\', \\'>=\\',\\n \\'>\\')`` raise ``TypeError``.\\n\\n* Sets and frozensets define comparison operators to mean subset and\\n superset tests. Those relations do not define total orderings (the\\n two sets ``{1,2}`` and {2,3} are not equal, nor subsets of one\\n another, nor supersets of one another). Accordingly, sets are not\\n appropriate arguments for functions which depend on total ordering.\\n For example, ``min()``, ``max()``, and ``sorted()`` produce\\n undefined results given a list of sets as inputs.\\n\\n* Most other objects of built-in types compare unequal unless they are\\n the same object; the choice whether one object is considered smaller\\n or larger than another one is made arbitrarily but consistently\\n within one execution of a program.\\n\\nComparison of objects of the differing types depends on whether either\\nof the types provide explicit support for the comparison. Most\\nnumeric types can be compared with one another. When cross-type\\ncomparison is not supported, the comparison method returns\\n``NotImplemented``.\\n\\nThe operators ``in`` and ``not in`` test for membership. ``x in s``\\nevaluates to true if *x* is a member of *s*, and false otherwise. ``x\\nnot in s`` returns the negation of ``x in s``. All built-in sequences\\nand set types support this as well as dictionary, for which ``in``\\ntests whether a the dictionary has a given key. For container types\\nsuch as list, tuple, set, frozenset, dict, or collections.deque, the\\nexpression ``x in y`` is equivalent to ``any(x is e or x == e for e in\\ny)``.\\n\\nFor the string and bytes types, ``x in y`` is true if and only if *x*\\nis a substring of *y*. An equivalent test is ``y.find(x) != -1``.\\nEmpty strings are always considered to be a substring of any other\\nstring, so ``\"\" in \"abc\"`` will return ``True``.\\n\\nFor user-defined classes which define the ``__contains__()`` method,\\n``x in y`` is true if and only if ``y.__contains__(x)`` is true.\\n\\nFor user-defined classes which do not define ``__contains__()`` but do\\ndefine ``__iter__()``, ``x in y`` is true if some value ``z`` with ``x\\n== z`` is produced while iterating over ``y``. 
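
A brief sketch (class names are illustrative) of the two customization hooks just described:

    class EvenNumbers:
        def __contains__(self, x):
            return isinstance(x, int) and x % 2 == 0

    class Squares:
        def __iter__(self):              # no __contains__(): ``in`` falls back to iteration
            return iter([1, 4, 9, 16])

    print(4 in EvenNumbers())            # True, via __contains__()
    print(9 in Squares())                # True, found while iterating
    print(5 in Squares())                # False, iterator exhausted without a match
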
If an exception is\\nraised during the iteration, it is as if ``in`` raised that exception.\\n\\nLastly, the old-style iteration protocol is tried: if a class defines\\n``__getitem__()``, ``x in y`` is true if and only if there is a non-\\nnegative integer index *i* such that ``x == y[i]``, and all lower\\ninteger indices do not raise ``IndexError`` exception. (If any other\\nexception is raised, it is as if ``in`` raised that exception).\\n\\nThe operator ``not in`` is defined to have the inverse true value of\\n``in``.\\n\\nThe operators ``is`` and ``is not`` test for object identity: ``x is\\ny`` is true if and only if *x* and *y* are the same object. ``x is\\nnot y`` yields the inverse truth value. [4]\\n',\n'compound':'\\nCompound statements\\n*******************\\n\\nCompound statements contain (groups of) other statements; they affect\\nor control the execution of those other statements in some way. In\\ngeneral, compound statements span multiple lines, although in simple\\nincarnations a whole compound statement may be contained in one line.\\n\\nThe ``if``, ``while`` and ``for`` statements implement traditional\\ncontrol flow constructs. ``try`` specifies exception handlers and/or\\ncleanup code for a group of statements, while the ``with`` statement\\nallows the execution of initialization and finalization code around a\\nblock of code. Function and class definitions are also syntactically\\ncompound statements.\\n\\nCompound statements consist of one or more \\'clauses.\\' A clause\\nconsists of a header and a \\'suite.\\' The clause headers of a\\nparticular compound statement are all at the same indentation level.\\nEach clause header begins with a uniquely identifying keyword and ends\\nwith a colon. A suite is a group of statements controlled by a\\nclause. A suite can be one or more semicolon-separated simple\\nstatements on the same line as the header, following the header\\'s\\ncolon, or it can be one or more indented statements on subsequent\\nlines. Only the latter form of suite can contain nested compound\\nstatements; the following is illegal, mostly because it wouldn\\'t be\\nclear to which ``if`` clause a following ``else`` clause would belong:\\n\\n if test1: if test2: print(x)\\n\\nAlso note that the semicolon binds tighter than the colon in this\\ncontext, so that in the following example, either all or none of the\\n``print()`` calls are executed:\\n\\n if x < y < z: print(x); print(y); print(z)\\n\\nSummarizing:\\n\\n compound_stmt ::= if_stmt\\n | while_stmt\\n | for_stmt\\n | try_stmt\\n | with_stmt\\n | funcdef\\n | classdef\\n suite ::= stmt_list NEWLINE | NEWLINE INDENT statement+ DEDENT\\n statement ::= stmt_list NEWLINE | compound_stmt\\n stmt_list ::= simple_stmt (\";\" simple_stmt)* [\";\"]\\n\\nNote that statements always end in a ``NEWLINE`` possibly followed by\\na ``DEDENT``. 
Also note that optional continuation clauses always\\nbegin with a keyword that cannot start a statement, thus there are no\\nambiguities (the \\'dangling ``else``\\' problem is solved in Python by\\nrequiring nested ``if`` statements to be indented).\\n\\nThe formatting of the grammar rules in the following sections places\\neach clause on a separate line for clarity.\\n\\n\\nThe ``if`` statement\\n====================\\n\\nThe ``if`` statement is used for conditional execution:\\n\\n if_stmt ::= \"if\" expression \":\" suite\\n ( \"elif\" expression \":\" suite )*\\n [\"else\" \":\" suite]\\n\\nIt selects exactly one of the suites by evaluating the expressions one\\nby one until one is found to be true (see section *Boolean operations*\\nfor the definition of true and false); then that suite is executed\\n(and no other part of the ``if`` statement is executed or evaluated).\\nIf all expressions are false, the suite of the ``else`` clause, if\\npresent, is executed.\\n\\n\\nThe ``while`` statement\\n=======================\\n\\nThe ``while`` statement is used for repeated execution as long as an\\nexpression is true:\\n\\n while_stmt ::= \"while\" expression \":\" suite\\n [\"else\" \":\" suite]\\n\\nThis repeatedly tests the expression and, if it is true, executes the\\nfirst suite; if the expression is false (which may be the first time\\nit is tested) the suite of the ``else`` clause, if present, is\\nexecuted and the loop terminates.\\n\\nA ``break`` statement executed in the first suite terminates the loop\\nwithout executing the ``else`` clause\\'s suite. A ``continue``\\nstatement executed in the first suite skips the rest of the suite and\\ngoes back to testing the expression.\\n\\n\\nThe ``for`` statement\\n=====================\\n\\nThe ``for`` statement is used to iterate over the elements of a\\nsequence (such as a string, tuple or list) or other iterable object:\\n\\n for_stmt ::= \"for\" target_list \"in\" expression_list \":\" suite\\n [\"else\" \":\" suite]\\n\\nThe expression list is evaluated once; it should yield an iterable\\nobject. An iterator is created for the result of the\\n``expression_list``. The suite is then executed once for each item\\nprovided by the iterator, in the order of ascending indices. Each\\nitem in turn is assigned to the target list using the standard rules\\nfor assignments (see *Assignment statements*), and then the suite is\\nexecuted. When the items are exhausted (which is immediately when the\\nsequence is empty or an iterator raises a ``StopIteration``\\nexception), the suite in the ``else`` clause, if present, is executed,\\nand the loop terminates.\\n\\nA ``break`` statement executed in the first suite terminates the loop\\nwithout executing the ``else`` clause\\'s suite. A ``continue``\\nstatement executed in the first suite skips the rest of the suite and\\ncontinues with the next item, or with the ``else`` clause if there was\\nno next item.\\n\\nThe suite may assign to the variable(s) in the target list; this does\\nnot affect the next item assigned to it.\\n\\nNames in the target list are not deleted when the loop is finished,\\nbut if the sequence is empty, it will not have been assigned to at all\\nby the loop. 
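
For example (an illustrative sketch of the ``for``...``else`` behaviour described above), the ``else`` suite runs only when the loop is not terminated by ``break``:

    numbers = [1, 3, 4, 7]
    for n in numbers:
        if n % 2 == 0:
            print("found an even number:", n)
            break
    else:                                # runs only if no ``break`` occurred
        print("no even number in", numbers)
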
Hint: the built-in function ``range()`` returns an\\niterator of integers suitable to emulate the effect of Pascal\\'s ``for\\ni := a to b do``; e.g., ``list(range(3))`` returns the list ``[0, 1,\\n2]``.\\n\\nNote: There is a subtlety when the sequence is being modified by the loop\\n (this can only occur for mutable sequences, i.e. lists). An\\n internal counter is used to keep track of which item is used next,\\n and this is incremented on each iteration. When this counter has\\n reached the length of the sequence the loop terminates. This means\\n that if the suite deletes the current (or a previous) item from the\\n sequence, the next item will be skipped (since it gets the index of\\n the current item which has already been treated). Likewise, if the\\n suite inserts an item in the sequence before the current item, the\\n current item will be treated again the next time through the loop.\\n This can lead to nasty bugs that can be avoided by making a\\n temporary copy using a slice of the whole sequence, e.g.,\\n\\n for x in a[:]:\\n if x < 0: a.remove(x)\\n\\n\\nThe ``try`` statement\\n=====================\\n\\nThe ``try`` statement specifies exception handlers and/or cleanup code\\nfor a group of statements:\\n\\n try_stmt ::= try1_stmt | try2_stmt\\n try1_stmt ::= \"try\" \":\" suite\\n (\"except\" [expression [\"as\" target]] \":\" suite)+\\n [\"else\" \":\" suite]\\n [\"finally\" \":\" suite]\\n try2_stmt ::= \"try\" \":\" suite\\n \"finally\" \":\" suite\\n\\nThe ``except`` clause(s) specify one or more exception handlers. When\\nno exception occurs in the ``try`` clause, no exception handler is\\nexecuted. When an exception occurs in the ``try`` suite, a search for\\nan exception handler is started. This search inspects the except\\nclauses in turn until one is found that matches the exception. An\\nexpression-less except clause, if present, must be last; it matches\\nany exception. For an except clause with an expression, that\\nexpression is evaluated, and the clause matches the exception if the\\nresulting object is \"compatible\" with the exception. An object is\\ncompatible with an exception if it is the class or a base class of the\\nexception object or a tuple containing an item compatible with the\\nexception.\\n\\nIf no except clause matches the exception, the search for an exception\\nhandler continues in the surrounding code and on the invocation stack.\\n[1]\\n\\nIf the evaluation of an expression in the header of an except clause\\nraises an exception, the original search for a handler is canceled and\\na search starts for the new exception in the surrounding code and on\\nthe call stack (it is treated as if the entire ``try`` statement\\nraised the exception).\\n\\nWhen a matching except clause is found, the exception is assigned to\\nthe target specified after the ``as`` keyword in that except clause,\\nif present, and the except clause\\'s suite is executed. All except\\nclauses must have an executable block. When the end of this block is\\nreached, execution continues normally after the entire try statement.\\n(This means that if two nested handlers exist for the same exception,\\nand the exception occurs in the try clause of the inner handler, the\\nouter handler will not handle the exception.)\\n\\nWhen an exception has been assigned using ``as target``, it is cleared\\nat the end of the except clause. 
This is as if\\n\\n except E as N:\\n foo\\n\\nwas translated to\\n\\n except E as N:\\n try:\\n foo\\n finally:\\n del N\\n\\nThis means the exception must be assigned to a different name to be\\nable to refer to it after the except clause. Exceptions are cleared\\nbecause with the traceback attached to them, they form a reference\\ncycle with the stack frame, keeping all locals in that frame alive\\nuntil the next garbage collection occurs.\\n\\nBefore an except clause\\'s suite is executed, details about the\\nexception are stored in the ``sys`` module and can be access via\\n``sys.exc_info()``. ``sys.exc_info()`` returns a 3-tuple consisting of\\nthe exception class, the exception instance and a traceback object\\n(see section *The standard type hierarchy*) identifying the point in\\nthe program where the exception occurred. ``sys.exc_info()`` values\\nare restored to their previous values (before the call) when returning\\nfrom a function that handled an exception.\\n\\nThe optional ``else`` clause is executed if and when control flows off\\nthe end of the ``try`` clause. [2] Exceptions in the ``else`` clause\\nare not handled by the preceding ``except`` clauses.\\n\\nIf ``finally`` is present, it specifies a \\'cleanup\\' handler. The\\n``try`` clause is executed, including any ``except`` and ``else``\\nclauses. If an exception occurs in any of the clauses and is not\\nhandled, the exception is temporarily saved. The ``finally`` clause is\\nexecuted. If there is a saved exception it is re-raised at the end of\\nthe ``finally`` clause. If the ``finally`` clause raises another\\nexception, the saved exception is set as the context of the new\\nexception. If the ``finally`` clause executes a ``return`` or\\n``break`` statement, the saved exception is discarded:\\n\\n def f():\\n try:\\n 1/0\\n finally:\\n return 42\\n\\n >>> f()\\n 42\\n\\nThe exception information is not available to the program during\\nexecution of the ``finally`` clause.\\n\\nWhen a ``return``, ``break`` or ``continue`` statement is executed in\\nthe ``try`` suite of a ``try``...``finally`` statement, the\\n``finally`` clause is also executed \\'on the way out.\\' A ``continue``\\nstatement is illegal in the ``finally`` clause. (The reason is a\\nproblem with the current implementation --- this restriction may be\\nlifted in the future).\\n\\nAdditional information on exceptions can be found in section\\n*Exceptions*, and information on using the ``raise`` statement to\\ngenerate exceptions may be found in section *The raise statement*.\\n\\n\\nThe ``with`` statement\\n======================\\n\\nThe ``with`` statement is used to wrap the execution of a block with\\nmethods defined by a context manager (see section *With Statement\\nContext Managers*). This allows common\\n``try``...``except``...``finally`` usage patterns to be encapsulated\\nfor convenient reuse.\\n\\n with_stmt ::= \"with\" with_item (\",\" with_item)* \":\" suite\\n with_item ::= expression [\"as\" target]\\n\\nThe execution of the ``with`` statement with one \"item\" proceeds as\\nfollows:\\n\\n1. The context expression (the expression given in the ``with_item``)\\n is evaluated to obtain a context manager.\\n\\n2. The context manager\\'s ``__exit__()`` is loaded for later use.\\n\\n3. The context manager\\'s ``__enter__()`` method is invoked.\\n\\n4. 
If a target was included in the ``with`` statement, the return\\n value from ``__enter__()`` is assigned to it.\\n\\n Note: The ``with`` statement guarantees that if the ``__enter__()``\\n method returns without an error, then ``__exit__()`` will always\\n be called. Thus, if an error occurs during the assignment to the\\n target list, it will be treated the same as an error occurring\\n within the suite would be. See step 6 below.\\n\\n5. The suite is executed.\\n\\n6. The context manager\\'s ``__exit__()`` method is invoked. If an\\n exception caused the suite to be exited, its type, value, and\\n traceback are passed as arguments to ``__exit__()``. Otherwise,\\n three ``None`` arguments are supplied.\\n\\n If the suite was exited due to an exception, and the return value\\n from the ``__exit__()`` method was false, the exception is\\n reraised. If the return value was true, the exception is\\n suppressed, and execution continues with the statement following\\n the ``with`` statement.\\n\\n If the suite was exited for any reason other than an exception, the\\n return value from ``__exit__()`` is ignored, and execution proceeds\\n at the normal location for the kind of exit that was taken.\\n\\nWith more than one item, the context managers are processed as if\\nmultiple ``with`` statements were nested:\\n\\n with A() as a, B() as b:\\n suite\\n\\nis equivalent to\\n\\n with A() as a:\\n with B() as b:\\n suite\\n\\nChanged in version 3.1: Support for multiple context expressions.\\n\\nSee also:\\n\\n **PEP 0343** - The \"with\" statement\\n The specification, background, and examples for the Python\\n ``with`` statement.\\n\\n\\nFunction definitions\\n====================\\n\\nA function definition defines a user-defined function object (see\\nsection *The standard type hierarchy*):\\n\\n funcdef ::= [decorators] \"def\" funcname \"(\" [parameter_list] \")\" [\"->\" expression] \":\" suite\\n decorators ::= decorator+\\n decorator ::= \"@\" dotted_name [\"(\" [parameter_list [\",\"]] \")\"] NEWLINE\\n dotted_name ::= identifier (\".\" identifier)*\\n parameter_list ::= (defparameter \",\")*\\n ( \"*\" [parameter] (\",\" defparameter)* [\",\" \"**\" parameter]\\n | \"**\" parameter\\n | defparameter [\",\"] )\\n parameter ::= identifier [\":\" expression]\\n defparameter ::= parameter [\"=\" expression]\\n funcname ::= identifier\\n\\nA function definition is an executable statement. Its execution binds\\nthe function name in the current local namespace to a function object\\n(a wrapper around the executable code for the function). This\\nfunction object contains a reference to the current global namespace\\nas the global namespace to be used when the function is called.\\n\\nThe function definition does not execute the function body; this gets\\nexecuted only when the function is called. [3]\\n\\nA function definition may be wrapped by one or more *decorator*\\nexpressions. Decorator expressions are evaluated when the function is\\ndefined, in the scope that contains the function definition. The\\nresult must be a callable, which is invoked with the function object\\nas the only argument. The returned value is bound to the function name\\ninstead of the function object. Multiple decorators are applied in\\nnested fashion. 
For example, the following code\\n\\n @f1(arg)\\n @f2\\n def func(): pass\\n\\nis equivalent to\\n\\n def func(): pass\\n func = f1(arg)(f2(func))\\n\\nWhen one or more *parameters* have the form *parameter* ``=``\\n*expression*, the function is said to have \"default parameter values.\"\\nFor a parameter with a default value, the corresponding *argument* may\\nbe omitted from a call, in which case the parameter\\'s default value is\\nsubstituted. If a parameter has a default value, all following\\nparameters up until the \"``*``\" must also have a default value ---\\nthis is a syntactic restriction that is not expressed by the grammar.\\n\\n**Default parameter values are evaluated when the function definition\\nis executed.** This means that the expression is evaluated once, when\\nthe function is defined, and that the same \"pre-computed\" value is\\nused for each call. This is especially important to understand when a\\ndefault parameter is a mutable object, such as a list or a dictionary:\\nif the function modifies the object (e.g. by appending an item to a\\nlist), the default value is in effect modified. This is generally not\\nwhat was intended. A way around this is to use ``None`` as the\\ndefault, and explicitly test for it in the body of the function, e.g.:\\n\\n def whats_on_the_telly(penguin=None):\\n if penguin is None:\\n penguin = []\\n penguin.append(\"property of the zoo\")\\n return penguin\\n\\nFunction call semantics are described in more detail in section\\n*Calls*. A function call always assigns values to all parameters\\nmentioned in the parameter list, either from position arguments, from\\nkeyword arguments, or from default values. If the form\\n\"``*identifier``\" is present, it is initialized to a tuple receiving\\nany excess positional parameters, defaulting to the empty tuple. If\\nthe form \"``**identifier``\" is present, it is initialized to a new\\ndictionary receiving any excess keyword arguments, defaulting to a new\\nempty dictionary. Parameters after \"``*``\" or \"``*identifier``\" are\\nkeyword-only parameters and may only be passed used keyword arguments.\\n\\nParameters may have annotations of the form \"``: expression``\"\\nfollowing the parameter name. Any parameter may have an annotation\\neven those of the form ``*identifier`` or ``**identifier``. Functions\\nmay have \"return\" annotation of the form \"``-> expression``\" after the\\nparameter list. These annotations can be any valid Python expression\\nand are evaluated when the function definition is executed.\\nAnnotations may be evaluated in a different order than they appear in\\nthe source code. The presence of annotations does not change the\\nsemantics of a function. The annotation values are available as\\nvalues of a dictionary keyed by the parameters\\' names in the\\n``__annotations__`` attribute of the function object.\\n\\nIt is also possible to create anonymous functions (functions not bound\\nto a name), for immediate use in expressions. This uses lambda forms,\\ndescribed in section *Lambdas*. Note that the lambda form is merely a\\nshorthand for a simplified function definition; a function defined in\\na \"``def``\" statement can be passed around or assigned to another name\\njust like a function defined by a lambda form. The \"``def``\" form is\\nactually more powerful since it allows the execution of multiple\\nstatements and annotations.\\n\\n**Programmer\\'s note:** Functions are first-class objects. 
A \"``def``\"\\nform executed inside a function definition defines a local function\\nthat can be returned or passed around. Free variables used in the\\nnested function can access the local variables of the function\\ncontaining the def. See section *Naming and binding* for details.\\n\\nSee also:\\n\\n **PEP 3107** - Function Annotations\\n The original specification for function annotations.\\n\\n\\nClass definitions\\n=================\\n\\nA class definition defines a class object (see section *The standard\\ntype hierarchy*):\\n\\n classdef ::= [decorators] \"class\" classname [inheritance] \":\" suite\\n inheritance ::= \"(\" [parameter_list] \")\"\\n classname ::= identifier\\n\\nA class definition is an executable statement. The inheritance list\\nusually gives a list of base classes (see *Customizing class creation*\\nfor more advanced uses), so each item in the list should evaluate to a\\nclass object which allows subclassing. Classes without an inheritance\\nlist inherit, by default, from the base class ``object``; hence,\\n\\n class Foo:\\n pass\\n\\nis equivalent to\\n\\n class Foo(object):\\n pass\\n\\nThe class\\'s suite is then executed in a new execution frame (see\\n*Naming and binding*), using a newly created local namespace and the\\noriginal global namespace. (Usually, the suite contains mostly\\nfunction definitions.) When the class\\'s suite finishes execution, its\\nexecution frame is discarded but its local namespace is saved. [4] A\\nclass object is then created using the inheritance list for the base\\nclasses and the saved local namespace for the attribute dictionary.\\nThe class name is bound to this class object in the original local\\nnamespace.\\n\\nClass creation can be customized heavily using *metaclasses*.\\n\\nClasses can also be decorated: just like when decorating functions,\\n\\n @f1(arg)\\n @f2\\n class Foo: pass\\n\\nis equivalent to\\n\\n class Foo: pass\\n Foo = f1(arg)(f2(Foo))\\n\\nThe evaluation rules for the decorator expressions are the same as for\\nfunction decorators. The result must be a class object, which is then\\nbound to the class name.\\n\\n**Programmer\\'s note:** Variables defined in the class definition are\\nclass attributes; they are shared by instances. Instance attributes\\ncan be set in a method with ``self.name = value``. Both class and\\ninstance attributes are accessible through the notation\\n\"``self.name``\", and an instance attribute hides a class attribute\\nwith the same name when accessed in this way. Class attributes can be\\nused as defaults for instance attributes, but using mutable values\\nthere can lead to unexpected results. 
*Descriptors* can be used to\\ncreate instance variables with different implementation details.\\n\\nSee also:\\n\\n **PEP 3115** - Metaclasses in Python 3 **PEP 3129** - Class\\n Decorators\\n\\n-[ Footnotes ]-\\n\\n[1] The exception is propagated to the invocation stack unless there\\n is a ``finally`` clause which happens to raise another exception.\\n That new exception causes the old one to be lost.\\n\\n[2] Currently, control \"flows off the end\" except in the case of an\\n exception or the execution of a ``return``, ``continue``, or\\n ``break`` statement.\\n\\n[3] A string literal appearing as the first statement in the function\\n body is transformed into the function\\'s ``__doc__`` attribute and\\n therefore the function\\'s *docstring*.\\n\\n[4] A string literal appearing as the first statement in the class\\n body is transformed into the namespace\\'s ``__doc__`` item and\\n therefore the class\\'s *docstring*.\\n',\n'context-managers':'\\nWith Statement Context Managers\\n*******************************\\n\\nA *context manager* is an object that defines the runtime context to\\nbe established when executing a ``with`` statement. The context\\nmanager handles the entry into, and the exit from, the desired runtime\\ncontext for the execution of the block of code. Context managers are\\nnormally invoked using the ``with`` statement (described in section\\n*The with statement*), but can also be used by directly invoking their\\nmethods.\\n\\nTypical uses of context managers include saving and restoring various\\nkinds of global state, locking and unlocking resources, closing opened\\nfiles, etc.\\n\\nFor more information on context managers, see *Context Manager Types*.\\n\\nobject.__enter__(self)\\n\\n Enter the runtime context related to this object. The ``with``\\n statement will bind this method\\'s return value to the target(s)\\n specified in the ``as`` clause of the statement, if any.\\n\\nobject.__exit__(self, exc_type, exc_value, traceback)\\n\\n Exit the runtime context related to this object. The parameters\\n describe the exception that caused the context to be exited. If the\\n context was exited without an exception, all three arguments will\\n be ``None``.\\n\\n If an exception is supplied, and the method wishes to suppress the\\n exception (i.e., prevent it from being propagated), it should\\n return a true value. Otherwise, the exception will be processed\\n normally upon exit from this method.\\n\\n Note that ``__exit__()`` methods should not reraise the passed-in\\n exception; this is the caller\\'s responsibility.\\n\\nSee also:\\n\\n **PEP 0343** - The \"with\" statement\\n The specification, background, and examples for the Python\\n ``with`` statement.\\n',\n'continue':'\\nThe ``continue`` statement\\n**************************\\n\\n continue_stmt ::= \"continue\"\\n\\n``continue`` may only occur syntactically nested in a ``for`` or\\n``while`` loop, but not nested in a function or class definition or\\n``finally`` clause within that loop. 
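As a purely illustrative sketch of the context manager protocol described above (``ManagedResource`` is an invented class), ``__enter__()`` and ``__exit__()`` are typically paired like this:

    class ManagedResource:
        def __enter__(self):
            # the value returned here is bound to the ``as`` target, if any
            return self

        def __exit__(self, exc_type, exc_value, traceback):
            # all three arguments are None when no exception occurred; returning
            # a false value lets any exception propagate normally
            return False

    with ManagedResource() as res:
        pass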
It continues with the next cycle\\nof the nearest enclosing loop.\\n\\nWhen ``continue`` passes control out of a ``try`` statement with a\\n``finally`` clause, that ``finally`` clause is executed before really\\nstarting the next loop cycle.\\n',\n'conversions':'\\nArithmetic conversions\\n**********************\\n\\nWhen a description of an arithmetic operator below uses the phrase\\n\"the numeric arguments are converted to a common type,\" this means\\nthat the operator implementation for built-in types works that way:\\n\\n* If either argument is a complex number, the other is converted to\\n complex;\\n\\n* otherwise, if either argument is a floating point number, the other\\n is converted to floating point;\\n\\n* otherwise, both must be integers and no conversion is necessary.\\n\\nSome additional rules apply for certain operators (e.g., a string left\\nargument to the \\'%\\' operator). Extensions must define their own\\nconversion behavior.\\n',\n'customization':'\\nBasic customization\\n*******************\\n\\nobject.__new__(cls[, ...])\\n\\n Called to create a new instance of class *cls*. ``__new__()`` is a\\n static method (special-cased so you need not declare it as such)\\n that takes the class of which an instance was requested as its\\n first argument. The remaining arguments are those passed to the\\n object constructor expression (the call to the class). The return\\n value of ``__new__()`` should be the new object instance (usually\\n an instance of *cls*).\\n\\n Typical implementations create a new instance of the class by\\n invoking the superclass\\'s ``__new__()`` method using\\n ``super(currentclass, cls).__new__(cls[, ...])`` with appropriate\\n arguments and then modifying the newly-created instance as\\n necessary before returning it.\\n\\n If ``__new__()`` returns an instance of *cls*, then the new\\n instance\\'s ``__init__()`` method will be invoked like\\n ``__init__(self[, ...])``, where *self* is the new instance and the\\n remaining arguments are the same as were passed to ``__new__()``.\\n\\n If ``__new__()`` does not return an instance of *cls*, then the new\\n instance\\'s ``__init__()`` method will not be invoked.\\n\\n ``__new__()`` is intended mainly to allow subclasses of immutable\\n types (like int, str, or tuple) to customize instance creation. It\\n is also commonly overridden in custom metaclasses in order to\\n customize class creation.\\n\\nobject.__init__(self[, ...])\\n\\n Called when the instance is created. The arguments are those\\n passed to the class constructor expression. If a base class has an\\n ``__init__()`` method, the derived class\\'s ``__init__()`` method,\\n if any, must explicitly call it to ensure proper initialization of\\n the base class part of the instance; for example:\\n ``BaseClass.__init__(self, [args...])``. As a special constraint\\n on constructors, no value may be returned; doing so will cause a\\n ``TypeError`` to be raised at runtime.\\n\\nobject.__del__(self)\\n\\n Called when the instance is about to be destroyed. This is also\\n called a destructor. If a base class has a ``__del__()`` method,\\n the derived class\\'s ``__del__()`` method, if any, must explicitly\\n call it to ensure proper deletion of the base class part of the\\n instance. Note that it is possible (though not recommended!) for\\n the ``__del__()`` method to postpone destruction of the instance by\\n creating a new reference to it. It may then be called at a later\\n time when this new reference is deleted. 
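A minimal sketch of the immutable-subclass use of ``__new__()`` described above (``UpperStr`` is an invented name):

    class UpperStr(str):
        def __new__(cls, value):
            # str is immutable, so the transformation has to happen in __new__();
            # by the time __init__() runs the value is already fixed
            return super().__new__(cls, value.upper())

    UpperStr("spam")    # "SPAM"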
It is not guaranteed that\\n ``__del__()`` methods are called for objects that still exist when\\n the interpreter exits.\\n\\n Note: ``del x`` doesn\\'t directly call ``x.__del__()`` --- the former\\n decrements the reference count for ``x`` by one, and the latter\\n is only called when ``x``\\'s reference count reaches zero. Some\\n common situations that may prevent the reference count of an\\n object from going to zero include: circular references between\\n objects (e.g., a doubly-linked list or a tree data structure with\\n parent and child pointers); a reference to the object on the\\n stack frame of a function that caught an exception (the traceback\\n stored in ``sys.exc_info()[2]`` keeps the stack frame alive); or\\n a reference to the object on the stack frame that raised an\\n unhandled exception in interactive mode (the traceback stored in\\n ``sys.last_traceback`` keeps the stack frame alive). The first\\n situation can only be remedied by explicitly breaking the cycles;\\n the latter two situations can be resolved by storing ``None`` in\\n ``sys.last_traceback``. Circular references which are garbage are\\n detected when the option cycle detector is enabled (it\\'s on by\\n default), but can only be cleaned up if there are no Python-\\n level ``__del__()`` methods involved. Refer to the documentation\\n for the ``gc`` module for more information about how\\n ``__del__()`` methods are handled by the cycle detector,\\n particularly the description of the ``garbage`` value.\\n\\n Warning: Due to the precarious circumstances under which ``__del__()``\\n methods are invoked, exceptions that occur during their execution\\n are ignored, and a warning is printed to ``sys.stderr`` instead.\\n Also, when ``__del__()`` is invoked in response to a module being\\n deleted (e.g., when execution of the program is done), other\\n globals referenced by the ``__del__()`` method may already have\\n been deleted or in the process of being torn down (e.g. the\\n import machinery shutting down). For this reason, ``__del__()``\\n methods should do the absolute minimum needed to maintain\\n external invariants. Starting with version 1.5, Python\\n guarantees that globals whose name begins with a single\\n underscore are deleted from their module before other globals are\\n deleted; if no other references to such globals exist, this may\\n help in assuring that imported modules are still available at the\\n time when the ``__del__()`` method is called.\\n\\nobject.__repr__(self)\\n\\n Called by the ``repr()`` built-in function to compute the\\n \"official\" string representation of an object. If at all possible,\\n this should look like a valid Python expression that could be used\\n to recreate an object with the same value (given an appropriate\\n environment). If this is not possible, a string of the form\\n ``<...some useful description...>`` should be returned. The return\\n value must be a string object. If a class defines ``__repr__()``\\n but not ``__str__()``, then ``__repr__()`` is also used when an\\n \"informal\" string representation of instances of that class is\\n required.\\n\\n This is typically used for debugging, so it is important that the\\n representation is information-rich and unambiguous.\\n\\nobject.__str__(self)\\n\\n Called by ``str(object)`` and the built-in functions ``format()``\\n and ``print()`` to compute the \"informal\" or nicely printable\\n string representation of an object. 
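A small illustrative pairing of ``__repr__()`` and ``__str__()`` (the ``Point`` class is invented for the example):

    class Point:
        def __init__(self, x, y):
            self.x, self.y = x, y

        def __repr__(self):
            # unambiguous; looks like an expression that could recreate the object
            return "Point(%r, %r)" % (self.x, self.y)

        def __str__(self):
            # friendlier form used by str(), print() and format()
            return "(%g, %g)" % (self.x, self.y)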
The return value must be a\\n *string* object.\\n\\n This method differs from ``object.__repr__()`` in that there is no\\n expectation that ``__str__()`` return a valid Python expression: a\\n more convenient or concise representation can be used.\\n\\n The default implementation defined by the built-in type ``object``\\n calls ``object.__repr__()``.\\n\\nobject.__bytes__(self)\\n\\n Called by ``bytes()`` to compute a byte-string representation of an\\n object. This should return a ``bytes`` object.\\n\\nobject.__format__(self, format_spec)\\n\\n Called by the ``format()`` built-in function (and by extension, the\\n ``str.format()`` method of class ``str``) to produce a \"formatted\"\\n string representation of an object. The ``format_spec`` argument is\\n a string that contains a description of the formatting options\\n desired. The interpretation of the ``format_spec`` argument is up\\n to the type implementing ``__format__()``, however most classes\\n will either delegate formatting to one of the built-in types, or\\n use a similar formatting option syntax.\\n\\n See *Format Specification Mini-Language* for a description of the\\n standard formatting syntax.\\n\\n The return value must be a string object.\\n\\nobject.__lt__(self, other)\\nobject.__le__(self, other)\\nobject.__eq__(self, other)\\nobject.__ne__(self, other)\\nobject.__gt__(self, other)\\nobject.__ge__(self, other)\\n\\n These are the so-called \"rich comparison\" methods. The\\n correspondence between operator symbols and method names is as\\n follows: ``xy`` calls ``x.__gt__(y)``, and ``x>=y`` calls\\n ``x.__ge__(y)``.\\n\\n A rich comparison method may return the singleton\\n ``NotImplemented`` if it does not implement the operation for a\\n given pair of arguments. By convention, ``False`` and ``True`` are\\n returned for a successful comparison. However, these methods can\\n return any value, so if the comparison operator is used in a\\n Boolean context (e.g., in the condition of an ``if`` statement),\\n Python will call ``bool()`` on the value to determine if the result\\n is true or false.\\n\\n There are no implied relationships among the comparison operators.\\n The truth of ``x==y`` does not imply that ``x!=y`` is false.\\n Accordingly, when defining ``__eq__()``, one should also define\\n ``__ne__()`` so that the operators will behave as expected. See\\n the paragraph on ``__hash__()`` for some important notes on\\n creating *hashable* objects which support custom comparison\\n operations and are usable as dictionary keys.\\n\\n There are no swapped-argument versions of these methods (to be used\\n when the left argument does not support the operation but the right\\n argument does); rather, ``__lt__()`` and ``__gt__()`` are each\\n other\\'s reflection, ``__le__()`` and ``__ge__()`` are each other\\'s\\n reflection, and ``__eq__()`` and ``__ne__()`` are their own\\n reflection.\\n\\n Arguments to rich comparison methods are never coerced.\\n\\n To automatically generate ordering operations from a single root\\n operation, see ``functools.total_ordering()``.\\n\\nobject.__hash__(self)\\n\\n Called by built-in function ``hash()`` and for operations on\\n members of hashed collections including ``set``, ``frozenset``, and\\n ``dict``. ``__hash__()`` should return an integer. The only\\n required property is that objects which compare equal have the same\\n hash value; it is advised to somehow mix together (e.g. 
using\\n exclusive or) the hash values for the components of the object that\\n also play a part in comparison of objects.\\n\\n If a class does not define an ``__eq__()`` method it should not\\n define a ``__hash__()`` operation either; if it defines\\n ``__eq__()`` but not ``__hash__()``, its instances will not be\\n usable as items in hashable collections. If a class defines\\n mutable objects and implements an ``__eq__()`` method, it should\\n not implement ``__hash__()``, since the implementation of hashable\\n collections requires that a key\\'s hash value is immutable (if the\\n object\\'s hash value changes, it will be in the wrong hash bucket).\\n\\n User-defined classes have ``__eq__()`` and ``__hash__()`` methods\\n by default; with them, all objects compare unequal (except with\\n themselves) and ``x.__hash__()`` returns an appropriate value such\\n that ``x == y`` implies both that ``x is y`` and ``hash(x) ==\\n hash(y)``.\\n\\n A class that overrides ``__eq__()`` and does not define\\n ``__hash__()`` will have its ``__hash__()`` implicitly set to\\n ``None``. When the ``__hash__()`` method of a class is ``None``,\\n instances of the class will raise an appropriate ``TypeError`` when\\n a program attempts to retrieve their hash value, and will also be\\n correctly identified as unhashable when checking ``isinstance(obj,\\n collections.Hashable``).\\n\\n If a class that overrides ``__eq__()`` needs to retain the\\n implementation of ``__hash__()`` from a parent class, the\\n interpreter must be told this explicitly by setting ``__hash__ =\\n .__hash__``.\\n\\n If a class that does not override ``__eq__()`` wishes to suppress\\n hash support, it should include ``__hash__ = None`` in the class\\n definition. A class which defines its own ``__hash__()`` that\\n explicitly raises a ``TypeError`` would be incorrectly identified\\n as hashable by an ``isinstance(obj, collections.Hashable)`` call.\\n\\n Note: By default, the ``__hash__()`` values of str, bytes and datetime\\n objects are \"salted\" with an unpredictable random value.\\n Although they remain constant within an individual Python\\n process, they are not predictable between repeated invocations of\\n Python.This is intended to provide protection against a denial-\\n of-service caused by carefully-chosen inputs that exploit the\\n worst case performance of a dict insertion, O(n^2) complexity.\\n See http://www.ocert.org/advisories/ocert-2011-003.html for\\n details.Changing hash values affects the iteration order of\\n dicts, sets and other mappings. Python has never made guarantees\\n about this ordering (and it typically varies between 32-bit and\\n 64-bit builds).See also ``PYTHONHASHSEED``.\\n\\n Changed in version 3.3: Hash randomization is enabled by default.\\n\\nobject.__bool__(self)\\n\\n Called to implement truth value testing and the built-in operation\\n ``bool()``; should return ``False`` or ``True``. When this method\\n is not defined, ``__len__()`` is called, if it is defined, and the\\n object is considered true if its result is nonzero. If a class\\n defines neither ``__len__()`` nor ``__bool__()``, all its instances\\n are considered true.\\n',\n'debugger':'\\n``pdb`` --- The Python Debugger\\n*******************************\\n\\nThe module ``pdb`` defines an interactive source code debugger for\\nPython programs. 
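A minimal sketch of the ``__eq__()``/``__hash__()`` pairing discussed above (the ``Account`` class is invented; equal objects must hash equal):

    class Account:
        def __init__(self, number):
            self.number = number

        def __eq__(self, other):
            return isinstance(other, Account) and self.number == other.number

        def __hash__(self):
            # objects that compare equal must have equal hash values
            return hash(self.number)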
It supports setting (conditional) breakpoints and\\nsingle stepping at the source line level, inspection of stack frames,\\nsource code listing, and evaluation of arbitrary Python code in the\\ncontext of any stack frame. It also supports post-mortem debugging\\nand can be called under program control.\\n\\nThe debugger is extensible -- it is actually defined as the class\\n``Pdb``. This is currently undocumented but easily understood by\\nreading the source. The extension interface uses the modules ``bdb``\\nand ``cmd``.\\n\\nThe debugger\\'s prompt is ``(Pdb)``. Typical usage to run a program\\nunder control of the debugger is:\\n\\n >>> import pdb\\n >>> import mymodule\\n >>> pdb.run(\\'mymodule.test()\\')\\n > (0)?()\\n (Pdb) continue\\n > (1)?()\\n (Pdb) continue\\n NameError: \\'spam\\'\\n > (1)?()\\n (Pdb)\\n\\nChanged in version 3.3: Tab-completion via the ``readline`` module is\\navailable for commands and command arguments, e.g. the current global\\nand local names are offered as arguments of the ``print`` command.\\n\\n``pdb.py`` can also be invoked as a script to debug other scripts.\\nFor example:\\n\\n python3 -m pdb myscript.py\\n\\nWhen invoked as a script, pdb will automatically enter post-mortem\\ndebugging if the program being debugged exits abnormally. After post-\\nmortem debugging (or after normal exit of the program), pdb will\\nrestart the program. Automatic restarting preserves pdb\\'s state (such\\nas breakpoints) and in most cases is more useful than quitting the\\ndebugger upon program\\'s exit.\\n\\nNew in version 3.2: ``pdb.py`` now accepts a ``-c`` option that\\nexecutes commands as if given in a ``.pdbrc`` file, see *Debugger\\nCommands*.\\n\\nThe typical usage to break into the debugger from a running program is\\nto insert\\n\\n import pdb; pdb.set_trace()\\n\\nat the location you want to break into the debugger. You can then\\nstep through the code following this statement, and continue running\\nwithout the debugger using the ``continue`` command.\\n\\nThe typical usage to inspect a crashed program is:\\n\\n >>> import pdb\\n >>> import mymodule\\n >>> mymodule.test()\\n Traceback (most recent call last):\\n File \"\", line 1, in ?\\n File \"./mymodule.py\", line 4, in test\\n test2()\\n File \"./mymodule.py\", line 3, in test2\\n print(spam)\\n NameError: spam\\n >>> pdb.pm()\\n > ./mymodule.py(3)test2()\\n -> print(spam)\\n (Pdb)\\n\\nThe module defines the following functions; each enters the debugger\\nin a slightly different way:\\n\\npdb.run(statement, globals=None, locals=None)\\n\\n Execute the *statement* (given as a string or a code object) under\\n debugger control. The debugger prompt appears before any code is\\n executed; you can set breakpoints and type ``continue``, or you can\\n step through the statement using ``step`` or ``next`` (all these\\n commands are explained below). The optional *globals* and *locals*\\n arguments specify the environment in which the code is executed; by\\n default the dictionary of the module ``__main__`` is used. (See\\n the explanation of the built-in ``exec()`` or ``eval()``\\n functions.)\\n\\npdb.runeval(expression, globals=None, locals=None)\\n\\n Evaluate the *expression* (given as a string or a code object)\\n under debugger control. When ``runeval()`` returns, it returns the\\n value of the expression. Otherwise this function is similar to\\n ``run()``.\\n\\npdb.runcall(function, *args, **kwds)\\n\\n Call the *function* (a function or method object, not a string)\\n with the given arguments. 
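For example, a call under ``pdb.runcall()`` might look like the following sketch (``divide`` is just an example function):

    import pdb

    def divide(a, b):
        return a / b

    # the (Pdb) prompt appears as soon as divide() is entered; typing
    # ``continue`` lets the call finish
    result = pdb.runcall(divide, 10, 2)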
When ``runcall()`` returns, it returns\\n whatever the function call returned. The debugger prompt appears\\n as soon as the function is entered.\\n\\npdb.set_trace()\\n\\n Enter the debugger at the calling stack frame. This is useful to\\n hard-code a breakpoint at a given point in a program, even if the\\n code is not otherwise being debugged (e.g. when an assertion\\n fails).\\n\\npdb.post_mortem(traceback=None)\\n\\n Enter post-mortem debugging of the given *traceback* object. If no\\n *traceback* is given, it uses the one of the exception that is\\n currently being handled (an exception must be being handled if the\\n default is to be used).\\n\\npdb.pm()\\n\\n Enter post-mortem debugging of the traceback found in\\n ``sys.last_traceback``.\\n\\nThe ``run*`` functions and ``set_trace()`` are aliases for\\ninstantiating the ``Pdb`` class and calling the method of the same\\nname. If you want to access further features, you have to do this\\nyourself:\\n\\nclass class pdb.Pdb(completekey=\\'tab\\', stdin=None, stdout=None, skip=None, nosigint=False)\\n\\n ``Pdb`` is the debugger class.\\n\\n The *completekey*, *stdin* and *stdout* arguments are passed to the\\n underlying ``cmd.Cmd`` class; see the description there.\\n\\n The *skip* argument, if given, must be an iterable of glob-style\\n module name patterns. The debugger will not step into frames that\\n originate in a module that matches one of these patterns. [1]\\n\\n By default, Pdb sets a handler for the SIGINT signal (which is sent\\n when the user presses Ctrl-C on the console) when you give a\\n ``continue`` command. This allows you to break into the debugger\\n again by pressing Ctrl-C. If you want Pdb not to touch the SIGINT\\n handler, set *nosigint* tot true.\\n\\n Example call to enable tracing with *skip*:\\n\\n import pdb; pdb.Pdb(skip=[\\'django.*\\']).set_trace()\\n\\n New in version 3.1: The *skip* argument.\\n\\n New in version 3.2: The *nosigint* argument. Previously, a SIGINT\\n handler was never set by Pdb.\\n\\n run(statement, globals=None, locals=None)\\n runeval(expression, globals=None, locals=None)\\n runcall(function, *args, **kwds)\\n set_trace()\\n\\n See the documentation for the functions explained above.\\n\\n\\nDebugger Commands\\n=================\\n\\nThe commands recognized by the debugger are listed below. Most\\ncommands can be abbreviated to one or two letters as indicated; e.g.\\n``h(elp)`` means that either ``h`` or ``help`` can be used to enter\\nthe help command (but not ``he`` or ``hel``, nor ``H`` or ``Help`` or\\n``HELP``). Arguments to commands must be separated by whitespace\\n(spaces or tabs). Optional arguments are enclosed in square brackets\\n(``[]``) in the command syntax; the square brackets must not be typed.\\nAlternatives in the command syntax are separated by a vertical bar\\n(``|``).\\n\\nEntering a blank line repeats the last command entered. Exception: if\\nthe last command was a ``list`` command, the next 11 lines are listed.\\n\\nCommands that the debugger doesn\\'t recognize are assumed to be Python\\nstatements and are executed in the context of the program being\\ndebugged. Python statements can also be prefixed with an exclamation\\npoint (``!``). This is a powerful way to inspect the program being\\ndebugged; it is even possible to change a variable or call a function.\\nWhen an exception occurs in such a statement, the exception name is\\nprinted but the debugger\\'s state is not changed.\\n\\nThe debugger supports *aliases*. 
Aliases can have parameters which\\nallows one a certain level of adaptability to the context under\\nexamination.\\n\\nMultiple commands may be entered on a single line, separated by\\n``;;``. (A single ``;`` is not used as it is the separator for\\nmultiple commands in a line that is passed to the Python parser.) No\\nintelligence is applied to separating the commands; the input is split\\nat the first ``;;`` pair, even if it is in the middle of a quoted\\nstring.\\n\\nIf a file ``.pdbrc`` exists in the user\\'s home directory or in the\\ncurrent directory, it is read in and executed as if it had been typed\\nat the debugger prompt. This is particularly useful for aliases. If\\nboth files exist, the one in the home directory is read first and\\naliases defined there can be overridden by the local file.\\n\\nChanged in version 3.2: ``.pdbrc`` can now contain commands that\\ncontinue debugging, such as ``continue`` or ``next``. Previously,\\nthese commands had no effect.\\n\\nh(elp) [command]\\n\\n Without argument, print the list of available commands. With a\\n *command* as argument, print help about that command. ``help pdb``\\n displays the full documentation (the docstring of the ``pdb``\\n module). Since the *command* argument must be an identifier,\\n ``help exec`` must be entered to get help on the ``!`` command.\\n\\nw(here)\\n\\n Print a stack trace, with the most recent frame at the bottom. An\\n arrow indicates the current frame, which determines the context of\\n most commands.\\n\\nd(own) [count]\\n\\n Move the current frame *count* (default one) levels down in the\\n stack trace (to a newer frame).\\n\\nu(p) [count]\\n\\n Move the current frame *count* (default one) levels up in the stack\\n trace (to an older frame).\\n\\nb(reak) [([filename:]lineno | function) [, condition]]\\n\\n With a *lineno* argument, set a break there in the current file.\\n With a *function* argument, set a break at the first executable\\n statement within that function. The line number may be prefixed\\n with a filename and a colon, to specify a breakpoint in another\\n file (probably one that hasn\\'t been loaded yet). The file is\\n searched on ``sys.path``. Note that each breakpoint is assigned a\\n number to which all the other breakpoint commands refer.\\n\\n If a second argument is present, it is an expression which must\\n evaluate to true before the breakpoint is honored.\\n\\n Without argument, list all breaks, including for each breakpoint,\\n the number of times that breakpoint has been hit, the current\\n ignore count, and the associated condition if any.\\n\\ntbreak [([filename:]lineno | function) [, condition]]\\n\\n Temporary breakpoint, which is removed automatically when it is\\n first hit. The arguments are the same as for ``break``.\\n\\ncl(ear) [filename:lineno | bpnumber [bpnumber ...]]\\n\\n With a *filename:lineno* argument, clear all the breakpoints at\\n this line. With a space separated list of breakpoint numbers, clear\\n those breakpoints. Without argument, clear all breaks (but first\\n ask confirmation).\\n\\ndisable [bpnumber [bpnumber ...]]\\n\\n Disable the breakpoints given as a space separated list of\\n breakpoint numbers. Disabling a breakpoint means it cannot cause\\n the program to stop execution, but unlike clearing a breakpoint, it\\n remains in the list of breakpoints and can be (re-)enabled.\\n\\nenable [bpnumber [bpnumber ...]]\\n\\n Enable the breakpoints specified.\\n\\nignore bpnumber [count]\\n\\n Set the ignore count for the given breakpoint number. 
If count is\\n omitted, the ignore count is set to 0. A breakpoint becomes active\\n when the ignore count is zero. When non-zero, the count is\\n decremented each time the breakpoint is reached and the breakpoint\\n is not disabled and any associated condition evaluates to true.\\n\\ncondition bpnumber [condition]\\n\\n Set a new *condition* for the breakpoint, an expression which must\\n evaluate to true before the breakpoint is honored. If *condition*\\n is absent, any existing condition is removed; i.e., the breakpoint\\n is made unconditional.\\n\\ncommands [bpnumber]\\n\\n Specify a list of commands for breakpoint number *bpnumber*. The\\n commands themselves appear on the following lines. Type a line\\n containing just ``end`` to terminate the commands. An example:\\n\\n (Pdb) commands 1\\n (com) print some_variable\\n (com) end\\n (Pdb)\\n\\n To remove all commands from a breakpoint, type commands and follow\\n it immediately with ``end``; that is, give no commands.\\n\\n With no *bpnumber* argument, commands refers to the last breakpoint\\n set.\\n\\n You can use breakpoint commands to start your program up again.\\n Simply use the continue command, or step, or any other command that\\n resumes execution.\\n\\n Specifying any command resuming execution (currently continue,\\n step, next, return, jump, quit and their abbreviations) terminates\\n the command list (as if that command was immediately followed by\\n end). This is because any time you resume execution (even with a\\n simple next or step), you may encounter another breakpoint--which\\n could have its own command list, leading to ambiguities about which\\n list to execute.\\n\\n If you use the \\'silent\\' command in the command list, the usual\\n message about stopping at a breakpoint is not printed. This may be\\n desirable for breakpoints that are to print a specific message and\\n then continue. If none of the other commands print anything, you\\n see no sign that the breakpoint was reached.\\n\\ns(tep)\\n\\n Execute the current line, stop at the first possible occasion\\n (either in a function that is called or on the next line in the\\n current function).\\n\\nn(ext)\\n\\n Continue execution until the next line in the current function is\\n reached or it returns. (The difference between ``next`` and\\n ``step`` is that ``step`` stops inside a called function, while\\n ``next`` executes called functions at (nearly) full speed, only\\n stopping at the next line in the current function.)\\n\\nunt(il) [lineno]\\n\\n Without argument, continue execution until the line with a number\\n greater than the current one is reached.\\n\\n With a line number, continue execution until a line with a number\\n greater or equal to that is reached. In both cases, also stop when\\n the current frame returns.\\n\\n Changed in version 3.2: Allow giving an explicit line number.\\n\\nr(eturn)\\n\\n Continue execution until the current function returns.\\n\\nc(ont(inue))\\n\\n Continue execution, only stop when a breakpoint is encountered.\\n\\nj(ump) lineno\\n\\n Set the next line that will be executed. Only available in the\\n bottom-most frame. This lets you jump back and execute code again,\\n or jump forward to skip code that you don\\'t want to run.\\n\\n It should be noted that not all jumps are allowed -- for instance\\n it is not possible to jump into the middle of a ``for`` loop or out\\n of a ``finally`` clause.\\n\\nl(ist) [first[, last]]\\n\\n List source code for the current file. 
Without arguments, list 11\\n lines around the current line or continue the previous listing.\\n With ``.`` as argument, list 11 lines around the current line.\\n With one argument, list 11 lines around at that line. With two\\n arguments, list the given range; if the second argument is less\\n than the first, it is interpreted as a count.\\n\\n The current line in the current frame is indicated by ``->``. If\\n an exception is being debugged, the line where the exception was\\n originally raised or propagated is indicated by ``>>``, if it\\n differs from the current line.\\n\\n New in version 3.2: The ``>>`` marker.\\n\\nll | longlist\\n\\n List all source code for the current function or frame.\\n Interesting lines are marked as for ``list``.\\n\\n New in version 3.2.\\n\\na(rgs)\\n\\n Print the argument list of the current function.\\n\\np(rint) expression\\n\\n Evaluate the *expression* in the current context and print its\\n value.\\n\\npp expression\\n\\n Like the ``print`` command, except the value of the expression is\\n pretty-printed using the ``pprint`` module.\\n\\nwhatis expression\\n\\n Print the type of the *expression*.\\n\\nsource expression\\n\\n Try to get source code for the given object and display it.\\n\\n New in version 3.2.\\n\\ndisplay [expression]\\n\\n Display the value of the expression if it changed, each time\\n execution stops in the current frame.\\n\\n Without expression, list all display expressions for the current\\n frame.\\n\\n New in version 3.2.\\n\\nundisplay [expression]\\n\\n Do not display the expression any more in the current frame.\\n Without expression, clear all display expressions for the current\\n frame.\\n\\n New in version 3.2.\\n\\ninteract\\n\\n Start an interative interpreter (using the ``code`` module) whose\\n global namespace contains all the (global and local) names found in\\n the current scope.\\n\\n New in version 3.2.\\n\\nalias [name [command]]\\n\\n Create an alias called *name* that executes *command*. The command\\n must *not* be enclosed in quotes. Replaceable parameters can be\\n indicated by ``%1``, ``%2``, and so on, while ``%*`` is replaced by\\n all the parameters. If no command is given, the current alias for\\n *name* is shown. If no arguments are given, all aliases are listed.\\n\\n Aliases may be nested and can contain anything that can be legally\\n typed at the pdb prompt. Note that internal pdb commands *can* be\\n overridden by aliases. Such a command is then hidden until the\\n alias is removed. Aliasing is recursively applied to the first\\n word of the command line; all other words in the line are left\\n alone.\\n\\n As an example, here are two useful aliases (especially when placed\\n in the ``.pdbrc`` file):\\n\\n # Print instance variables (usage \"pi classInst\")\\n alias pi for k in %1.__dict__.keys(): print(\"%1.\",k,\"=\",%1.__dict__[k])\\n # Print instance variables in self\\n alias ps pi self\\n\\nunalias name\\n\\n Delete the specified alias.\\n\\n! statement\\n\\n Execute the (one-line) *statement* in the context of the current\\n stack frame. The exclamation point can be omitted unless the first\\n word of the statement resembles a debugger command. To set a\\n global variable, you can prefix the assignment command with a\\n ``global`` statement on the same line, e.g.:\\n\\n (Pdb) global list_options; list_options = [\\'-l\\']\\n (Pdb)\\n\\nrun [args ...]\\nrestart [args ...]\\n\\n Restart the debugged Python program. 
If an argument is supplied,\\n it is split with ``shlex`` and the result is used as the new\\n ``sys.argv``. History, breakpoints, actions and debugger options\\n are preserved. ``restart`` is an alias for ``run``.\\n\\nq(uit)\\n\\n Quit from the debugger. The program being executed is aborted.\\n\\n-[ Footnotes ]-\\n\\n[1] Whether a frame is considered to originate in a certain module is\\n determined by the ``__name__`` in the frame globals.\\n',\n'del':'\\nThe ``del`` statement\\n*********************\\n\\n del_stmt ::= \"del\" target_list\\n\\nDeletion is recursively defined very similar to the way assignment is\\ndefined. Rather than spelling it out in full details, here are some\\nhints.\\n\\nDeletion of a target list recursively deletes each target, from left\\nto right.\\n\\nDeletion of a name removes the binding of that name from the local or\\nglobal namespace, depending on whether the name occurs in a ``global``\\nstatement in the same code block. If the name is unbound, a\\n``NameError`` exception will be raised.\\n\\nDeletion of attribute references, subscriptions and slicings is passed\\nto the primary object involved; deletion of a slicing is in general\\nequivalent to assignment of an empty slice of the right type (but even\\nthis is determined by the sliced object).\\n\\nChanged in version 3.2: Previously it was illegal to delete a name\\nfrom the local namespace if it occurs as a free variable in a nested\\nblock.\\n',\n'dict':'\\nDictionary displays\\n*******************\\n\\nA dictionary display is a possibly empty series of key/datum pairs\\nenclosed in curly braces:\\n\\n dict_display ::= \"{\" [key_datum_list | dict_comprehension] \"}\"\\n key_datum_list ::= key_datum (\",\" key_datum)* [\",\"]\\n key_datum ::= expression \":\" expression\\n dict_comprehension ::= expression \":\" expression comp_for\\n\\nA dictionary display yields a new dictionary object.\\n\\nIf a comma-separated sequence of key/datum pairs is given, they are\\nevaluated from left to right to define the entries of the dictionary:\\neach key object is used as a key into the dictionary to store the\\ncorresponding datum. This means that you can specify the same key\\nmultiple times in the key/datum list, and the final dictionary\\'s value\\nfor that key will be the last one given.\\n\\nA dict comprehension, in contrast to list and set comprehensions,\\nneeds two expressions separated with a colon followed by the usual\\n\"for\" and \"if\" clauses. When the comprehension is run, the resulting\\nkey and value elements are inserted in the new dictionary in the order\\nthey are produced.\\n\\nRestrictions on the types of the key values are listed earlier in\\nsection *The standard type hierarchy*. (To summarize, the key type\\nshould be *hashable*, which excludes all mutable objects.) Clashes\\nbetween duplicate keys are not detected; the last datum (textually\\nrightmost in the display) stored for a given key value prevails.\\n',\n'dynamic-features':'\\nInteraction with dynamic features\\n*********************************\\n\\nThere are several cases where Python statements are illegal when used\\nin conjunction with nested scopes that contain free variables.\\n\\nIf a variable is referenced in an enclosing scope, it is illegal to\\ndelete the name. 
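As a brief illustration of the dictionary display rules above (the values are invented):

    squares = {x: x * x for x in range(4)}   # {0: 0, 1: 1, 2: 4, 3: 9}
    d = {"spam": 1, "spam": 2}               # duplicate key: the last datum wins, {"spam": 2}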
An error will be reported at compile time.\\n\\nIf the wild card form of import --- ``import *`` --- is used in a\\nfunction and the function contains or is a nested block with free\\nvariables, the compiler will raise a ``SyntaxError``.\\n\\nThe ``eval()`` and ``exec()`` functions do not have access to the full\\nenvironment for resolving names. Names may be resolved in the local\\nand global namespaces of the caller. Free variables are not resolved\\nin the nearest enclosing namespace, but in the global namespace. [1]\\nThe ``exec()`` and ``eval()`` functions have optional arguments to\\noverride the global and local namespace. If only one namespace is\\nspecified, it is used for both.\\n',\n'else':'\\nThe ``if`` statement\\n********************\\n\\nThe ``if`` statement is used for conditional execution:\\n\\n if_stmt ::= \"if\" expression \":\" suite\\n ( \"elif\" expression \":\" suite )*\\n [\"else\" \":\" suite]\\n\\nIt selects exactly one of the suites by evaluating the expressions one\\nby one until one is found to be true (see section *Boolean operations*\\nfor the definition of true and false); then that suite is executed\\n(and no other part of the ``if`` statement is executed or evaluated).\\nIf all expressions are false, the suite of the ``else`` clause, if\\npresent, is executed.\\n',\n'exceptions':'\\nExceptions\\n**********\\n\\nExceptions are a means of breaking out of the normal flow of control\\nof a code block in order to handle errors or other exceptional\\nconditions. An exception is *raised* at the point where the error is\\ndetected; it may be *handled* by the surrounding code block or by any\\ncode block that directly or indirectly invoked the code block where\\nthe error occurred.\\n\\nThe Python interpreter raises an exception when it detects a run-time\\nerror (such as division by zero). A Python program can also\\nexplicitly raise an exception with the ``raise`` statement. Exception\\nhandlers are specified with the ``try`` ... ``except`` statement. The\\n``finally`` clause of such a statement can be used to specify cleanup\\ncode which does not handle the exception, but is executed whether an\\nexception occurred or not in the preceding code.\\n\\nPython uses the \"termination\" model of error handling: an exception\\nhandler can find out what happened and continue execution at an outer\\nlevel, but it cannot repair the cause of the error and retry the\\nfailing operation (except by re-entering the offending piece of code\\nfrom the top).\\n\\nWhen an exception is not handled at all, the interpreter terminates\\nexecution of the program, or returns to its interactive main loop. In\\neither case, it prints a stack backtrace, except when the exception is\\n``SystemExit``.\\n\\nExceptions are identified by class instances. The ``except`` clause\\nis selected depending on the class of the instance: it must reference\\nthe class of the instance or a base class thereof. The instance can\\nbe received by the handler and can carry additional information about\\nthe exceptional condition.\\n\\nNote: Exception messages are not part of the Python API. 
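A short sketch of the single-namespace behaviour of ``exec()`` described above:

    ns = {}
    exec("x = 1 + 2", ns)    # only one namespace given: used as both globals and locals
    ns["x"]                  # 3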
Their contents\\n may change from one version of Python to the next without warning\\n and should not be relied on by code which will run under multiple\\n versions of the interpreter.\\n\\nSee also the description of the ``try`` statement in section *The try\\nstatement* and ``raise`` statement in section *The raise statement*.\\n\\n-[ Footnotes ]-\\n\\n[1] This limitation occurs because the code that is executed by these\\n operations is not available at the time the module is compiled.\\n',\n'execmodel':'\\nExecution model\\n***************\\n\\n\\nNaming and binding\\n==================\\n\\n*Names* refer to objects. Names are introduced by name binding\\noperations. Each occurrence of a name in the program text refers to\\nthe *binding* of that name established in the innermost function block\\ncontaining the use.\\n\\nA *block* is a piece of Python program text that is executed as a\\nunit. The following are blocks: a module, a function body, and a class\\ndefinition. Each command typed interactively is a block. A script\\nfile (a file given as standard input to the interpreter or specified\\non the interpreter command line the first argument) is a code block.\\nA script command (a command specified on the interpreter command line\\nwith the \\'**-c**\\' option) is a code block. The string argument passed\\nto the built-in functions ``eval()`` and ``exec()`` is a code block.\\n\\nA code block is executed in an *execution frame*. A frame contains\\nsome administrative information (used for debugging) and determines\\nwhere and how execution continues after the code block\\'s execution has\\ncompleted.\\n\\nA *scope* defines the visibility of a name within a block. If a local\\nvariable is defined in a block, its scope includes that block. If the\\ndefinition occurs in a function block, the scope extends to any blocks\\ncontained within the defining one, unless a contained block introduces\\na different binding for the name. The scope of names defined in a\\nclass block is limited to the class block; it does not extend to the\\ncode blocks of methods -- this includes comprehensions and generator\\nexpressions since they are implemented using a function scope. This\\nmeans that the following will fail:\\n\\n class A:\\n a = 42\\n b = list(a + i for i in range(10))\\n\\nWhen a name is used in a code block, it is resolved using the nearest\\nenclosing scope. The set of all such scopes visible to a code block\\nis called the block\\'s *environment*.\\n\\nIf a name is bound in a block, it is a local variable of that block,\\nunless declared as ``nonlocal``. If a name is bound at the module\\nlevel, it is a global variable. (The variables of the module code\\nblock are local and global.) If a variable is used in a code block\\nbut not defined there, it is a *free variable*.\\n\\nWhen a name is not found at all, a ``NameError`` exception is raised.\\nIf the name refers to a local variable that has not been bound, a\\n``UnboundLocalError`` exception is raised. ``UnboundLocalError`` is a\\nsubclass of ``NameError``.\\n\\nThe following constructs bind names: formal parameters to functions,\\n``import`` statements, class and function definitions (these bind the\\nclass or function name in the defining block), and targets that are\\nidentifiers if occurring in an assignment, ``for`` loop header, or\\nafter ``as`` in a ``with`` statement or ``except`` clause. The\\n``import`` statement of the form ``from ... 
import *`` binds all names\\ndefined in the imported module, except those beginning with an\\nunderscore. This form may only be used at the module level.\\n\\nA target occurring in a ``del`` statement is also considered bound for\\nthis purpose (though the actual semantics are to unbind the name).\\n\\nEach assignment or import statement occurs within a block defined by a\\nclass or function definition or at the module level (the top-level\\ncode block).\\n\\nIf a name binding operation occurs anywhere within a code block, all\\nuses of the name within the block are treated as references to the\\ncurrent block. This can lead to errors when a name is used within a\\nblock before it is bound. This rule is subtle. Python lacks\\ndeclarations and allows name binding operations to occur anywhere\\nwithin a code block. The local variables of a code block can be\\ndetermined by scanning the entire text of the block for name binding\\noperations.\\n\\nIf the ``global`` statement occurs within a block, all uses of the\\nname specified in the statement refer to the binding of that name in\\nthe top-level namespace. Names are resolved in the top-level\\nnamespace by searching the global namespace, i.e. the namespace of the\\nmodule containing the code block, and the builtins namespace, the\\nnamespace of the module ``builtins``. The global namespace is\\nsearched first. If the name is not found there, the builtins\\nnamespace is searched. The global statement must precede all uses of\\nthe name.\\n\\nThe builtins namespace associated with the execution of a code block\\nis actually found by looking up the name ``__builtins__`` in its\\nglobal namespace; this should be a dictionary or a module (in the\\nlatter case the module\\'s dictionary is used). By default, when in the\\n``__main__`` module, ``__builtins__`` is the built-in module\\n``builtins``; when in any other module, ``__builtins__`` is an alias\\nfor the dictionary of the ``builtins`` module itself.\\n``__builtins__`` can be set to a user-created dictionary to create a\\nweak form of restricted execution.\\n\\n**CPython implementation detail:** Users should not touch\\n``__builtins__``; it is strictly an implementation detail. Users\\nwanting to override values in the builtins namespace should ``import``\\nthe ``builtins`` module and modify its attributes appropriately.\\n\\nThe namespace for a module is automatically created the first time a\\nmodule is imported. The main module for a script is always called\\n``__main__``.\\n\\nThe ``global`` statement has the same scope as a name binding\\noperation in the same block. If the nearest enclosing scope for a\\nfree variable contains a global statement, the free variable is\\ntreated as a global.\\n\\nA class definition is an executable statement that may use and define\\nnames. These references follow the normal rules for name resolution.\\nThe namespace of the class definition becomes the attribute dictionary\\nof the class. Names defined at the class scope are not visible in\\nmethods.\\n\\n\\nInteraction with dynamic features\\n---------------------------------\\n\\nThere are several cases where Python statements are illegal when used\\nin conjunction with nested scopes that contain free variables.\\n\\nIf a variable is referenced in an enclosing scope, it is illegal to\\ndelete the name. 
An error will be reported at compile time.\\n\\nIf the wild card form of import --- ``import *`` --- is used in a\\nfunction and the function contains or is a nested block with free\\nvariables, the compiler will raise a ``SyntaxError``.\\n\\nThe ``eval()`` and ``exec()`` functions do not have access to the full\\nenvironment for resolving names. Names may be resolved in the local\\nand global namespaces of the caller. Free variables are not resolved\\nin the nearest enclosing namespace, but in the global namespace. [1]\\nThe ``exec()`` and ``eval()`` functions have optional arguments to\\noverride the global and local namespace. If only one namespace is\\nspecified, it is used for both.\\n\\n\\nExceptions\\n==========\\n\\nExceptions are a means of breaking out of the normal flow of control\\nof a code block in order to handle errors or other exceptional\\nconditions. An exception is *raised* at the point where the error is\\ndetected; it may be *handled* by the surrounding code block or by any\\ncode block that directly or indirectly invoked the code block where\\nthe error occurred.\\n\\nThe Python interpreter raises an exception when it detects a run-time\\nerror (such as division by zero). A Python program can also\\nexplicitly raise an exception with the ``raise`` statement. Exception\\nhandlers are specified with the ``try`` ... ``except`` statement. The\\n``finally`` clause of such a statement can be used to specify cleanup\\ncode which does not handle the exception, but is executed whether an\\nexception occurred or not in the preceding code.\\n\\nPython uses the \"termination\" model of error handling: an exception\\nhandler can find out what happened and continue execution at an outer\\nlevel, but it cannot repair the cause of the error and retry the\\nfailing operation (except by re-entering the offending piece of code\\nfrom the top).\\n\\nWhen an exception is not handled at all, the interpreter terminates\\nexecution of the program, or returns to its interactive main loop. In\\neither case, it prints a stack backtrace, except when the exception is\\n``SystemExit``.\\n\\nExceptions are identified by class instances. The ``except`` clause\\nis selected depending on the class of the instance: it must reference\\nthe class of the instance or a base class thereof. The instance can\\nbe received by the handler and can carry additional information about\\nthe exceptional condition.\\n\\nNote: Exception messages are not part of the Python API. Their contents\\n may change from one version of Python to the next without warning\\n and should not be relied on by code which will run under multiple\\n versions of the interpreter.\\n\\nSee also the description of the ``try`` statement in section *The try\\nstatement* and ``raise`` statement in section *The raise statement*.\\n\\n-[ Footnotes ]-\\n\\n[1] This limitation occurs because the code that is executed by these\\n operations is not available at the time the module is compiled.\\n',\n'exprlists':'\\nExpression lists\\n****************\\n\\n expression_list ::= expression ( \",\" expression )* [\",\"]\\n\\nAn expression list containing at least one comma yields a tuple. The\\nlength of the tuple is the number of expressions in the list. The\\nexpressions are evaluated from left to right.\\n\\nThe trailing comma is required only to create a single tuple (a.k.a. a\\n*singleton*); it is optional in all other cases. A single expression\\nwithout a trailing comma doesn\\'t create a tuple, but rather yields the\\nvalue of that expression. 
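For instance:

    t = 1, 2, 3     # an expression list with commas yields the tuple (1, 2, 3)
    single = 4,     # the trailing comma makes a one-item tuple: (4,)
    value = (5)     # just the integer 5; parentheses alone do not make a tuple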
(To create an empty tuple, use an empty pair\\nof parentheses: ``()``.)\\n',\n'floating':'\\nFloating point literals\\n***********************\\n\\nFloating point literals are described by the following lexical\\ndefinitions:\\n\\n floatnumber ::= pointfloat | exponentfloat\\n pointfloat ::= [intpart] fraction | intpart \".\"\\n exponentfloat ::= (intpart | pointfloat) exponent\\n intpart ::= digit+\\n fraction ::= \".\" digit+\\n exponent ::= (\"e\" | \"E\") [\"+\" | \"-\"] digit+\\n\\nNote that the integer and exponent parts are always interpreted using\\nradix 10. For example, ``077e010`` is legal, and denotes the same\\nnumber as ``77e10``. The allowed range of floating point literals is\\nimplementation-dependent. Some examples of floating point literals:\\n\\n 3.14 10. .001 1e100 3.14e-10 0e0\\n\\nNote that numeric literals do not include a sign; a phrase like ``-1``\\nis actually an expression composed of the unary operator ``-`` and the\\nliteral ``1``.\\n',\n'for':'\\nThe ``for`` statement\\n*********************\\n\\nThe ``for`` statement is used to iterate over the elements of a\\nsequence (such as a string, tuple or list) or other iterable object:\\n\\n for_stmt ::= \"for\" target_list \"in\" expression_list \":\" suite\\n [\"else\" \":\" suite]\\n\\nThe expression list is evaluated once; it should yield an iterable\\nobject. An iterator is created for the result of the\\n``expression_list``. The suite is then executed once for each item\\nprovided by the iterator, in the order of ascending indices. Each\\nitem in turn is assigned to the target list using the standard rules\\nfor assignments (see *Assignment statements*), and then the suite is\\nexecuted. When the items are exhausted (which is immediately when the\\nsequence is empty or an iterator raises a ``StopIteration``\\nexception), the suite in the ``else`` clause, if present, is executed,\\nand the loop terminates.\\n\\nA ``break`` statement executed in the first suite terminates the loop\\nwithout executing the ``else`` clause\\'s suite. A ``continue``\\nstatement executed in the first suite skips the rest of the suite and\\ncontinues with the next item, or with the ``else`` clause if there was\\nno next item.\\n\\nThe suite may assign to the variable(s) in the target list; this does\\nnot affect the next item assigned to it.\\n\\nNames in the target list are not deleted when the loop is finished,\\nbut if the sequence is empty, it will not have been assigned to at all\\nby the loop. Hint: the built-in function ``range()`` returns an\\niterator of integers suitable to emulate the effect of Pascal\\'s ``for\\ni := a to b do``; e.g., ``list(range(3))`` returns the list ``[0, 1,\\n2]``.\\n\\nNote: There is a subtlety when the sequence is being modified by the loop\\n (this can only occur for mutable sequences, i.e. lists). An\\n internal counter is used to keep track of which item is used next,\\n and this is incremented on each iteration. When this counter has\\n reached the length of the sequence the loop terminates. This means\\n that if the suite deletes the current (or a previous) item from the\\n sequence, the next item will be skipped (since it gets the index of\\n the current item which has already been treated). 
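For instance (the list is purely illustrative):

    a = [1, -2, -3, 4]
    for x in a:
        if x < 0:
            a.remove(x)    # the item that follows each removed one is skipped
    # a is now [1, -3, 4] rather than the intended [1, 4]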
Likewise, if the\\n suite inserts an item in the sequence before the current item, the\\n current item will be treated again the next time through the loop.\\n This can lead to nasty bugs that can be avoided by making a\\n temporary copy using a slice of the whole sequence, e.g.,\\n\\n for x in a[:]:\\n if x < 0: a.remove(x)\\n',\n'formatstrings':'\\nFormat String Syntax\\n********************\\n\\nThe ``str.format()`` method and the ``Formatter`` class share the same\\nsyntax for format strings (although in the case of ``Formatter``,\\nsubclasses can define their own format string syntax).\\n\\nFormat strings contain \"replacement fields\" surrounded by curly braces\\n``{}``. Anything that is not contained in braces is considered literal\\ntext, which is copied unchanged to the output. If you need to include\\na brace character in the literal text, it can be escaped by doubling:\\n``{{`` and ``}}``.\\n\\nThe grammar for a replacement field is as follows:\\n\\n replacement_field ::= \"{\" [field_name] [\"!\" conversion] [\":\" format_spec] \"}\"\\n field_name ::= arg_name (\".\" attribute_name | \"[\" element_index \"]\")*\\n arg_name ::= [identifier | integer]\\n attribute_name ::= identifier\\n element_index ::= integer | index_string\\n index_string ::= +\\n conversion ::= \"r\" | \"s\" | \"a\"\\n format_spec ::= \\n\\nIn less formal terms, the replacement field can start with a\\n*field_name* that specifies the object whose value is to be formatted\\nand inserted into the output instead of the replacement field. The\\n*field_name* is optionally followed by a *conversion* field, which is\\npreceded by an exclamation point ``\\'!\\'``, and a *format_spec*, which\\nis preceded by a colon ``\\':\\'``. These specify a non-default format\\nfor the replacement value.\\n\\nSee also the *Format Specification Mini-Language* section.\\n\\nThe *field_name* itself begins with an *arg_name* that is either a\\nnumber or a keyword. If it\\'s a number, it refers to a positional\\nargument, and if it\\'s a keyword, it refers to a named keyword\\nargument. If the numerical arg_names in a format string are 0, 1, 2,\\n... in sequence, they can all be omitted (not just some) and the\\nnumbers 0, 1, 2, ... will be automatically inserted in that order.\\nBecause *arg_name* is not quote-delimited, it is not possible to\\nspecify arbitrary dictionary keys (e.g., the strings ``\\'10\\'`` or\\n``\\':-]\\'``) within a format string. The *arg_name* can be followed by\\nany number of index or attribute expressions. An expression of the\\nform ``\\'.name\\'`` selects the named attribute using ``getattr()``,\\nwhile an expression of the form ``\\'[index]\\'`` does an index lookup\\nusing ``__getitem__()``.\\n\\nChanged in version 3.1: The positional argument specifiers can be\\nomitted, so ``\\'{} {}\\'`` is equivalent to ``\\'{0} {1}\\'``.\\n\\nSome simple format string examples:\\n\\n \"First, thou shalt count to {0}\" # References first positional argument\\n \"Bring me a {}\" # Implicitly references the first positional argument\\n \"From {} to {}\" # Same as \"From {0} to {1}\"\\n \"My quest is {name}\" # References keyword argument \\'name\\'\\n \"Weight in tons {0.weight}\" # \\'weight\\' attribute of first positional arg\\n \"Units destroyed: {players[0]}\" # First element of keyword argument \\'players\\'.\\n\\nThe *conversion* field causes a type coercion before formatting.\\nNormally, the job of formatting a value is done by the\\n``__format__()`` method of the value itself. 
However, in some cases\\nit is desirable to force a type to be formatted as a string,\\noverriding its own definition of formatting. By converting the value\\nto a string before calling ``__format__()``, the normal formatting\\nlogic is bypassed.\\n\\nThree conversion flags are currently supported: ``\\'!s\\'`` which calls\\n``str()`` on the value, ``\\'!r\\'`` which calls ``repr()`` and ``\\'!a\\'``\\nwhich calls ``ascii()``.\\n\\nSome examples:\\n\\n \"Harold\\'s a clever {0!s}\" # Calls str() on the argument first\\n \"Bring out the holy {name!r}\" # Calls repr() on the argument first\\n \"More {!a}\" # Calls ascii() on the argument first\\n\\nThe *format_spec* field contains a specification of how the value\\nshould be presented, including such details as field width, alignment,\\npadding, decimal precision and so on. Each value type can define its\\nown \"formatting mini-language\" or interpretation of the *format_spec*.\\n\\nMost built-in types support a common formatting mini-language, which\\nis described in the next section.\\n\\nA *format_spec* field can also include nested replacement fields\\nwithin it. These nested replacement fields can contain only a field\\nname; conversion flags and format specifications are not allowed. The\\nreplacement fields within the format_spec are substituted before the\\n*format_spec* string is interpreted. This allows the formatting of a\\nvalue to be dynamically specified.\\n\\nSee the *Format examples* section for some examples.\\n\\n\\nFormat Specification Mini-Language\\n==================================\\n\\n\"Format specifications\" are used within replacement fields contained\\nwithin a format string to define how individual values are presented\\n(see *Format String Syntax*). They can also be passed directly to the\\nbuilt-in ``format()`` function. Each formattable type may define how\\nthe format specification is to be interpreted.\\n\\nMost built-in types implement the following options for format\\nspecifications, although some of the formatting options are only\\nsupported by the numeric types.\\n\\nA general convention is that an empty format string (``\"\"``) produces\\nthe same result as if you had called ``str()`` on the value. A non-\\nempty format string typically modifies the result.\\n\\nThe general form of a *standard format specifier* is:\\n\\n format_spec ::= [[fill]align][sign][#][0][width][,][.precision][type]\\n fill ::= \\n align ::= \"<\" | \">\" | \"=\" | \"^\"\\n sign ::= \"+\" | \"-\" | \" \"\\n width ::= integer\\n precision ::= integer\\n type ::= \"b\" | \"c\" | \"d\" | \"e\" | \"E\" | \"f\" | \"F\" | \"g\" | \"G\" | \"n\" | \"o\" | \"s\" | \"x\" | \"X\" | \"%\"\\n\\nThe *fill* character can be any character other than \\'{\\' or \\'}\\'. The\\npresence of a fill character is signaled by the character following\\nit, which must be one of the alignment options. If the second\\ncharacter of *format_spec* is not a valid alignment option, then it is\\nassumed that both the fill character and the alignment option are\\nabsent.\\n\\nThe meaning of the various alignment options is as follows:\\n\\n +-----------+------------------------------------------------------------+\\n | Option | Meaning |\\n +===========+============================================================+\\n | ``\\'<\\'`` | Forces the field to be left-aligned within the available |\\n | | space (this is the default for most objects). 
|\\n +-----------+------------------------------------------------------------+\\n | ``\\'>\\'`` | Forces the field to be right-aligned within the available |\\n | | space (this is the default for numbers). |\\n +-----------+------------------------------------------------------------+\\n | ``\\'=\\'`` | Forces the padding to be placed after the sign (if any) |\\n | | but before the digits. This is used for printing fields |\\n | | in the form \\'+000000120\\'. This alignment option is only |\\n | | valid for numeric types. |\\n +-----------+------------------------------------------------------------+\\n | ``\\'^\\'`` | Forces the field to be centered within the available |\\n | | space. |\\n +-----------+------------------------------------------------------------+\\n\\nNote that unless a minimum field width is defined, the field width\\nwill always be the same size as the data to fill it, so that the\\nalignment option has no meaning in this case.\\n\\nThe *sign* option is only valid for number types, and can be one of\\nthe following:\\n\\n +-----------+------------------------------------------------------------+\\n | Option | Meaning |\\n +===========+============================================================+\\n | ``\\'+\\'`` | indicates that a sign should be used for both positive as |\\n | | well as negative numbers. |\\n +-----------+------------------------------------------------------------+\\n | ``\\'-\\'`` | indicates that a sign should be used only for negative |\\n | | numbers (this is the default behavior). |\\n +-----------+------------------------------------------------------------+\\n | space | indicates that a leading space should be used on positive |\\n | | numbers, and a minus sign on negative numbers. |\\n +-----------+------------------------------------------------------------+\\n\\nThe ``\\'#\\'`` option causes the \"alternate form\" to be used for the\\nconversion. The alternate form is defined differently for different\\ntypes. This option is only valid for integer, float, complex and\\nDecimal types. For integers, when binary, octal, or hexadecimal output\\nis used, this option adds the prefix respective ``\\'0b\\'``, ``\\'0o\\'``, or\\n``\\'0x\\'`` to the output value. For floats, complex and Decimal the\\nalternate form causes the result of the conversion to always contain a\\ndecimal-point character, even if no digits follow it. Normally, a\\ndecimal-point character appears in the result of these conversions\\nonly if a digit follows it. In addition, for ``\\'g\\'`` and ``\\'G\\'``\\nconversions, trailing zeros are not removed from the result.\\n\\nThe ``\\',\\'`` option signals the use of a comma for a thousands\\nseparator. For a locale aware separator, use the ``\\'n\\'`` integer\\npresentation type instead.\\n\\nChanged in version 3.1: Added the ``\\',\\'`` option (see also **PEP\\n378**).\\n\\n*width* is a decimal integer defining the minimum field width. If not\\nspecified, then the field width will be determined by the content.\\n\\nPreceding the *width* field by a zero (``\\'0\\'``) character enables\\nsign-aware zero-padding for numeric types. 
This is equivalent to a\\n*fill* character of ``\\'0\\'`` with an *alignment* type of ``\\'=\\'``.\\n\\nThe *precision* is a decimal number indicating how many digits should\\nbe displayed after the decimal point for a floating point value\\nformatted with ``\\'f\\'`` and ``\\'F\\'``, or before and after the decimal\\npoint for a floating point value formatted with ``\\'g\\'`` or ``\\'G\\'``.\\nFor non-number types the field indicates the maximum field size - in\\nother words, how many characters will be used from the field content.\\nThe *precision* is not allowed for integer values.\\n\\nFinally, the *type* determines how the data should be presented.\\n\\nThe available string presentation types are:\\n\\n +-----------+------------------------------------------------------------+\\n | Type | Meaning |\\n +===========+============================================================+\\n | ``\\'s\\'`` | String format. This is the default type for strings and |\\n | | may be omitted. |\\n +-----------+------------------------------------------------------------+\\n | None | The same as ``\\'s\\'``. |\\n +-----------+------------------------------------------------------------+\\n\\nThe available integer presentation types are:\\n\\n +-----------+------------------------------------------------------------+\\n | Type | Meaning |\\n +===========+============================================================+\\n | ``\\'b\\'`` | Binary format. Outputs the number in base 2. |\\n +-----------+------------------------------------------------------------+\\n | ``\\'c\\'`` | Character. Converts the integer to the corresponding |\\n | | unicode character before printing. |\\n +-----------+------------------------------------------------------------+\\n | ``\\'d\\'`` | Decimal Integer. Outputs the number in base 10. |\\n +-----------+------------------------------------------------------------+\\n | ``\\'o\\'`` | Octal format. Outputs the number in base 8. |\\n +-----------+------------------------------------------------------------+\\n | ``\\'x\\'`` | Hex format. Outputs the number in base 16, using lower- |\\n | | case letters for the digits above 9. |\\n +-----------+------------------------------------------------------------+\\n | ``\\'X\\'`` | Hex format. Outputs the number in base 16, using upper- |\\n | | case letters for the digits above 9. |\\n +-----------+------------------------------------------------------------+\\n | ``\\'n\\'`` | Number. This is the same as ``\\'d\\'``, except that it uses |\\n | | the current locale setting to insert the appropriate |\\n | | number separator characters. |\\n +-----------+------------------------------------------------------------+\\n | None | The same as ``\\'d\\'``. |\\n +-----------+------------------------------------------------------------+\\n\\nIn addition to the above presentation types, integers can be formatted\\nwith the floating point presentation types listed below (except\\n``\\'n\\'`` and None). When doing so, ``float()`` is used to convert the\\ninteger to a floating point number before formatting.\\n\\nThe available presentation types for floating point and decimal values\\nare:\\n\\n +-----------+------------------------------------------------------------+\\n | Type | Meaning |\\n +===========+============================================================+\\n | ``\\'e\\'`` | Exponent notation. Prints the number in scientific |\\n | | notation using the letter \\'e\\' to indicate the exponent. 
|\\n +-----------+------------------------------------------------------------+\\n | ``\\'E\\'`` | Exponent notation. Same as ``\\'e\\'`` except it uses an upper |\\n | | case \\'E\\' as the separator character. |\\n +-----------+------------------------------------------------------------+\\n | ``\\'f\\'`` | Fixed point. Displays the number as a fixed-point number. |\\n +-----------+------------------------------------------------------------+\\n | ``\\'F\\'`` | Fixed point. Same as ``\\'f\\'``, but converts ``nan`` to |\\n | | ``NAN`` and ``inf`` to ``INF``. |\\n +-----------+------------------------------------------------------------+\\n | ``\\'g\\'`` | General format. For a given precision ``p >= 1``, this |\\n | | rounds the number to ``p`` significant digits and then |\\n | | formats the result in either fixed-point format or in |\\n | | scientific notation, depending on its magnitude. The |\\n | | precise rules are as follows: suppose that the result |\\n | | formatted with presentation type ``\\'e\\'`` and precision |\\n | | ``p-1`` would have exponent ``exp``. Then if ``-4 <= exp |\\n | | < p``, the number is formatted with presentation type |\\n | | ``\\'f\\'`` and precision ``p-1-exp``. Otherwise, the number |\\n | | is formatted with presentation type ``\\'e\\'`` and precision |\\n | | ``p-1``. In both cases insignificant trailing zeros are |\\n | | removed from the significand, and the decimal point is |\\n | | also removed if there are no remaining digits following |\\n | | it. Positive and negative infinity, positive and negative |\\n | | zero, and nans, are formatted as ``inf``, ``-inf``, ``0``, |\\n | | ``-0`` and ``nan`` respectively, regardless of the |\\n | | precision. A precision of ``0`` is treated as equivalent |\\n | | to a precision of ``1``. |\\n +-----------+------------------------------------------------------------+\\n | ``\\'G\\'`` | General format. Same as ``\\'g\\'`` except switches to ``\\'E\\'`` |\\n | | if the number gets too large. The representations of |\\n | | infinity and NaN are uppercased, too. |\\n +-----------+------------------------------------------------------------+\\n | ``\\'n\\'`` | Number. This is the same as ``\\'g\\'``, except that it uses |\\n | | the current locale setting to insert the appropriate |\\n | | number separator characters. |\\n +-----------+------------------------------------------------------------+\\n | ``\\'%\\'`` | Percentage. Multiplies the number by 100 and displays in |\\n | | fixed (``\\'f\\'``) format, followed by a percent sign. |\\n +-----------+------------------------------------------------------------+\\n | None | Similar to ``\\'g\\'``, except with at least one digit past |\\n | | the decimal point and a default precision of 12. This is |\\n | | intended to match ``str()``, except you can add the other |\\n | | format modifiers. |\\n +-----------+------------------------------------------------------------+\\n\\n\\nFormat examples\\n===============\\n\\nThis section contains examples of the new format syntax and comparison\\nwith the old ``%``-formatting.\\n\\nIn most of the cases the syntax is similar to the old\\n``%``-formatting, with the addition of the ``{}`` and with ``:`` used\\ninstead of ``%``. 
For example, ``\\'%03.2f\\'`` can be translated to\\n``\\'{:03.2f}\\'``.\\n\\nThe new format syntax also supports new and different options, shown\\nin the follow examples.\\n\\nAccessing arguments by position:\\n\\n >>> \\'{0}, {1}, {2}\\'.format(\\'a\\', \\'b\\', \\'c\\')\\n \\'a, b, c\\'\\n >>> \\'{}, {}, {}\\'.format(\\'a\\', \\'b\\', \\'c\\') # 3.1+ only\\n \\'a, b, c\\'\\n >>> \\'{2}, {1}, {0}\\'.format(\\'a\\', \\'b\\', \\'c\\')\\n \\'c, b, a\\'\\n >>> \\'{2}, {1}, {0}\\'.format(*\\'abc\\') # unpacking argument sequence\\n \\'c, b, a\\'\\n >>> \\'{0}{1}{0}\\'.format(\\'abra\\', \\'cad\\') # arguments\\' indices can be repeated\\n \\'abracadabra\\'\\n\\nAccessing arguments by name:\\n\\n >>> \\'Coordinates: {latitude}, {longitude}\\'.format(latitude=\\'37.24N\\', longitude=\\'-115.81W\\')\\n \\'Coordinates: 37.24N, -115.81W\\'\\n >>> coord = {\\'latitude\\': \\'37.24N\\', \\'longitude\\': \\'-115.81W\\'}\\n >>> \\'Coordinates: {latitude}, {longitude}\\'.format(**coord)\\n \\'Coordinates: 37.24N, -115.81W\\'\\n\\nAccessing arguments\\' attributes:\\n\\n >>> c = 3-5j\\n >>> (\\'The complex number {0} is formed from the real part {0.real} \\'\\n ... \\'and the imaginary part {0.imag}.\\').format(c)\\n \\'The complex number (3-5j) is formed from the real part 3.0 and the imaginary part -5.0.\\'\\n >>> class Point:\\n ... def __init__(self, x, y):\\n ... self.x, self.y = x, y\\n ... def __str__(self):\\n ... return \\'Point({self.x}, {self.y})\\'.format(self=self)\\n ...\\n >>> str(Point(4, 2))\\n \\'Point(4, 2)\\'\\n\\nAccessing arguments\\' items:\\n\\n >>> coord = (3, 5)\\n >>> \\'X: {0[0]}; Y: {0[1]}\\'.format(coord)\\n \\'X: 3; Y: 5\\'\\n\\nReplacing ``%s`` and ``%r``:\\n\\n >>> \"repr() shows quotes: {!r}; str() doesn\\'t: {!s}\".format(\\'test1\\', \\'test2\\')\\n \"repr() shows quotes: \\'test1\\'; str() doesn\\'t: test2\"\\n\\nAligning the text and specifying a width:\\n\\n >>> \\'{:<30}\\'.format(\\'left aligned\\')\\n \\'left aligned \\'\\n >>> \\'{:>30}\\'.format(\\'right aligned\\')\\n \\' right aligned\\'\\n >>> \\'{:^30}\\'.format(\\'centered\\')\\n \\' centered \\'\\n >>> \\'{:*^30}\\'.format(\\'centered\\') # use \\'*\\' as a fill char\\n \\'***********centered***********\\'\\n\\nReplacing ``%+f``, ``%-f``, and ``% f`` and specifying a sign:\\n\\n >>> \\'{:+f}; {:+f}\\'.format(3.14, -3.14) # show it always\\n \\'+3.140000; -3.140000\\'\\n >>> \\'{: f}; {: f}\\'.format(3.14, -3.14) # show a space for positive numbers\\n \\' 3.140000; -3.140000\\'\\n >>> \\'{:-f}; {:-f}\\'.format(3.14, -3.14) # show only the minus -- same as \\'{:f}; {:f}\\'\\n \\'3.140000; -3.140000\\'\\n\\nReplacing ``%x`` and ``%o`` and converting the value to different\\nbases:\\n\\n >>> # format also supports binary numbers\\n >>> \"int: {0:d}; hex: {0:x}; oct: {0:o}; bin: {0:b}\".format(42)\\n \\'int: 42; hex: 2a; oct: 52; bin: 101010\\'\\n >>> # with 0x, 0o, or 0b as prefix:\\n >>> \"int: {0:d}; hex: {0:#x}; oct: {0:#o}; bin: {0:#b}\".format(42)\\n \\'int: 42; hex: 0x2a; oct: 0o52; bin: 0b101010\\'\\n\\nUsing the comma as a thousands separator:\\n\\n >>> \\'{:,}\\'.format(1234567890)\\n \\'1,234,567,890\\'\\n\\nExpressing a percentage:\\n\\n >>> points = 19\\n >>> total = 22\\n >>> \\'Correct answers: {:.2%}\\'.format(points/total)\\n \\'Correct answers: 86.36%\\'\\n\\nUsing type-specific formatting:\\n\\n >>> import datetime\\n >>> d = datetime.datetime(2010, 7, 4, 12, 15, 58)\\n >>> \\'{:%Y-%m-%d %H:%M:%S}\\'.format(d)\\n \\'2010-07-04 12:15:58\\'\\n\\nNesting arguments and more complex 
examples:\\n\\n >>> for align, text in zip(\\'<^>\\', [\\'left\\', \\'center\\', \\'right\\']):\\n ... \\'{0:{fill}{align}16}\\'.format(text, fill=align, align=align)\\n ...\\n \\'left<<<<<<<<<<<<\\'\\n \\'^^^^^center^^^^^\\'\\n \\'>>>>>>>>>>>right\\'\\n >>>\\n >>> octets = [192, 168, 0, 1]\\n >>> \\'{:02X}{:02X}{:02X}{:02X}\\'.format(*octets)\\n \\'C0A80001\\'\\n >>> int(_, 16)\\n 3232235521\\n >>>\\n >>> width = 5\\n >>> for num in range(5,12): #doctest: +NORMALIZE_WHITESPACE\\n ... for base in \\'dXob\\':\\n ... print(\\'{0:{width}{base}}\\'.format(num, base=base, width=width), end=\\' \\')\\n ... print()\\n ...\\n 5 5 5 101\\n 6 6 6 110\\n 7 7 7 111\\n 8 8 10 1000\\n 9 9 11 1001\\n 10 A 12 1010\\n 11 B 13 1011\\n',\n'function':'\\nFunction definitions\\n********************\\n\\nA function definition defines a user-defined function object (see\\nsection *The standard type hierarchy*):\\n\\n funcdef ::= [decorators] \"def\" funcname \"(\" [parameter_list] \")\" [\"->\" expression] \":\" suite\\n decorators ::= decorator+\\n decorator ::= \"@\" dotted_name [\"(\" [parameter_list [\",\"]] \")\"] NEWLINE\\n dotted_name ::= identifier (\".\" identifier)*\\n parameter_list ::= (defparameter \",\")*\\n ( \"*\" [parameter] (\",\" defparameter)* [\",\" \"**\" parameter]\\n | \"**\" parameter\\n | defparameter [\",\"] )\\n parameter ::= identifier [\":\" expression]\\n defparameter ::= parameter [\"=\" expression]\\n funcname ::= identifier\\n\\nA function definition is an executable statement. Its execution binds\\nthe function name in the current local namespace to a function object\\n(a wrapper around the executable code for the function). This\\nfunction object contains a reference to the current global namespace\\nas the global namespace to be used when the function is called.\\n\\nThe function definition does not execute the function body; this gets\\nexecuted only when the function is called. [3]\\n\\nA function definition may be wrapped by one or more *decorator*\\nexpressions. Decorator expressions are evaluated when the function is\\ndefined, in the scope that contains the function definition. The\\nresult must be a callable, which is invoked with the function object\\nas the only argument. The returned value is bound to the function name\\ninstead of the function object. Multiple decorators are applied in\\nnested fashion. For example, the following code\\n\\n @f1(arg)\\n @f2\\n def func(): pass\\n\\nis equivalent to\\n\\n def func(): pass\\n func = f1(arg)(f2(func))\\n\\nWhen one or more *parameters* have the form *parameter* ``=``\\n*expression*, the function is said to have \"default parameter values.\"\\nFor a parameter with a default value, the corresponding *argument* may\\nbe omitted from a call, in which case the parameter\\'s default value is\\nsubstituted. If a parameter has a default value, all following\\nparameters up until the \"``*``\" must also have a default value ---\\nthis is a syntactic restriction that is not expressed by the grammar.\\n\\n**Default parameter values are evaluated when the function definition\\nis executed.** This means that the expression is evaluated once, when\\nthe function is defined, and that the same \"pre-computed\" value is\\nused for each call. This is especially important to understand when a\\ndefault parameter is a mutable object, such as a list or a dictionary:\\nif the function modifies the object (e.g. by appending an item to a\\nlist), the default value is in effect modified. This is generally not\\nwhat was intended. 
A way around this is to use ``None`` as the\\ndefault, and explicitly test for it in the body of the function, e.g.:\\n\\n def whats_on_the_telly(penguin=None):\\n if penguin is None:\\n penguin = []\\n penguin.append(\"property of the zoo\")\\n return penguin\\n\\nFunction call semantics are described in more detail in section\\n*Calls*. A function call always assigns values to all parameters\\nmentioned in the parameter list, either from position arguments, from\\nkeyword arguments, or from default values. If the form\\n\"``*identifier``\" is present, it is initialized to a tuple receiving\\nany excess positional parameters, defaulting to the empty tuple. If\\nthe form \"``**identifier``\" is present, it is initialized to a new\\ndictionary receiving any excess keyword arguments, defaulting to a new\\nempty dictionary. Parameters after \"``*``\" or \"``*identifier``\" are\\nkeyword-only parameters and may only be passed used keyword arguments.\\n\\nParameters may have annotations of the form \"``: expression``\"\\nfollowing the parameter name. Any parameter may have an annotation\\neven those of the form ``*identifier`` or ``**identifier``. Functions\\nmay have \"return\" annotation of the form \"``-> expression``\" after the\\nparameter list. These annotations can be any valid Python expression\\nand are evaluated when the function definition is executed.\\nAnnotations may be evaluated in a different order than they appear in\\nthe source code. The presence of annotations does not change the\\nsemantics of a function. The annotation values are available as\\nvalues of a dictionary keyed by the parameters\\' names in the\\n``__annotations__`` attribute of the function object.\\n\\nIt is also possible to create anonymous functions (functions not bound\\nto a name), for immediate use in expressions. This uses lambda forms,\\ndescribed in section *Lambdas*. Note that the lambda form is merely a\\nshorthand for a simplified function definition; a function defined in\\na \"``def``\" statement can be passed around or assigned to another name\\njust like a function defined by a lambda form. The \"``def``\" form is\\nactually more powerful since it allows the execution of multiple\\nstatements and annotations.\\n\\n**Programmer\\'s note:** Functions are first-class objects. A \"``def``\"\\nform executed inside a function definition defines a local function\\nthat can be returned or passed around. Free variables used in the\\nnested function can access the local variables of the function\\ncontaining the def. See section *Naming and binding* for details.\\n\\nSee also:\\n\\n **PEP 3107** - Function Annotations\\n The original specification for function annotations.\\n',\n'global':'\\nThe ``global`` statement\\n************************\\n\\n global_stmt ::= \"global\" identifier (\",\" identifier)*\\n\\nThe ``global`` statement is a declaration which holds for the entire\\ncurrent code block. It means that the listed identifiers are to be\\ninterpreted as globals. 
It would be impossible to assign to a global\\nvariable without ``global``, although free variables may refer to\\nglobals without being declared global.\\n\\nNames listed in a ``global`` statement must not be used in the same\\ncode block textually preceding that ``global`` statement.\\n\\nNames listed in a ``global`` statement must not be defined as formal\\nparameters or in a ``for`` loop control target, ``class`` definition,\\nfunction definition, or ``import`` statement.\\n\\n**CPython implementation detail:** The current implementation does not\\nenforce the latter two restrictions, but programs should not abuse\\nthis freedom, as future implementations may enforce them or silently\\nchange the meaning of the program.\\n\\n**Programmer\\'s note:** the ``global`` is a directive to the parser.\\nIt applies only to code parsed at the same time as the ``global``\\nstatement. In particular, a ``global`` statement contained in a string\\nor code object supplied to the built-in ``exec()`` function does not\\naffect the code block *containing* the function call, and code\\ncontained in such a string is unaffected by ``global`` statements in\\nthe code containing the function call. The same applies to the\\n``eval()`` and ``compile()`` functions.\\n',\n'id-classes':'\\nReserved classes of identifiers\\n*******************************\\n\\nCertain classes of identifiers (besides keywords) have special\\nmeanings. These classes are identified by the patterns of leading and\\ntrailing underscore characters:\\n\\n``_*``\\n Not imported by ``from module import *``. The special identifier\\n ``_`` is used in the interactive interpreter to store the result of\\n the last evaluation; it is stored in the ``builtins`` module. When\\n not in interactive mode, ``_`` has no special meaning and is not\\n defined. See section *The import statement*.\\n\\n Note: The name ``_`` is often used in conjunction with\\n internationalization; refer to the documentation for the\\n ``gettext`` module for more information on this convention.\\n\\n``__*__``\\n System-defined names. These names are defined by the interpreter\\n and its implementation (including the standard library). Current\\n system names are discussed in the *Special method names* section\\n and elsewhere. More will likely be defined in future versions of\\n Python. *Any* use of ``__*__`` names, in any context, that does\\n not follow explicitly documented use, is subject to breakage\\n without warning.\\n\\n``__*``\\n Class-private names. Names in this category, when used within the\\n context of a class definition, are re-written to use a mangled form\\n to help avoid name clashes between \"private\" attributes of base and\\n derived classes. See section *Identifiers (Names)*.\\n',\n'identifiers':'\\nIdentifiers and keywords\\n************************\\n\\nIdentifiers (also referred to as *names*) are described by the\\nfollowing lexical definitions.\\n\\nThe syntax of identifiers in Python is based on the Unicode standard\\nannex UAX-31, with elaboration and changes as defined below; see also\\n**PEP 3131** for further details.\\n\\nWithin the ASCII range (U+0001..U+007F), the valid characters for\\nidentifiers are the same as in Python 2.x: the uppercase and lowercase\\nletters ``A`` through ``Z``, the underscore ``_`` and, except for the\\nfirst character, the digits ``0`` through ``9``.\\n\\nPython 3.0 introduces additional characters from outside the ASCII\\nrange (see **PEP 3131**). 
For these characters, the classification\\nuses the version of the Unicode Character Database as included in the\\n``unicodedata`` module.\\n\\nIdentifiers are unlimited in length. Case is significant.\\n\\n identifier ::= xid_start xid_continue*\\n id_start ::= \\n id_continue ::= \\n xid_start ::= \\n xid_continue ::= \\n\\nThe Unicode category codes mentioned above stand for:\\n\\n* *Lu* - uppercase letters\\n\\n* *Ll* - lowercase letters\\n\\n* *Lt* - titlecase letters\\n\\n* *Lm* - modifier letters\\n\\n* *Lo* - other letters\\n\\n* *Nl* - letter numbers\\n\\n* *Mn* - nonspacing marks\\n\\n* *Mc* - spacing combining marks\\n\\n* *Nd* - decimal numbers\\n\\n* *Pc* - connector punctuations\\n\\n* *Other_ID_Start* - explicit list of characters in PropList.txt to\\n support backwards compatibility\\n\\n* *Other_ID_Continue* - likewise\\n\\nAll identifiers are converted into the normal form NFKC while parsing;\\ncomparison of identifiers is based on NFKC.\\n\\nA non-normative HTML file listing all valid identifier characters for\\nUnicode 4.1 can be found at http://www.dcl.hpi.uni-\\npotsdam.de/home/loewis/table-3131.html.\\n\\n\\nKeywords\\n========\\n\\nThe following identifiers are used as reserved words, or *keywords* of\\nthe language, and cannot be used as ordinary identifiers. They must\\nbe spelled exactly as written here:\\n\\n False class finally is return\\n None continue for lambda try\\n True def from nonlocal while\\n and del global not with\\n as elif if or yield\\n assert else import pass\\n break except in raise\\n\\n\\nReserved classes of identifiers\\n===============================\\n\\nCertain classes of identifiers (besides keywords) have special\\nmeanings. These classes are identified by the patterns of leading and\\ntrailing underscore characters:\\n\\n``_*``\\n Not imported by ``from module import *``. The special identifier\\n ``_`` is used in the interactive interpreter to store the result of\\n the last evaluation; it is stored in the ``builtins`` module. When\\n not in interactive mode, ``_`` has no special meaning and is not\\n defined. See section *The import statement*.\\n\\n Note: The name ``_`` is often used in conjunction with\\n internationalization; refer to the documentation for the\\n ``gettext`` module for more information on this convention.\\n\\n``__*__``\\n System-defined names. These names are defined by the interpreter\\n and its implementation (including the standard library). Current\\n system names are discussed in the *Special method names* section\\n and elsewhere. More will likely be defined in future versions of\\n Python. *Any* use of ``__*__`` names, in any context, that does\\n not follow explicitly documented use, is subject to breakage\\n without warning.\\n\\n``__*``\\n Class-private names. Names in this category, when used within the\\n context of a class definition, are re-written to use a mangled form\\n to help avoid name clashes between \"private\" attributes of base and\\n derived classes. 
See section *Identifiers (Names)*.\\n',\n'if':'\\nThe ``if`` statement\\n********************\\n\\nThe ``if`` statement is used for conditional execution:\\n\\n if_stmt ::= \"if\" expression \":\" suite\\n ( \"elif\" expression \":\" suite )*\\n [\"else\" \":\" suite]\\n\\nIt selects exactly one of the suites by evaluating the expressions one\\nby one until one is found to be true (see section *Boolean operations*\\nfor the definition of true and false); then that suite is executed\\n(and no other part of the ``if`` statement is executed or evaluated).\\nIf all expressions are false, the suite of the ``else`` clause, if\\npresent, is executed.\\n',\n'imaginary':'\\nImaginary literals\\n******************\\n\\nImaginary literals are described by the following lexical definitions:\\n\\n imagnumber ::= (floatnumber | intpart) (\"j\" | \"J\")\\n\\nAn imaginary literal yields a complex number with a real part of 0.0.\\nComplex numbers are represented as a pair of floating point numbers\\nand have the same restrictions on their range. To create a complex\\nnumber with a nonzero real part, add a floating point number to it,\\ne.g., ``(3+4j)``. Some examples of imaginary literals:\\n\\n 3.14j 10.j 10j .001j 1e100j 3.14e-10j\\n',\n'import':'\\nThe ``import`` statement\\n************************\\n\\n import_stmt ::= \"import\" module [\"as\" name] ( \",\" module [\"as\" name] )*\\n | \"from\" relative_module \"import\" identifier [\"as\" name]\\n ( \",\" identifier [\"as\" name] )*\\n | \"from\" relative_module \"import\" \"(\" identifier [\"as\" name]\\n ( \",\" identifier [\"as\" name] )* [\",\"] \")\"\\n | \"from\" module \"import\" \"*\"\\n module ::= (identifier \".\")* identifier\\n relative_module ::= \".\"* module | \".\"+\\n name ::= identifier\\n\\nThe basic import statement (no ``from`` clause) is executed in two\\nsteps:\\n\\n1. find a module, loading and initializing it if necessary\\n\\n2. define a name or names in the local namespace for the scope where\\n the ``import`` statement occurs.\\n\\nWhen the statement contains multiple clauses (separated by commas) the\\ntwo steps are carried out separately for each clause, just as though\\nthe clauses had been separated out into individiual import statements.\\n\\nThe details of the first step, finding and loading modules is\\ndescribed in greater detail in the section on the *import system*,\\nwhich also describes the various types of packages and modules that\\ncan be imported, as well as all the hooks that can be used to\\ncustomize the import system. Note that failures in this step may\\nindicate either that the module could not be located, *or* that an\\nerror occurred while initializing the module, which includes execution\\nof the module\\'s code.\\n\\nIf the requested module is retrieved successfully, it will be made\\navailable in the local namespace in one of three ways:\\n\\n* If the module name is followed by ``as``, then the name following\\n ``as`` is bound directly to the imported module.\\n\\n* If no other name is specified, and the module being imported is a\\n top level module, the module\\'s name is bound in the local namespace\\n as a reference to the imported module\\n\\n* If the module being imported is *not* a top level module, then the\\n name of the top level package that contains the module is bound in\\n the local namespace as a reference to the top level package. 
The\\n imported module must be accessed using its full qualified name\\n rather than directly\\n\\nThe ``from`` form uses a slightly more complex process:\\n\\n1. find the module specified in the ``from`` clause loading and\\n initializing it if necessary;\\n\\n2. for each of the identifiers specified in the ``import`` clauses:\\n\\n 1. check if the imported module has an attribute by that name\\n\\n 2. if not, attempt to import a submodule with that name and then\\n check the imported module again for that attribute\\n\\n 3. if the attribute is not found, ``ImportError`` is raised.\\n\\n 4. otherwise, a reference to that value is bound in the local\\n namespace, using the name in the ``as`` clause if it is present,\\n otherwise using the attribute name\\n\\nExamples:\\n\\n import foo # foo imported and bound locally\\n import foo.bar.baz # foo.bar.baz imported, foo bound locally\\n import foo.bar.baz as fbb # foo.bar.baz imported and bound as fbb\\n from foo.bar import baz # foo.bar.baz imported and bound as baz\\n from foo import attr # foo imported and foo.attr bound as attr\\n\\nIf the list of identifiers is replaced by a star (``\\'*\\'``), all public\\nnames defined in the module are bound in the local namespace for the\\nscope where the ``import`` statement occurs.\\n\\nThe *public names* defined by a module are determined by checking the\\nmodule\\'s namespace for a variable named ``__all__``; if defined, it\\nmust be a sequence of strings which are names defined or imported by\\nthat module. The names given in ``__all__`` are all considered public\\nand are required to exist. If ``__all__`` is not defined, the set of\\npublic names includes all names found in the module\\'s namespace which\\ndo not begin with an underscore character (``\\'_\\'``). ``__all__``\\nshould contain the entire public API. It is intended to avoid\\naccidentally exporting items that are not part of the API (such as\\nlibrary modules which were imported and used within the module).\\n\\nThe ``from`` form with ``*`` may only occur in a module scope.\\nAttempting to use it in class or function definitions will raise a\\n``SyntaxError``.\\n\\nThe *public names* defined by a module are determined by checking the\\nmodule\\'s namespace for a variable named ``__all__``; if defined, it\\nmust be a sequence of strings which are names defined or imported by\\nthat module. The names given in ``__all__`` are all considered public\\nand are required to exist. If ``__all__`` is not defined, the set of\\npublic names includes all names found in the module\\'s namespace which\\ndo not begin with an underscore character (``\\'_\\'``). ``__all__``\\nshould contain the entire public API. It is intended to avoid\\naccidentally exporting items that are not part of the API (such as\\nlibrary modules which were imported and used within the module).\\n\\nThe ``from`` form with ``*`` may only occur in a module scope. The\\nwild card form of import --- ``import *`` --- is only allowed at the\\nmodule level. Attempting to use it in class or function definitions\\nwill raise a ``SyntaxError``.\\n\\nWhen specifying what module to import you do not have to specify the\\nabsolute name of the module. When a module or package is contained\\nwithin another package it is possible to make a relative import within\\nthe same top package without having to mention the package name. 
By\\nusing leading dots in the specified module or package after ``from``\\nyou can specify how high to traverse up the current package hierarchy\\nwithout specifying exact names. One leading dot means the current\\npackage where the module making the import exists. Two dots means up\\none package level. Three dots is up two levels, etc. So if you execute\\n``from . import mod`` from a module in the ``pkg`` package then you\\nwill end up importing ``pkg.mod``. If you execute ``from ..subpkg2\\nimport mod`` from within ``pkg.subpkg1`` you will import\\n``pkg.subpkg2.mod``. The specification for relative imports is\\ncontained within **PEP 328**.\\n\\n``importlib.import_module()`` is provided to support applications that\\ndetermine which modules need to be loaded dynamically.\\n\\n\\nFuture statements\\n=================\\n\\nA *future statement* is a directive to the compiler that a particular\\nmodule should be compiled using syntax or semantics that will be\\navailable in a specified future release of Python. The future\\nstatement is intended to ease migration to future versions of Python\\nthat introduce incompatible changes to the language. It allows use of\\nthe new features on a per-module basis before the release in which the\\nfeature becomes standard.\\n\\n future_statement ::= \"from\" \"__future__\" \"import\" feature [\"as\" name]\\n (\",\" feature [\"as\" name])*\\n | \"from\" \"__future__\" \"import\" \"(\" feature [\"as\" name]\\n (\",\" feature [\"as\" name])* [\",\"] \")\"\\n feature ::= identifier\\n name ::= identifier\\n\\nA future statement must appear near the top of the module. The only\\nlines that can appear before a future statement are:\\n\\n* the module docstring (if any),\\n\\n* comments,\\n\\n* blank lines, and\\n\\n* other future statements.\\n\\nThe features recognized by Python 3.0 are ``absolute_import``,\\n``division``, ``generators``, ``unicode_literals``,\\n``print_function``, ``nested_scopes`` and ``with_statement``. They\\nare all redundant because they are always enabled, and only kept for\\nbackwards compatibility.\\n\\nA future statement is recognized and treated specially at compile\\ntime: Changes to the semantics of core constructs are often\\nimplemented by generating different code. It may even be the case\\nthat a new feature introduces new incompatible syntax (such as a new\\nreserved word), in which case the compiler may need to parse the\\nmodule differently. Such decisions cannot be pushed off until\\nruntime.\\n\\nFor any given release, the compiler knows which feature names have\\nbeen defined, and raises a compile-time error if a future statement\\ncontains a feature not known to it.\\n\\nThe direct runtime semantics are the same as for any import statement:\\nthere is a standard module ``__future__``, described later, and it\\nwill be imported in the usual way at the time the future statement is\\nexecuted.\\n\\nThe interesting runtime semantics depend on the specific feature\\nenabled by the future statement.\\n\\nNote that there is nothing special about the statement:\\n\\n import __future__ [as name]\\n\\nThat is not a future statement; it\\'s an ordinary import statement with\\nno special semantics or syntax restrictions.\\n\\nCode compiled by calls to the built-in functions ``exec()`` and\\n``compile()`` that occur in a module ``M`` containing a future\\nstatement will, by default, use the new syntax or semantics associated\\nwith the future statement. 
This can be controlled by optional\\narguments to ``compile()`` --- see the documentation of that function\\nfor details.\\n\\nA future statement typed at an interactive interpreter prompt will\\ntake effect for the rest of the interpreter session. If an\\ninterpreter is started with the *-i* option, is passed a script name\\nto execute, and the script includes a future statement, it will be in\\neffect in the interactive session started after the script is\\nexecuted.\\n\\nSee also:\\n\\n **PEP 236** - Back to the __future__\\n The original proposal for the __future__ mechanism.\\n',\n'in':'\\nComparisons\\n***********\\n\\nUnlike C, all comparison operations in Python have the same priority,\\nwhich is lower than that of any arithmetic, shifting or bitwise\\noperation. Also unlike C, expressions like ``a < b < c`` have the\\ninterpretation that is conventional in mathematics:\\n\\n comparison ::= or_expr ( comp_operator or_expr )*\\n comp_operator ::= \"<\" | \">\" | \"==\" | \">=\" | \"<=\" | \"!=\"\\n | \"is\" [\"not\"] | [\"not\"] \"in\"\\n\\nComparisons yield boolean values: ``True`` or ``False``.\\n\\nComparisons can be chained arbitrarily, e.g., ``x < y <= z`` is\\nequivalent to ``x < y and y <= z``, except that ``y`` is evaluated\\nonly once (but in both cases ``z`` is not evaluated at all when ``x <\\ny`` is found to be false).\\n\\nFormally, if *a*, *b*, *c*, ..., *y*, *z* are expressions and *op1*,\\n*op2*, ..., *opN* are comparison operators, then ``a op1 b op2 c ... y\\nopN z`` is equivalent to ``a op1 b and b op2 c and ... y opN z``,\\nexcept that each expression is evaluated at most once.\\n\\nNote that ``a op1 b op2 c`` doesn\\'t imply any kind of comparison\\nbetween *a* and *c*, so that, e.g., ``x < y > z`` is perfectly legal\\n(though perhaps not pretty).\\n\\nThe operators ``<``, ``>``, ``==``, ``>=``, ``<=``, and ``!=`` compare\\nthe values of two objects. The objects need not have the same type.\\nIf both are numbers, they are converted to a common type. Otherwise,\\nthe ``==`` and ``!=`` operators *always* consider objects of different\\ntypes to be unequal, while the ``<``, ``>``, ``>=`` and ``<=``\\noperators raise a ``TypeError`` when comparing objects of different\\ntypes that do not implement these operators for the given pair of\\ntypes. You can control comparison behavior of objects of non-built-in\\ntypes by defining rich comparison methods like ``__gt__()``, described\\nin section *Basic customization*.\\n\\nComparison of objects of the same type depends on the type:\\n\\n* Numbers are compared arithmetically.\\n\\n* The values ``float(\\'NaN\\')`` and ``Decimal(\\'NaN\\')`` are special. The\\n are identical to themselves, ``x is x`` but are not equal to\\n themselves, ``x != x``. Additionally, comparing any value to a\\n not-a-number value will return ``False``. For example, both ``3 <\\n float(\\'NaN\\')`` and ``float(\\'NaN\\') < 3`` will return ``False``.\\n\\n* Bytes objects are compared lexicographically using the numeric\\n values of their elements.\\n\\n* Strings are compared lexicographically using the numeric equivalents\\n (the result of the built-in function ``ord()``) of their characters.\\n [3] String and bytes object can\\'t be compared!\\n\\n* Tuples and lists are compared lexicographically using comparison of\\n corresponding elements. 
This means that to compare equal, each\\n element must compare equal and the two sequences must be of the same\\n type and have the same length.\\n\\n If not equal, the sequences are ordered the same as their first\\n differing elements. For example, ``[1,2,x] <= [1,2,y]`` has the\\n same value as ``x <= y``. If the corresponding element does not\\n exist, the shorter sequence is ordered first (for example, ``[1,2] <\\n [1,2,3]``).\\n\\n* Mappings (dictionaries) compare equal if and only if they have the\\n same ``(key, value)`` pairs. Order comparisons ``(\\'<\\', \\'<=\\', \\'>=\\',\\n \\'>\\')`` raise ``TypeError``.\\n\\n* Sets and frozensets define comparison operators to mean subset and\\n superset tests. Those relations do not define total orderings (the\\n two sets ``{1,2}`` and {2,3} are not equal, nor subsets of one\\n another, nor supersets of one another). Accordingly, sets are not\\n appropriate arguments for functions which depend on total ordering.\\n For example, ``min()``, ``max()``, and ``sorted()`` produce\\n undefined results given a list of sets as inputs.\\n\\n* Most other objects of built-in types compare unequal unless they are\\n the same object; the choice whether one object is considered smaller\\n or larger than another one is made arbitrarily but consistently\\n within one execution of a program.\\n\\nComparison of objects of the differing types depends on whether either\\nof the types provide explicit support for the comparison. Most\\nnumeric types can be compared with one another. When cross-type\\ncomparison is not supported, the comparison method returns\\n``NotImplemented``.\\n\\nThe operators ``in`` and ``not in`` test for membership. ``x in s``\\nevaluates to true if *x* is a member of *s*, and false otherwise. ``x\\nnot in s`` returns the negation of ``x in s``. All built-in sequences\\nand set types support this as well as dictionary, for which ``in``\\ntests whether a the dictionary has a given key. For container types\\nsuch as list, tuple, set, frozenset, dict, or collections.deque, the\\nexpression ``x in y`` is equivalent to ``any(x is e or x == e for e in\\ny)``.\\n\\nFor the string and bytes types, ``x in y`` is true if and only if *x*\\nis a substring of *y*. An equivalent test is ``y.find(x) != -1``.\\nEmpty strings are always considered to be a substring of any other\\nstring, so ``\"\" in \"abc\"`` will return ``True``.\\n\\nFor user-defined classes which define the ``__contains__()`` method,\\n``x in y`` is true if and only if ``y.__contains__(x)`` is true.\\n\\nFor user-defined classes which do not define ``__contains__()`` but do\\ndefine ``__iter__()``, ``x in y`` is true if some value ``z`` with ``x\\n== z`` is produced while iterating over ``y``. If an exception is\\nraised during the iteration, it is as if ``in`` raised that exception.\\n\\nLastly, the old-style iteration protocol is tried: if a class defines\\n``__getitem__()``, ``x in y`` is true if and only if there is a non-\\nnegative integer index *i* such that ``x == y[i]``, and all lower\\ninteger indices do not raise ``IndexError`` exception. (If any other\\nexception is raised, it is as if ``in`` raised that exception).\\n\\nThe operator ``not in`` is defined to have the inverse true value of\\n``in``.\\n\\nThe operators ``is`` and ``is not`` test for object identity: ``x is\\ny`` is true if and only if *x* and *y* are the same object. ``x is\\nnot y`` yields the inverse truth value. 
[4]\\n',\n'integers':'\\nInteger literals\\n****************\\n\\nInteger literals are described by the following lexical definitions:\\n\\n integer ::= decimalinteger | octinteger | hexinteger | bininteger\\n decimalinteger ::= nonzerodigit digit* | \"0\"+\\n nonzerodigit ::= \"1\"...\"9\"\\n digit ::= \"0\"...\"9\"\\n octinteger ::= \"0\" (\"o\" | \"O\") octdigit+\\n hexinteger ::= \"0\" (\"x\" | \"X\") hexdigit+\\n bininteger ::= \"0\" (\"b\" | \"B\") bindigit+\\n octdigit ::= \"0\"...\"7\"\\n hexdigit ::= digit | \"a\"...\"f\" | \"A\"...\"F\"\\n bindigit ::= \"0\" | \"1\"\\n\\nThere is no limit for the length of integer literals apart from what\\ncan be stored in available memory.\\n\\nNote that leading zeros in a non-zero decimal number are not allowed.\\nThis is for disambiguation with C-style octal literals, which Python\\nused before version 3.0.\\n\\nSome examples of integer literals:\\n\\n 7 2147483647 0o177 0b100110111\\n 3 79228162514264337593543950336 0o377 0x100000000\\n 79228162514264337593543950336 0xdeadbeef\\n',\n'lambda':'\\nLambdas\\n*******\\n\\n lambda_form ::= \"lambda\" [parameter_list]: expression\\n lambda_form_nocond ::= \"lambda\" [parameter_list]: expression_nocond\\n\\nLambda forms (lambda expressions) have the same syntactic position as\\nexpressions. They are a shorthand to create anonymous functions; the\\nexpression ``lambda arguments: expression`` yields a function object.\\nThe unnamed object behaves like a function object defined with\\n\\n def (arguments):\\n return expression\\n\\nSee section *Function definitions* for the syntax of parameter lists.\\nNote that functions created with lambda forms cannot contain\\nstatements or annotations.\\n',\n'lists':'\\nList displays\\n*************\\n\\nA list display is a possibly empty series of expressions enclosed in\\nsquare brackets:\\n\\n list_display ::= \"[\" [expression_list | comprehension] \"]\"\\n\\nA list display yields a new list object, the contents being specified\\nby either a list of expressions or a comprehension. When a comma-\\nseparated list of expressions is supplied, its elements are evaluated\\nfrom left to right and placed into the list object in that order.\\nWhen a comprehension is supplied, the list is constructed from the\\nelements resulting from the comprehension.\\n',\n'naming':\"\\nNaming and binding\\n******************\\n\\n*Names* refer to objects. Names are introduced by name binding\\noperations. Each occurrence of a name in the program text refers to\\nthe *binding* of that name established in the innermost function block\\ncontaining the use.\\n\\nA *block* is a piece of Python program text that is executed as a\\nunit. The following are blocks: a module, a function body, and a class\\ndefinition. Each command typed interactively is a block. A script\\nfile (a file given as standard input to the interpreter or specified\\non the interpreter command line the first argument) is a code block.\\nA script command (a command specified on the interpreter command line\\nwith the '**-c**' option) is a code block. The string argument passed\\nto the built-in functions ``eval()`` and ``exec()`` is a code block.\\n\\nA code block is executed in an *execution frame*. A frame contains\\nsome administrative information (used for debugging) and determines\\nwhere and how execution continues after the code block's execution has\\ncompleted.\\n\\nA *scope* defines the visibility of a name within a block. If a local\\nvariable is defined in a block, its scope includes that block. 
If the\\ndefinition occurs in a function block, the scope extends to any blocks\\ncontained within the defining one, unless a contained block introduces\\na different binding for the name. The scope of names defined in a\\nclass block is limited to the class block; it does not extend to the\\ncode blocks of methods -- this includes comprehensions and generator\\nexpressions since they are implemented using a function scope. This\\nmeans that the following will fail:\\n\\n class A:\\n a = 42\\n b = list(a + i for i in range(10))\\n\\nWhen a name is used in a code block, it is resolved using the nearest\\nenclosing scope. The set of all such scopes visible to a code block\\nis called the block's *environment*.\\n\\nIf a name is bound in a block, it is a local variable of that block,\\nunless declared as ``nonlocal``. If a name is bound at the module\\nlevel, it is a global variable. (The variables of the module code\\nblock are local and global.) If a variable is used in a code block\\nbut not defined there, it is a *free variable*.\\n\\nWhen a name is not found at all, a ``NameError`` exception is raised.\\nIf the name refers to a local variable that has not been bound, a\\n``UnboundLocalError`` exception is raised. ``UnboundLocalError`` is a\\nsubclass of ``NameError``.\\n\\nThe following constructs bind names: formal parameters to functions,\\n``import`` statements, class and function definitions (these bind the\\nclass or function name in the defining block), and targets that are\\nidentifiers if occurring in an assignment, ``for`` loop header, or\\nafter ``as`` in a ``with`` statement or ``except`` clause. The\\n``import`` statement of the form ``from ... import *`` binds all names\\ndefined in the imported module, except those beginning with an\\nunderscore. This form may only be used at the module level.\\n\\nA target occurring in a ``del`` statement is also considered bound for\\nthis purpose (though the actual semantics are to unbind the name).\\n\\nEach assignment or import statement occurs within a block defined by a\\nclass or function definition or at the module level (the top-level\\ncode block).\\n\\nIf a name binding operation occurs anywhere within a code block, all\\nuses of the name within the block are treated as references to the\\ncurrent block. This can lead to errors when a name is used within a\\nblock before it is bound. This rule is subtle. Python lacks\\ndeclarations and allows name binding operations to occur anywhere\\nwithin a code block. The local variables of a code block can be\\ndetermined by scanning the entire text of the block for name binding\\noperations.\\n\\nIf the ``global`` statement occurs within a block, all uses of the\\nname specified in the statement refer to the binding of that name in\\nthe top-level namespace. Names are resolved in the top-level\\nnamespace by searching the global namespace, i.e. the namespace of the\\nmodule containing the code block, and the builtins namespace, the\\nnamespace of the module ``builtins``. The global namespace is\\nsearched first. If the name is not found there, the builtins\\nnamespace is searched. The global statement must precede all uses of\\nthe name.\\n\\nThe builtins namespace associated with the execution of a code block\\nis actually found by looking up the name ``__builtins__`` in its\\nglobal namespace; this should be a dictionary or a module (in the\\nlatter case the module's dictionary is used). 
By default, when in the\\n``__main__`` module, ``__builtins__`` is the built-in module\\n``builtins``; when in any other module, ``__builtins__`` is an alias\\nfor the dictionary of the ``builtins`` module itself.\\n``__builtins__`` can be set to a user-created dictionary to create a\\nweak form of restricted execution.\\n\\n**CPython implementation detail:** Users should not touch\\n``__builtins__``; it is strictly an implementation detail. Users\\nwanting to override values in the builtins namespace should ``import``\\nthe ``builtins`` module and modify its attributes appropriately.\\n\\nThe namespace for a module is automatically created the first time a\\nmodule is imported. The main module for a script is always called\\n``__main__``.\\n\\nThe ``global`` statement has the same scope as a name binding\\noperation in the same block. If the nearest enclosing scope for a\\nfree variable contains a global statement, the free variable is\\ntreated as a global.\\n\\nA class definition is an executable statement that may use and define\\nnames. These references follow the normal rules for name resolution.\\nThe namespace of the class definition becomes the attribute dictionary\\nof the class. Names defined at the class scope are not visible in\\nmethods.\\n\\n\\nInteraction with dynamic features\\n=================================\\n\\nThere are several cases where Python statements are illegal when used\\nin conjunction with nested scopes that contain free variables.\\n\\nIf a variable is referenced in an enclosing scope, it is illegal to\\ndelete the name. An error will be reported at compile time.\\n\\nIf the wild card form of import --- ``import *`` --- is used in a\\nfunction and the function contains or is a nested block with free\\nvariables, the compiler will raise a ``SyntaxError``.\\n\\nThe ``eval()`` and ``exec()`` functions do not have access to the full\\nenvironment for resolving names. Names may be resolved in the local\\nand global namespaces of the caller. Free variables are not resolved\\nin the nearest enclosing namespace, but in the global namespace. [1]\\nThe ``exec()`` and ``eval()`` functions have optional arguments to\\noverride the global and local namespace. If only one namespace is\\nspecified, it is used for both.\\n\",\n'nonlocal':'\\nThe ``nonlocal`` statement\\n**************************\\n\\n nonlocal_stmt ::= \"nonlocal\" identifier (\",\" identifier)*\\n\\nThe ``nonlocal`` statement causes the listed identifiers to refer to\\npreviously bound variables in the nearest enclosing scope. This is\\nimportant because the default behavior for binding is to search the\\nlocal namespace first. The statement allows encapsulated code to\\nrebind variables outside of the local scope besides the global\\n(module) scope.\\n\\nNames listed in a ``nonlocal`` statement, unlike to those listed in a\\n``global`` statement, must refer to pre-existing bindings in an\\nenclosing scope (the scope in which a new binding should be created\\ncannot be determined unambiguously).\\n\\nNames listed in a ``nonlocal`` statement must not collide with pre-\\nexisting bindings in the local scope.\\n\\nSee also:\\n\\n **PEP 3104** - Access to Names in Outer Scopes\\n The specification for the ``nonlocal`` statement.\\n',\n'numbers':\"\\nNumeric literals\\n****************\\n\\nThere are three types of numeric literals: integers, floating point\\nnumbers, and imaginary numbers. 
There are no complex literals\\n(complex numbers can be formed by adding a real number and an\\nimaginary number).\\n\\nNote that numeric literals do not include a sign; a phrase like ``-1``\\nis actually an expression composed of the unary operator '``-``' and\\nthe literal ``1``.\\n\",\n'numeric-types':\"\\nEmulating numeric types\\n***********************\\n\\nThe following methods can be defined to emulate numeric objects.\\nMethods corresponding to operations that are not supported by the\\nparticular kind of number implemented (e.g., bitwise operations for\\nnon-integral numbers) should be left undefined.\\n\\nobject.__add__(self, other)\\nobject.__sub__(self, other)\\nobject.__mul__(self, other)\\nobject.__truediv__(self, other)\\nobject.__floordiv__(self, other)\\nobject.__mod__(self, other)\\nobject.__divmod__(self, other)\\nobject.__pow__(self, other[, modulo])\\nobject.__lshift__(self, other)\\nobject.__rshift__(self, other)\\nobject.__and__(self, other)\\nobject.__xor__(self, other)\\nobject.__or__(self, other)\\n\\n These methods are called to implement the binary arithmetic\\n operations (``+``, ``-``, ``*``, ``/``, ``//``, ``%``,\\n ``divmod()``, ``pow()``, ``**``, ``<<``, ``>>``, ``&``, ``^``,\\n ``|``). For instance, to evaluate the expression ``x + y``, where\\n *x* is an instance of a class that has an ``__add__()`` method,\\n ``x.__add__(y)`` is called. The ``__divmod__()`` method should be\\n the equivalent to using ``__floordiv__()`` and ``__mod__()``; it\\n should not be related to ``__truediv__()``. Note that\\n ``__pow__()`` should be defined to accept an optional third\\n argument if the ternary version of the built-in ``pow()`` function\\n is to be supported.\\n\\n If one of those methods does not support the operation with the\\n supplied arguments, it should return ``NotImplemented``.\\n\\nobject.__radd__(self, other)\\nobject.__rsub__(self, other)\\nobject.__rmul__(self, other)\\nobject.__rtruediv__(self, other)\\nobject.__rfloordiv__(self, other)\\nobject.__rmod__(self, other)\\nobject.__rdivmod__(self, other)\\nobject.__rpow__(self, other)\\nobject.__rlshift__(self, other)\\nobject.__rrshift__(self, other)\\nobject.__rand__(self, other)\\nobject.__rxor__(self, other)\\nobject.__ror__(self, other)\\n\\n These methods are called to implement the binary arithmetic\\n operations (``+``, ``-``, ``*``, ``/``, ``//``, ``%``,\\n ``divmod()``, ``pow()``, ``**``, ``<<``, ``>>``, ``&``, ``^``,\\n ``|``) with reflected (swapped) operands. These functions are only\\n called if the left operand does not support the corresponding\\n operation and the operands are of different types. [2] For\\n instance, to evaluate the expression ``x - y``, where *y* is an\\n instance of a class that has an ``__rsub__()`` method,\\n ``y.__rsub__(x)`` is called if ``x.__sub__(y)`` returns\\n *NotImplemented*.\\n\\n Note that ternary ``pow()`` will not try calling ``__rpow__()``\\n (the coercion rules would become too complicated).\\n\\n Note: If the right operand's type is a subclass of the left operand's\\n type and that subclass provides the reflected method for the\\n operation, this method will be called before the left operand's\\n non-reflected method. 
This behavior allows subclasses to\\n override their ancestors' operations.\\n\\nobject.__iadd__(self, other)\\nobject.__isub__(self, other)\\nobject.__imul__(self, other)\\nobject.__itruediv__(self, other)\\nobject.__ifloordiv__(self, other)\\nobject.__imod__(self, other)\\nobject.__ipow__(self, other[, modulo])\\nobject.__ilshift__(self, other)\\nobject.__irshift__(self, other)\\nobject.__iand__(self, other)\\nobject.__ixor__(self, other)\\nobject.__ior__(self, other)\\n\\n These methods are called to implement the augmented arithmetic\\n assignments (``+=``, ``-=``, ``*=``, ``/=``, ``//=``, ``%=``,\\n ``**=``, ``<<=``, ``>>=``, ``&=``, ``^=``, ``|=``). These methods\\n should attempt to do the operation in-place (modifying *self*) and\\n return the result (which could be, but does not have to be,\\n *self*). If a specific method is not defined, the augmented\\n assignment falls back to the normal methods. For instance, to\\n execute the statement ``x += y``, where *x* is an instance of a\\n class that has an ``__iadd__()`` method, ``x.__iadd__(y)`` is\\n called. If *x* is an instance of a class that does not define a\\n ``__iadd__()`` method, ``x.__add__(y)`` and ``y.__radd__(x)`` are\\n considered, as with the evaluation of ``x + y``.\\n\\nobject.__neg__(self)\\nobject.__pos__(self)\\nobject.__abs__(self)\\nobject.__invert__(self)\\n\\n Called to implement the unary arithmetic operations (``-``, ``+``,\\n ``abs()`` and ``~``).\\n\\nobject.__complex__(self)\\nobject.__int__(self)\\nobject.__float__(self)\\nobject.__round__(self[, n])\\n\\n Called to implement the built-in functions ``complex()``,\\n ``int()``, ``float()`` and ``round()``. Should return a value of\\n the appropriate type.\\n\\nobject.__index__(self)\\n\\n Called to implement ``operator.index()``. Also called whenever\\n Python needs an integer object (such as in slicing, or in the\\n built-in ``bin()``, ``hex()`` and ``oct()`` functions). Must return\\n an integer.\\n\",\n'objects':'\\nObjects, values and types\\n*************************\\n\\n*Objects* are Python\\'s abstraction for data. All data in a Python\\nprogram is represented by objects or by relations between objects. (In\\na sense, and in conformance to Von Neumann\\'s model of a \"stored\\nprogram computer,\" code is also represented by objects.)\\n\\nEvery object has an identity, a type and a value. An object\\'s\\n*identity* never changes once it has been created; you may think of it\\nas the object\\'s address in memory. The \\'``is``\\' operator compares the\\nidentity of two objects; the ``id()`` function returns an integer\\nrepresenting its identity.\\n\\n**CPython implementation detail:** For CPython, ``id(x)`` is the\\nmemory address where ``x`` is stored.\\n\\nAn object\\'s type determines the operations that the object supports\\n(e.g., \"does it have a length?\") and also defines the possible values\\nfor objects of that type. The ``type()`` function returns an object\\'s\\ntype (which is an object itself). Like its identity, an object\\'s\\n*type* is also unchangeable. [1]\\n\\nThe *value* of some objects can change. Objects whose value can\\nchange are said to be *mutable*; objects whose value is unchangeable\\nonce they are created are called *immutable*. (The value of an\\nimmutable container object that contains a reference to a mutable\\nobject can change when the latter\\'s value is changed; however the\\ncontainer is still considered immutable, because the collection of\\nobjects it contains cannot be changed. 
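To make the binary, reflected and in-place arithmetic methods described in the section above concrete, here is a minimal, hypothetical ``Metres`` class; returning ``NotImplemented`` for unsupported operands lets Python go on to try the other operand's method:

   class Metres:
       def __init__(self, value):
           self.value = value

       def __add__(self, other):
           if isinstance(other, Metres):          # binary +
               return Metres(self.value + other.value)
           return NotImplemented                  # let Python try other.__radd__(self)

       def __radd__(self, other):
           if other == 0:                         # reflected +, e.g. the 0 used by sum()
               return Metres(self.value)
           return NotImplemented

       def __iadd__(self, other):
           if isinstance(other, Metres):          # in-place +=: modify self, return it
               self.value += other.value
               return self
           return NotImplemented

       def __repr__(self):
           return "Metres(%r)" % self.value

   total = sum([Metres(2), Metres(3)])    # 0 + Metres(2) falls back to __radd__
   total += Metres(5)                     # handled by __iadd__
   print(total)                           # Metres(10)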
So, immutability is not\\nstrictly the same as having an unchangeable value, it is more subtle.)\\nAn object\\'s mutability is determined by its type; for instance,\\nnumbers, strings and tuples are immutable, while dictionaries and\\nlists are mutable.\\n\\nObjects are never explicitly destroyed; however, when they become\\nunreachable they may be garbage-collected. An implementation is\\nallowed to postpone garbage collection or omit it altogether --- it is\\na matter of implementation quality how garbage collection is\\nimplemented, as long as no objects are collected that are still\\nreachable.\\n\\n**CPython implementation detail:** CPython currently uses a reference-\\ncounting scheme with (optional) delayed detection of cyclically linked\\ngarbage, which collects most objects as soon as they become\\nunreachable, but is not guaranteed to collect garbage containing\\ncircular references. See the documentation of the ``gc`` module for\\ninformation on controlling the collection of cyclic garbage. Other\\nimplementations act differently and CPython may change. Do not depend\\non immediate finalization of objects when they become unreachable (ex:\\nalways close files).\\n\\nNote that the use of the implementation\\'s tracing or debugging\\nfacilities may keep objects alive that would normally be collectable.\\nAlso note that catching an exception with a \\'``try``...``except``\\'\\nstatement may keep objects alive.\\n\\nSome objects contain references to \"external\" resources such as open\\nfiles or windows. It is understood that these resources are freed\\nwhen the object is garbage-collected, but since garbage collection is\\nnot guaranteed to happen, such objects also provide an explicit way to\\nrelease the external resource, usually a ``close()`` method. Programs\\nare strongly recommended to explicitly close such objects. The\\n\\'``try``...``finally``\\' statement and the \\'``with``\\' statement provide\\nconvenient ways to do this.\\n\\nSome objects contain references to other objects; these are called\\n*containers*. Examples of containers are tuples, lists and\\ndictionaries. The references are part of a container\\'s value. In\\nmost cases, when we talk about the value of a container, we imply the\\nvalues, not the identities of the contained objects; however, when we\\ntalk about the mutability of a container, only the identities of the\\nimmediately contained objects are implied. So, if an immutable\\ncontainer (like a tuple) contains a reference to a mutable object, its\\nvalue changes if that mutable object is changed.\\n\\nTypes affect almost all aspects of object behavior. Even the\\nimportance of object identity is affected in some sense: for immutable\\ntypes, operations that compute new values may actually return a\\nreference to any existing object with the same type and value, while\\nfor mutable objects this is not allowed. E.g., after ``a = 1; b =\\n1``, ``a`` and ``b`` may or may not refer to the same object with the\\nvalue one, depending on the implementation, but after ``c = []; d =\\n[]``, ``c`` and ``d`` are guaranteed to refer to two different,\\nunique, newly created empty lists. (Note that ``c = d = []`` assigns\\nthe same object to both ``c`` and ``d``.)\\n',\n'operator-summary':'\\nOperator precedence\\n*******************\\n\\nThe following table summarizes the operator precedences in Python,\\nfrom lowest precedence (least binding) to highest precedence (most\\nbinding). Operators in the same box have the same precedence. 
Unless\\nthe syntax is explicitly given, operators are binary. Operators in\\nthe same box group left to right (except for comparisons, including\\ntests, which all have the same precedence and chain from left to right\\n--- see section *Comparisons* --- and exponentiation, which groups\\nfrom right to left).\\n\\n+-------------------------------------------------+---------------------------------------+\\n| Operator | Description |\\n+=================================================+=======================================+\\n| ``lambda`` | Lambda expression |\\n+-------------------------------------------------+---------------------------------------+\\n| ``if`` -- ``else`` | Conditional expression |\\n+-------------------------------------------------+---------------------------------------+\\n| ``or`` | Boolean OR |\\n+-------------------------------------------------+---------------------------------------+\\n| ``and`` | Boolean AND |\\n+-------------------------------------------------+---------------------------------------+\\n| ``not`` ``x`` | Boolean NOT |\\n+-------------------------------------------------+---------------------------------------+\\n| ``in``, ``not in``, ``is``, ``is not``, ``<``, | Comparisons, including membership |\\n| ``<=``, ``>``, ``>=``, ``!=``, ``==`` | tests and identity tests, |\\n+-------------------------------------------------+---------------------------------------+\\n| ``|`` | Bitwise OR |\\n+-------------------------------------------------+---------------------------------------+\\n| ``^`` | Bitwise XOR |\\n+-------------------------------------------------+---------------------------------------+\\n| ``&`` | Bitwise AND |\\n+-------------------------------------------------+---------------------------------------+\\n| ``<<``, ``>>`` | Shifts |\\n+-------------------------------------------------+---------------------------------------+\\n| ``+``, ``-`` | Addition and subtraction |\\n+-------------------------------------------------+---------------------------------------+\\n| ``*``, ``/``, ``//``, ``%`` | Multiplication, division, remainder |\\n| | [5] |\\n+-------------------------------------------------+---------------------------------------+\\n| ``+x``, ``-x``, ``~x`` | Positive, negative, bitwise NOT |\\n+-------------------------------------------------+---------------------------------------+\\n| ``**`` | Exponentiation [6] |\\n+-------------------------------------------------+---------------------------------------+\\n| ``x[index]``, ``x[index:index]``, | Subscription, slicing, call, |\\n| ``x(arguments...)``, ``x.attribute`` | attribute reference |\\n+-------------------------------------------------+---------------------------------------+\\n| ``(expressions...)``, ``[expressions...]``, | Binding or tuple display, list |\\n| ``{key: value...}``, ``{expressions...}`` | display, dictionary display, set |\\n| | display |\\n+-------------------------------------------------+---------------------------------------+\\n\\n-[ Footnotes ]-\\n\\n[1] While ``abs(x%y) < abs(y)`` is true mathematically, for floats it\\n may not be true numerically due to roundoff. For example, and\\n assuming a platform on which a Python float is an IEEE 754 double-\\n precision number, in order that ``-1e-100 % 1e100`` have the same\\n sign as ``1e100``, the computed result is ``-1e-100 + 1e100``,\\n which is numerically exactly equal to ``1e100``. 
The function\\n ``math.fmod()`` returns a result whose sign matches the sign of\\n the first argument instead, and so returns ``-1e-100`` in this\\n case. Which approach is more appropriate depends on the\\n application.\\n\\n[2] If x is very close to an exact integer multiple of y, it\\'s\\n possible for ``x//y`` to be one larger than ``(x-x%y)//y`` due to\\n rounding. In such cases, Python returns the latter result, in\\n order to preserve that ``divmod(x,y)[0] * y + x % y`` be very\\n close to ``x``.\\n\\n[3] While comparisons between strings make sense at the byte level,\\n they may be counter-intuitive to users. For example, the strings\\n ``\"\\\\u00C7\"`` and ``\"\\\\u0327\\\\u0043\"`` compare differently, even\\n though they both represent the same unicode character (LATIN\\n CAPITAL LETTER C WITH CEDILLA). To compare strings in a human\\n recognizable way, compare using ``unicodedata.normalize()``.\\n\\n[4] Due to automatic garbage-collection, free lists, and the dynamic\\n nature of descriptors, you may notice seemingly unusual behaviour\\n in certain uses of the ``is`` operator, like those involving\\n comparisons between instance methods, or constants. Check their\\n documentation for more info.\\n\\n[5] The ``%`` operator is also used for string formatting; the same\\n precedence applies.\\n\\n[6] The power operator ``**`` binds less tightly than an arithmetic or\\n bitwise unary operator on its right, that is, ``2**-1`` is\\n ``0.5``.\\n',\n'pass':'\\nThe ``pass`` statement\\n**********************\\n\\n pass_stmt ::= \"pass\"\\n\\n``pass`` is a null operation --- when it is executed, nothing happens.\\nIt is useful as a placeholder when a statement is required\\nsyntactically, but no code needs to be executed, for example:\\n\\n def f(arg): pass # a function that does nothing (yet)\\n\\n class C: pass # a class with no methods (yet)\\n',\n'power':'\\nThe power operator\\n******************\\n\\nThe power operator binds more tightly than unary operators on its\\nleft; it binds less tightly than unary operators on its right. The\\nsyntax is:\\n\\n power ::= primary [\"**\" u_expr]\\n\\nThus, in an unparenthesized sequence of power and unary operators, the\\noperators are evaluated from right to left (this does not constrain\\nthe evaluation order for the operands): ``-1**2`` results in ``-1``.\\n\\nThe power operator has the same semantics as the built-in ``pow()``\\nfunction, when called with two arguments: it yields its left argument\\nraised to the power of its right argument. The numeric arguments are\\nfirst converted to a common type, and the result is of that type.\\n\\nFor int operands, the result has the same type as the operands unless\\nthe second argument is negative; in that case, all arguments are\\nconverted to float and a float result is delivered. For example,\\n``10**2`` returns ``100``, but ``10**-2`` returns ``0.01``.\\n\\nRaising ``0.0`` to a negative power results in a\\n``ZeroDivisionError``. Raising a negative number to a fractional power\\nresults in a ``complex`` number. (In earlier versions it raised a\\n``ValueError``.)\\n',\n'raise':'\\nThe ``raise`` statement\\n***********************\\n\\n raise_stmt ::= \"raise\" [expression [\"from\" expression]]\\n\\nIf no expressions are present, ``raise`` re-raises the last exception\\nthat was active in the current scope. 
If no exception is active in\\nthe current scope, a ``RuntimeError`` exception is raised indicating\\nthat this is an error.\\n\\nOtherwise, ``raise`` evaluates the first expression as the exception\\nobject. It must be either a subclass or an instance of\\n``BaseException``. If it is a class, the exception instance will be\\nobtained when needed by instantiating the class with no arguments.\\n\\nThe *type* of the exception is the exception instance\\'s class, the\\n*value* is the instance itself.\\n\\nA traceback object is normally created automatically when an exception\\nis raised and attached to it as the ``__traceback__`` attribute, which\\nis writable. You can create an exception and set your own traceback in\\none step using the ``with_traceback()`` exception method (which\\nreturns the same exception instance, with its traceback set to its\\nargument), like so:\\n\\n raise Exception(\"foo occurred\").with_traceback(tracebackobj)\\n\\nThe ``from`` clause is used for exception chaining: if given, the\\nsecond *expression* must be another exception class or instance, which\\nwill then be attached to the raised exception as the ``__cause__``\\nattribute (which is writable). If the raised exception is not\\nhandled, both exceptions will be printed:\\n\\n >>> try:\\n ... print(1 / 0)\\n ... except Exception as exc:\\n ... raise RuntimeError(\"Something bad happened\") from exc\\n ...\\n Traceback (most recent call last):\\n File \"\", line 2, in \\n ZeroDivisionError: int division or modulo by zero\\n\\n The above exception was the direct cause of the following exception:\\n\\n Traceback (most recent call last):\\n File \"\", line 4, in \\n RuntimeError: Something bad happened\\n\\nA similar mechanism works implicitly if an exception is raised inside\\nan exception handler: the previous exception is then attached as the\\nnew exception\\'s ``__context__`` attribute:\\n\\n >>> try:\\n ... print(1 / 0)\\n ... except:\\n ... raise RuntimeError(\"Something bad happened\")\\n ...\\n Traceback (most recent call last):\\n File \"\", line 2, in \\n ZeroDivisionError: int division or modulo by zero\\n\\n During handling of the above exception, another exception occurred:\\n\\n Traceback (most recent call last):\\n File \"\", line 4, in \\n RuntimeError: Something bad happened\\n\\nAdditional information on exceptions can be found in section\\n*Exceptions*, and information about handling exceptions is in section\\n*The try statement*.\\n',\n'return':'\\nThe ``return`` statement\\n************************\\n\\n return_stmt ::= \"return\" [expression_list]\\n\\n``return`` may only occur syntactically nested in a function\\ndefinition, not within a nested class definition.\\n\\nIf an expression list is present, it is evaluated, else ``None`` is\\nsubstituted.\\n\\n``return`` leaves the current function call with the expression list\\n(or ``None``) as return value.\\n\\nWhen ``return`` passes control out of a ``try`` statement with a\\n``finally`` clause, that ``finally`` clause is executed before really\\nleaving the function.\\n\\nIn a generator function, the ``return`` statement indicates that the\\ngenerator is done and will cause ``StopIteration`` to be raised. 
The\\nreturned value (if any) is used as an argument to construct\\n``StopIteration`` and becomes the ``StopIteration.value`` attribute.\\n',\n'sequence-types':\"\\nEmulating container types\\n*************************\\n\\nThe following methods can be defined to implement container objects.\\nContainers usually are sequences (such as lists or tuples) or mappings\\n(like dictionaries), but can represent other containers as well. The\\nfirst set of methods is used either to emulate a sequence or to\\nemulate a mapping; the difference is that for a sequence, the\\nallowable keys should be the integers *k* for which ``0 <= k < N``\\nwhere *N* is the length of the sequence, or slice objects, which\\ndefine a range of items. It is also recommended that mappings provide\\nthe methods ``keys()``, ``values()``, ``items()``, ``get()``,\\n``clear()``, ``setdefault()``, ``pop()``, ``popitem()``, ``copy()``,\\nand ``update()`` behaving similar to those for Python's standard\\ndictionary objects. The ``collections`` module provides a\\n``MutableMapping`` abstract base class to help create those methods\\nfrom a base set of ``__getitem__()``, ``__setitem__()``,\\n``__delitem__()``, and ``keys()``. Mutable sequences should provide\\nmethods ``append()``, ``count()``, ``index()``, ``extend()``,\\n``insert()``, ``pop()``, ``remove()``, ``reverse()`` and ``sort()``,\\nlike Python standard list objects. Finally, sequence types should\\nimplement addition (meaning concatenation) and multiplication (meaning\\nrepetition) by defining the methods ``__add__()``, ``__radd__()``,\\n``__iadd__()``, ``__mul__()``, ``__rmul__()`` and ``__imul__()``\\ndescribed below; they should not define other numerical operators. It\\nis recommended that both mappings and sequences implement the\\n``__contains__()`` method to allow efficient use of the ``in``\\noperator; for mappings, ``in`` should search the mapping's keys; for\\nsequences, it should search through the values. It is further\\nrecommended that both mappings and sequences implement the\\n``__iter__()`` method to allow efficient iteration through the\\ncontainer; for mappings, ``__iter__()`` should be the same as\\n``keys()``; for sequences, it should iterate through the values.\\n\\nobject.__len__(self)\\n\\n Called to implement the built-in function ``len()``. Should return\\n the length of the object, an integer ``>=`` 0. Also, an object\\n that doesn't define a ``__bool__()`` method and whose ``__len__()``\\n method returns zero is considered to be false in a Boolean context.\\n\\nNote: Slicing is done exclusively with the following three methods. A\\n call like\\n\\n a[1:2] = b\\n\\n is translated to\\n\\n a[slice(1, 2, None)] = b\\n\\n and so forth. Missing slice items are always filled in with\\n ``None``.\\n\\nobject.__getitem__(self, key)\\n\\n Called to implement evaluation of ``self[key]``. For sequence\\n types, the accepted keys should be integers and slice objects.\\n Note that the special interpretation of negative indexes (if the\\n class wishes to emulate a sequence type) is up to the\\n ``__getitem__()`` method. If *key* is of an inappropriate type,\\n ``TypeError`` may be raised; if of a value outside the set of\\n indexes for the sequence (after any special interpretation of\\n negative values), ``IndexError`` should be raised. 
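As a sketch of the sequence side of this protocol (the class is invented for illustration), a small read-only sequence needs only ``__len__()`` and ``__getitem__()``; raising ``IndexError`` for out-of-range indexes is what lets iteration stop, and ``__contains__()`` makes ``in`` tests cheap:

   class Squares:
       """Read-only sequence of the first n square numbers."""

       def __init__(self, n):
           self._n = n

       def __len__(self):
           return self._n

       def __getitem__(self, index):
           if isinstance(index, slice):
               # build a plain list for the selected range of items
               return [self[i] for i in range(*index.indices(self._n))]
           if index < 0:
               index += self._n             # support negative indexes
           if not 0 <= index < self._n:
               raise IndexError(index)      # lets 'for' loops detect the end
           return index * index

       def __contains__(self, item):
           if not isinstance(item, int) or item < 0:
               return False
           root = int(item ** 0.5)
           return root * root == item and root < self._n

   s = Squares(5)
   print(list(s))          # [0, 1, 4, 9, 16] -- iteration falls back to __getitem__
   print(9 in s, 7 in s)   # True False
   print(s[1:4])           # [1, 4, 9]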
For mapping\\n types, if *key* is missing (not in the container), ``KeyError``\\n should be raised.\\n\\n Note: ``for`` loops expect that an ``IndexError`` will be raised for\\n illegal indexes to allow proper detection of the end of the\\n sequence.\\n\\nobject.__setitem__(self, key, value)\\n\\n Called to implement assignment to ``self[key]``. Same note as for\\n ``__getitem__()``. This should only be implemented for mappings if\\n the objects support changes to the values for keys, or if new keys\\n can be added, or for sequences if elements can be replaced. The\\n same exceptions should be raised for improper *key* values as for\\n the ``__getitem__()`` method.\\n\\nobject.__delitem__(self, key)\\n\\n Called to implement deletion of ``self[key]``. Same note as for\\n ``__getitem__()``. This should only be implemented for mappings if\\n the objects support removal of keys, or for sequences if elements\\n can be removed from the sequence. The same exceptions should be\\n raised for improper *key* values as for the ``__getitem__()``\\n method.\\n\\nobject.__iter__(self)\\n\\n This method is called when an iterator is required for a container.\\n This method should return a new iterator object that can iterate\\n over all the objects in the container. For mappings, it should\\n iterate over the keys of the container, and should also be made\\n available as the method ``keys()``.\\n\\n Iterator objects also need to implement this method; they are\\n required to return themselves. For more information on iterator\\n objects, see *Iterator Types*.\\n\\nobject.__reversed__(self)\\n\\n Called (if present) by the ``reversed()`` built-in to implement\\n reverse iteration. It should return a new iterator object that\\n iterates over all the objects in the container in reverse order.\\n\\n If the ``__reversed__()`` method is not provided, the\\n ``reversed()`` built-in will fall back to using the sequence\\n protocol (``__len__()`` and ``__getitem__()``). Objects that\\n support the sequence protocol should only provide\\n ``__reversed__()`` if they can provide an implementation that is\\n more efficient than the one provided by ``reversed()``.\\n\\nThe membership test operators (``in`` and ``not in``) are normally\\nimplemented as an iteration through a sequence. However, container\\nobjects can supply the following special method with a more efficient\\nimplementation, which also does not require the object be a sequence.\\n\\nobject.__contains__(self, item)\\n\\n Called to implement membership test operators. Should return true\\n if *item* is in *self*, false otherwise. For mapping objects, this\\n should consider the keys of the mapping rather than the values or\\n the key-item pairs.\\n\\n For objects that don't define ``__contains__()``, the membership\\n test first tries iteration via ``__iter__()``, then the old\\n sequence iteration protocol via ``__getitem__()``, see *this\\n section in the language reference*.\\n\",\n'shifting':'\\nShifting operations\\n*******************\\n\\nThe shifting operations have lower priority than the arithmetic\\noperations:\\n\\n shift_expr ::= a_expr | shift_expr ( \"<<\" | \">>\" ) a_expr\\n\\nThese operators accept integers as arguments. They shift the first\\nargument to the left or right by the number of bits given by the\\nsecond argument.\\n\\nA right shift by *n* bits is defined as division by ``pow(2,n)``. 
A\\nleft shift by *n* bits is defined as multiplication with ``pow(2,n)``.\\n\\nNote: In the current implementation, the right-hand operand is required to\\n be at most ``sys.maxsize``. If the right-hand operand is larger\\n than ``sys.maxsize`` an ``OverflowError`` exception is raised.\\n',\n'slicings':'\\nSlicings\\n********\\n\\nA slicing selects a range of items in a sequence object (e.g., a\\nstring, tuple or list). Slicings may be used as expressions or as\\ntargets in assignment or ``del`` statements. The syntax for a\\nslicing:\\n\\n slicing ::= primary \"[\" slice_list \"]\"\\n slice_list ::= slice_item (\",\" slice_item)* [\",\"]\\n slice_item ::= expression | proper_slice\\n proper_slice ::= [lower_bound] \":\" [upper_bound] [ \":\" [stride] ]\\n lower_bound ::= expression\\n upper_bound ::= expression\\n stride ::= expression\\n\\nThere is ambiguity in the formal syntax here: anything that looks like\\nan expression list also looks like a slice list, so any subscription\\ncan be interpreted as a slicing. Rather than further complicating the\\nsyntax, this is disambiguated by defining that in this case the\\ninterpretation as a subscription takes priority over the\\ninterpretation as a slicing (this is the case if the slice list\\ncontains no proper slice).\\n\\nThe semantics for a slicing are as follows. The primary must evaluate\\nto a mapping object, and it is indexed (using the same\\n``__getitem__()`` method as normal subscription) with a key that is\\nconstructed from the slice list, as follows. If the slice list\\ncontains at least one comma, the key is a tuple containing the\\nconversion of the slice items; otherwise, the conversion of the lone\\nslice item is the key. The conversion of a slice item that is an\\nexpression is that expression. The conversion of a proper slice is a\\nslice object (see section *The standard type hierarchy*) whose\\n``start``, ``stop`` and ``step`` attributes are the values of the\\nexpressions given as lower bound, upper bound and stride,\\nrespectively, substituting ``None`` for missing expressions.\\n',\n'specialattrs':'\\nSpecial Attributes\\n******************\\n\\nThe implementation adds a few special read-only attributes to several\\nobject types, where they are relevant. Some of these are not reported\\nby the ``dir()`` built-in function.\\n\\nobject.__dict__\\n\\n A dictionary or other mapping object used to store an object\\'s\\n (writable) attributes.\\n\\ninstance.__class__\\n\\n The class to which a class instance belongs.\\n\\nclass.__bases__\\n\\n The tuple of base classes of a class object.\\n\\nclass.__name__\\n\\n The name of the class or type.\\n\\nclass.__qualname__\\n\\n The *qualified name* of the class or type.\\n\\n New in version 3.3.\\n\\nclass.__mro__\\n\\n This attribute is a tuple of classes that are considered when\\n looking for base classes during method resolution.\\n\\nclass.mro()\\n\\n This method can be overridden by a metaclass to customize the\\n method resolution order for its instances. It is called at class\\n instantiation, and its result is stored in ``__mro__``.\\n\\nclass.__subclasses__()\\n\\n Each class keeps a list of weak references to its immediate\\n subclasses. This method returns a list of all those references\\n still alive. 
Example:\\n\\n >>> int.__subclasses__()\\n []\\n\\n-[ Footnotes ]-\\n\\n[1] Additional information on these special methods may be found in\\n the Python Reference Manual (*Basic customization*).\\n\\n[2] As a consequence, the list ``[1, 2]`` is considered equal to\\n ``[1.0, 2.0]``, and similarly for tuples.\\n\\n[3] They must have since the parser can\\'t tell the type of the\\n operands.\\n\\n[4] Cased characters are those with general category property being\\n one of \"Lu\" (Letter, uppercase), \"Ll\" (Letter, lowercase), or \"Lt\"\\n (Letter, titlecase).\\n\\n[5] To format only a tuple you should therefore provide a singleton\\n tuple whose only element is the tuple to be formatted.\\n',\n'specialnames':'\\nSpecial method names\\n********************\\n\\nA class can implement certain operations that are invoked by special\\nsyntax (such as arithmetic operations or subscripting and slicing) by\\ndefining methods with special names. This is Python\\'s approach to\\n*operator overloading*, allowing classes to define their own behavior\\nwith respect to language operators. For instance, if a class defines\\na method named ``__getitem__()``, and ``x`` is an instance of this\\nclass, then ``x[i]`` is roughly equivalent to ``type(x).__getitem__(x,\\ni)``. Except where mentioned, attempts to execute an operation raise\\nan exception when no appropriate method is defined (typically\\n``AttributeError`` or ``TypeError``).\\n\\nWhen implementing a class that emulates any built-in type, it is\\nimportant that the emulation only be implemented to the degree that it\\nmakes sense for the object being modelled. For example, some\\nsequences may work well with retrieval of individual elements, but\\nextracting a slice may not make sense. (One example of this is the\\n``NodeList`` interface in the W3C\\'s Document Object Model.)\\n\\n\\nBasic customization\\n===================\\n\\nobject.__new__(cls[, ...])\\n\\n Called to create a new instance of class *cls*. ``__new__()`` is a\\n static method (special-cased so you need not declare it as such)\\n that takes the class of which an instance was requested as its\\n first argument. The remaining arguments are those passed to the\\n object constructor expression (the call to the class). The return\\n value of ``__new__()`` should be the new object instance (usually\\n an instance of *cls*).\\n\\n Typical implementations create a new instance of the class by\\n invoking the superclass\\'s ``__new__()`` method using\\n ``super(currentclass, cls).__new__(cls[, ...])`` with appropriate\\n arguments and then modifying the newly-created instance as\\n necessary before returning it.\\n\\n If ``__new__()`` returns an instance of *cls*, then the new\\n instance\\'s ``__init__()`` method will be invoked like\\n ``__init__(self[, ...])``, where *self* is the new instance and the\\n remaining arguments are the same as were passed to ``__new__()``.\\n\\n If ``__new__()`` does not return an instance of *cls*, then the new\\n instance\\'s ``__init__()`` method will not be invoked.\\n\\n ``__new__()`` is intended mainly to allow subclasses of immutable\\n types (like int, str, or tuple) to customize instance creation. It\\n is also commonly overridden in custom metaclasses in order to\\n customize class creation.\\n\\nobject.__init__(self[, ...])\\n\\n Called when the instance is created. The arguments are those\\n passed to the class constructor expression. 
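A short, hypothetical illustration of the division of labour just described: ``__new__()`` is the only place where the value of an immutable instance can be fixed, while a derived class's ``__init__()`` calls the base class's ``__init__()`` explicitly:

   class Celsius(float):
       """Immutable float subclass; the value must be chosen in __new__()."""

       def __new__(cls, degrees):
           if degrees < -273.15:
               raise ValueError("below absolute zero")
           return super().__new__(cls, degrees)   # __init__ would be too late

   class LoggedCelsius(Celsius):
       def __init__(self, degrees):
           super().__init__()            # explicit call to the base __init__
           self.history = [degrees]      # extra mutable state on the subclass

   t = LoggedCelsius(21.5)
   print(t + 0.5, t.history)             # 22.0 [21.5]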
If a base class has an\\n ``__init__()`` method, the derived class\\'s ``__init__()`` method,\\n if any, must explicitly call it to ensure proper initialization of\\n the base class part of the instance; for example:\\n ``BaseClass.__init__(self, [args...])``. As a special constraint\\n on constructors, no value may be returned; doing so will cause a\\n ``TypeError`` to be raised at runtime.\\n\\nobject.__del__(self)\\n\\n Called when the instance is about to be destroyed. This is also\\n called a destructor. If a base class has a ``__del__()`` method,\\n the derived class\\'s ``__del__()`` method, if any, must explicitly\\n call it to ensure proper deletion of the base class part of the\\n instance. Note that it is possible (though not recommended!) for\\n the ``__del__()`` method to postpone destruction of the instance by\\n creating a new reference to it. It may then be called at a later\\n time when this new reference is deleted. It is not guaranteed that\\n ``__del__()`` methods are called for objects that still exist when\\n the interpreter exits.\\n\\n Note: ``del x`` doesn\\'t directly call ``x.__del__()`` --- the former\\n decrements the reference count for ``x`` by one, and the latter\\n is only called when ``x``\\'s reference count reaches zero. Some\\n common situations that may prevent the reference count of an\\n object from going to zero include: circular references between\\n objects (e.g., a doubly-linked list or a tree data structure with\\n parent and child pointers); a reference to the object on the\\n stack frame of a function that caught an exception (the traceback\\n stored in ``sys.exc_info()[2]`` keeps the stack frame alive); or\\n a reference to the object on the stack frame that raised an\\n unhandled exception in interactive mode (the traceback stored in\\n ``sys.last_traceback`` keeps the stack frame alive). The first\\n situation can only be remedied by explicitly breaking the cycles;\\n the latter two situations can be resolved by storing ``None`` in\\n ``sys.last_traceback``. Circular references which are garbage are\\n detected when the option cycle detector is enabled (it\\'s on by\\n default), but can only be cleaned up if there are no Python-\\n level ``__del__()`` methods involved. Refer to the documentation\\n for the ``gc`` module for more information about how\\n ``__del__()`` methods are handled by the cycle detector,\\n particularly the description of the ``garbage`` value.\\n\\n Warning: Due to the precarious circumstances under which ``__del__()``\\n methods are invoked, exceptions that occur during their execution\\n are ignored, and a warning is printed to ``sys.stderr`` instead.\\n Also, when ``__del__()`` is invoked in response to a module being\\n deleted (e.g., when execution of the program is done), other\\n globals referenced by the ``__del__()`` method may already have\\n been deleted or in the process of being torn down (e.g. the\\n import machinery shutting down). For this reason, ``__del__()``\\n methods should do the absolute minimum needed to maintain\\n external invariants. 
Starting with version 1.5, Python\\n guarantees that globals whose name begins with a single\\n underscore are deleted from their module before other globals are\\n deleted; if no other references to such globals exist, this may\\n help in assuring that imported modules are still available at the\\n time when the ``__del__()`` method is called.\\n\\nobject.__repr__(self)\\n\\n Called by the ``repr()`` built-in function to compute the\\n \"official\" string representation of an object. If at all possible,\\n this should look like a valid Python expression that could be used\\n to recreate an object with the same value (given an appropriate\\n environment). If this is not possible, a string of the form\\n ``<...some useful description...>`` should be returned. The return\\n value must be a string object. If a class defines ``__repr__()``\\n but not ``__str__()``, then ``__repr__()`` is also used when an\\n \"informal\" string representation of instances of that class is\\n required.\\n\\n This is typically used for debugging, so it is important that the\\n representation is information-rich and unambiguous.\\n\\nobject.__str__(self)\\n\\n Called by ``str(object)`` and the built-in functions ``format()``\\n and ``print()`` to compute the \"informal\" or nicely printable\\n string representation of an object. The return value must be a\\n *string* object.\\n\\n This method differs from ``object.__repr__()`` in that there is no\\n expectation that ``__str__()`` return a valid Python expression: a\\n more convenient or concise representation can be used.\\n\\n The default implementation defined by the built-in type ``object``\\n calls ``object.__repr__()``.\\n\\nobject.__bytes__(self)\\n\\n Called by ``bytes()`` to compute a byte-string representation of an\\n object. This should return a ``bytes`` object.\\n\\nobject.__format__(self, format_spec)\\n\\n Called by the ``format()`` built-in function (and by extension, the\\n ``str.format()`` method of class ``str``) to produce a \"formatted\"\\n string representation of an object. The ``format_spec`` argument is\\n a string that contains a description of the formatting options\\n desired. The interpretation of the ``format_spec`` argument is up\\n to the type implementing ``__format__()``, however most classes\\n will either delegate formatting to one of the built-in types, or\\n use a similar formatting option syntax.\\n\\n See *Format Specification Mini-Language* for a description of the\\n standard formatting syntax.\\n\\n The return value must be a string object.\\n\\nobject.__lt__(self, other)\\nobject.__le__(self, other)\\nobject.__eq__(self, other)\\nobject.__ne__(self, other)\\nobject.__gt__(self, other)\\nobject.__ge__(self, other)\\n\\n These are the so-called \"rich comparison\" methods. The\\n correspondence between operator symbols and method names is as\\n follows: ``x<y`` calls ``x.__lt__(y)``, ``x<=y`` calls\\n ``x.__le__(y)``, ``x==y`` calls ``x.__eq__(y)``, ``x!=y`` calls\\n ``x.__ne__(y)``, ``x>y`` calls ``x.__gt__(y)``, and ``x>=y`` calls\\n ``x.__ge__(y)``.\\n\\n A rich comparison method may return the singleton\\n ``NotImplemented`` if it does not implement the operation for a\\n given pair of arguments. By convention, ``False`` and ``True`` are\\n returned for a successful comparison. 
However, these methods can\\n return any value, so if the comparison operator is used in a\\n Boolean context (e.g., in the condition of an ``if`` statement),\\n Python will call ``bool()`` on the value to determine if the result\\n is true or false.\\n\\n There are no implied relationships among the comparison operators.\\n The truth of ``x==y`` does not imply that ``x!=y`` is false.\\n Accordingly, when defining ``__eq__()``, one should also define\\n ``__ne__()`` so that the operators will behave as expected. See\\n the paragraph on ``__hash__()`` for some important notes on\\n creating *hashable* objects which support custom comparison\\n operations and are usable as dictionary keys.\\n\\n There are no swapped-argument versions of these methods (to be used\\n when the left argument does not support the operation but the right\\n argument does); rather, ``__lt__()`` and ``__gt__()`` are each\\n other\\'s reflection, ``__le__()`` and ``__ge__()`` are each other\\'s\\n reflection, and ``__eq__()`` and ``__ne__()`` are their own\\n reflection.\\n\\n Arguments to rich comparison methods are never coerced.\\n\\n To automatically generate ordering operations from a single root\\n operation, see ``functools.total_ordering()``.\\n\\nobject.__hash__(self)\\n\\n Called by built-in function ``hash()`` and for operations on\\n members of hashed collections including ``set``, ``frozenset``, and\\n ``dict``. ``__hash__()`` should return an integer. The only\\n required property is that objects which compare equal have the same\\n hash value; it is advised to somehow mix together (e.g. using\\n exclusive or) the hash values for the components of the object that\\n also play a part in comparison of objects.\\n\\n If a class does not define an ``__eq__()`` method it should not\\n define a ``__hash__()`` operation either; if it defines\\n ``__eq__()`` but not ``__hash__()``, its instances will not be\\n usable as items in hashable collections. If a class defines\\n mutable objects and implements an ``__eq__()`` method, it should\\n not implement ``__hash__()``, since the implementation of hashable\\n collections requires that a key\\'s hash value is immutable (if the\\n object\\'s hash value changes, it will be in the wrong hash bucket).\\n\\n User-defined classes have ``__eq__()`` and ``__hash__()`` methods\\n by default; with them, all objects compare unequal (except with\\n themselves) and ``x.__hash__()`` returns an appropriate value such\\n that ``x == y`` implies both that ``x is y`` and ``hash(x) ==\\n hash(y)``.\\n\\n A class that overrides ``__eq__()`` and does not define\\n ``__hash__()`` will have its ``__hash__()`` implicitly set to\\n ``None``. When the ``__hash__()`` method of a class is ``None``,\\n instances of the class will raise an appropriate ``TypeError`` when\\n a program attempts to retrieve their hash value, and will also be\\n correctly identified as unhashable when checking ``isinstance(obj,\\n collections.Hashable``).\\n\\n If a class that overrides ``__eq__()`` needs to retain the\\n implementation of ``__hash__()`` from a parent class, the\\n interpreter must be told this explicitly by setting ``__hash__ =\\n .__hash__``.\\n\\n If a class that does not override ``__eq__()`` wishes to suppress\\n hash support, it should include ``__hash__ = None`` in the class\\n definition. 
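Tying the ``__eq__()``/``__hash__()`` rules above together, a hypothetical value class that is meant to be usable as a dictionary key defines both methods and hashes exactly the fields that equality compares:

   class Point:
       def __init__(self, x, y):
           self.x = x
           self.y = y

       def __eq__(self, other):
           if not isinstance(other, Point):
               return NotImplemented
           return (self.x, self.y) == (other.x, other.y)

       def __hash__(self):
           # hash exactly the components that __eq__ compares
           return hash((self.x, self.y))

   class UnhashablePoint(Point):
       # keeps the inherited __eq__ but suppresses hashing, so hash()
       # on an instance raises TypeError
       __hash__ = None

   labels = {Point(0, 0): "origin"}
   print(labels[Point(0, 0)])            # origin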
A class which defines its own ``__hash__()`` that\\n explicitly raises a ``TypeError`` would be incorrectly identified\\n as hashable by an ``isinstance(obj, collections.Hashable)`` call.\\n\\n Note: By default, the ``__hash__()`` values of str, bytes and datetime\\n objects are \"salted\" with an unpredictable random value.\\n Although they remain constant within an individual Python\\n process, they are not predictable between repeated invocations of\\n Python.This is intended to provide protection against a denial-\\n of-service caused by carefully-chosen inputs that exploit the\\n worst case performance of a dict insertion, O(n^2) complexity.\\n See http://www.ocert.org/advisories/ocert-2011-003.html for\\n details.Changing hash values affects the iteration order of\\n dicts, sets and other mappings. Python has never made guarantees\\n about this ordering (and it typically varies between 32-bit and\\n 64-bit builds).See also ``PYTHONHASHSEED``.\\n\\n Changed in version 3.3: Hash randomization is enabled by default.\\n\\nobject.__bool__(self)\\n\\n Called to implement truth value testing and the built-in operation\\n ``bool()``; should return ``False`` or ``True``. When this method\\n is not defined, ``__len__()`` is called, if it is defined, and the\\n object is considered true if its result is nonzero. If a class\\n defines neither ``__len__()`` nor ``__bool__()``, all its instances\\n are considered true.\\n\\n\\nCustomizing attribute access\\n============================\\n\\nThe following methods can be defined to customize the meaning of\\nattribute access (use of, assignment to, or deletion of ``x.name``)\\nfor class instances.\\n\\nobject.__getattr__(self, name)\\n\\n Called when an attribute lookup has not found the attribute in the\\n usual places (i.e. it is not an instance attribute nor is it found\\n in the class tree for ``self``). ``name`` is the attribute name.\\n This method should return the (computed) attribute value or raise\\n an ``AttributeError`` exception.\\n\\n Note that if the attribute is found through the normal mechanism,\\n ``__getattr__()`` is not called. (This is an intentional asymmetry\\n between ``__getattr__()`` and ``__setattr__()``.) This is done both\\n for efficiency reasons and because otherwise ``__getattr__()``\\n would have no way to access other attributes of the instance. Note\\n that at least for instance variables, you can fake total control by\\n not inserting any values in the instance attribute dictionary (but\\n instead inserting them in another object). See the\\n ``__getattribute__()`` method below for a way to actually get total\\n control over attribute access.\\n\\nobject.__getattribute__(self, name)\\n\\n Called unconditionally to implement attribute accesses for\\n instances of the class. If the class also defines\\n ``__getattr__()``, the latter will not be called unless\\n ``__getattribute__()`` either calls it explicitly or raises an\\n ``AttributeError``. This method should return the (computed)\\n attribute value or raise an ``AttributeError`` exception. In order\\n to avoid infinite recursion in this method, its implementation\\n should always call the base class method with the same name to\\n access any attributes it needs, for example,\\n ``object.__getattribute__(self, name)``.\\n\\n Note: This method may still be bypassed when looking up special methods\\n as the result of implicit invocation via language syntax or\\n built-in functions. 
See *Special method lookup*.\\n\\nobject.__setattr__(self, name, value)\\n\\n Called when an attribute assignment is attempted. This is called\\n instead of the normal mechanism (i.e. store the value in the\\n instance dictionary). *name* is the attribute name, *value* is the\\n value to be assigned to it.\\n\\n If ``__setattr__()`` wants to assign to an instance attribute, it\\n should call the base class method with the same name, for example,\\n ``object.__setattr__(self, name, value)``.\\n\\nobject.__delattr__(self, name)\\n\\n Like ``__setattr__()`` but for attribute deletion instead of\\n assignment. This should only be implemented if ``del obj.name`` is\\n meaningful for the object.\\n\\nobject.__dir__(self)\\n\\n Called when ``dir()`` is called on the object. A sequence must be\\n returned. ``dir()`` converts the returned sequence to a list and\\n sorts it.\\n\\n\\nImplementing Descriptors\\n------------------------\\n\\nThe following methods only apply when an instance of the class\\ncontaining the method (a so-called *descriptor* class) appears in an\\n*owner* class (the descriptor must be in either the owner\\'s class\\ndictionary or in the class dictionary for one of its parents). In the\\nexamples below, \"the attribute\" refers to the attribute whose name is\\nthe key of the property in the owner class\\' ``__dict__``.\\n\\nobject.__get__(self, instance, owner)\\n\\n Called to get the attribute of the owner class (class attribute\\n access) or of an instance of that class (instance attribute\\n access). *owner* is always the owner class, while *instance* is the\\n instance that the attribute was accessed through, or ``None`` when\\n the attribute is accessed through the *owner*. This method should\\n return the (computed) attribute value or raise an\\n ``AttributeError`` exception.\\n\\nobject.__set__(self, instance, value)\\n\\n Called to set the attribute on an instance *instance* of the owner\\n class to a new value, *value*.\\n\\nobject.__delete__(self, instance)\\n\\n Called to delete the attribute on an instance *instance* of the\\n owner class.\\n\\n\\nInvoking Descriptors\\n--------------------\\n\\nIn general, a descriptor is an object attribute with \"binding\\nbehavior\", one whose attribute access has been overridden by methods\\nin the descriptor protocol: ``__get__()``, ``__set__()``, and\\n``__delete__()``. If any of those methods are defined for an object,\\nit is said to be a descriptor.\\n\\nThe default behavior for attribute access is to get, set, or delete\\nthe attribute from an object\\'s dictionary. For instance, ``a.x`` has a\\nlookup chain starting with ``a.__dict__[\\'x\\']``, then\\n``type(a).__dict__[\\'x\\']``, and continuing through the base classes of\\n``type(a)`` excluding metaclasses.\\n\\nHowever, if the looked-up value is an object defining one of the\\ndescriptor methods, then Python may override the default behavior and\\ninvoke the descriptor method instead. 
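The descriptor protocol described above can be sketched with a small, invented data descriptor; because it defines both ``__get__()`` and ``__set__()``, it takes precedence over the instance dictionary and can validate every assignment:

   class Positive:
       """Data descriptor that only accepts values greater than zero."""

       def __init__(self, name):
           self.name = name              # key used in the instance __dict__

       def __get__(self, instance, owner):
           if instance is None:          # attribute looked up on the class itself
               return self
           return instance.__dict__[self.name]

       def __set__(self, instance, value):
           if value <= 0:
               raise ValueError(self.name + " must be positive")
           instance.__dict__[self.name] = value

   class Order:
       quantity = Positive("quantity")   # descriptor lives in the class dict

       def __init__(self, quantity):
           self.quantity = quantity      # routed through Positive.__set__

   o = Order(3)
   print(o.quantity)                     # 3, routed through Positive.__get__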
Where this occurs in the\\nprecedence chain depends on which descriptor methods were defined and\\nhow they were called.\\n\\nThe starting point for descriptor invocation is a binding, ``a.x``.\\nHow the arguments are assembled depends on ``a``:\\n\\nDirect Call\\n The simplest and least common call is when user code directly\\n invokes a descriptor method: ``x.__get__(a)``.\\n\\nInstance Binding\\n If binding to an object instance, ``a.x`` is transformed into the\\n call: ``type(a).__dict__[\\'x\\'].__get__(a, type(a))``.\\n\\nClass Binding\\n If binding to a class, ``A.x`` is transformed into the call:\\n ``A.__dict__[\\'x\\'].__get__(None, A)``.\\n\\nSuper Binding\\n If ``a`` is an instance of ``super``, then the binding ``super(B,\\n obj).m()`` searches ``obj.__class__.__mro__`` for the base class\\n ``A`` immediately preceding ``B`` and then invokes the descriptor\\n with the call: ``A.__dict__[\\'m\\'].__get__(obj, obj.__class__)``.\\n\\nFor instance bindings, the precedence of descriptor invocation depends\\non the which descriptor methods are defined. A descriptor can define\\nany combination of ``__get__()``, ``__set__()`` and ``__delete__()``.\\nIf it does not define ``__get__()``, then accessing the attribute will\\nreturn the descriptor object itself unless there is a value in the\\nobject\\'s instance dictionary. If the descriptor defines ``__set__()``\\nand/or ``__delete__()``, it is a data descriptor; if it defines\\nneither, it is a non-data descriptor. Normally, data descriptors\\ndefine both ``__get__()`` and ``__set__()``, while non-data\\ndescriptors have just the ``__get__()`` method. Data descriptors with\\n``__set__()`` and ``__get__()`` defined always override a redefinition\\nin an instance dictionary. In contrast, non-data descriptors can be\\noverridden by instances.\\n\\nPython methods (including ``staticmethod()`` and ``classmethod()``)\\nare implemented as non-data descriptors. Accordingly, instances can\\nredefine and override methods. This allows individual instances to\\nacquire behaviors that differ from other instances of the same class.\\n\\nThe ``property()`` function is implemented as a data descriptor.\\nAccordingly, instances cannot override the behavior of a property.\\n\\n\\n__slots__\\n---------\\n\\nBy default, instances of classes have a dictionary for attribute\\nstorage. This wastes space for objects having very few instance\\nvariables. The space consumption can become acute when creating large\\nnumbers of instances.\\n\\nThe default can be overridden by defining *__slots__* in a class\\ndefinition. The *__slots__* declaration takes a sequence of instance\\nvariables and reserves just enough space in each instance to hold a\\nvalue for each variable. Space is saved because *__dict__* is not\\ncreated for each instance.\\n\\nobject.__slots__\\n\\n This class variable can be assigned a string, iterable, or sequence\\n of strings with variable names used by instances. If defined in a\\n class, *__slots__* reserves space for the declared variables and\\n prevents the automatic creation of *__dict__* and *__weakref__* for\\n each instance.\\n\\n\\nNotes on using *__slots__*\\n~~~~~~~~~~~~~~~~~~~~~~~~~~\\n\\n* When inheriting from a class without *__slots__*, the *__dict__*\\n attribute of that class will always be accessible, so a *__slots__*\\n definition in the subclass is meaningless.\\n\\n* Without a *__dict__* variable, instances cannot be assigned new\\n variables not listed in the *__slots__* definition. 
Attempts to\\n assign to an unlisted variable name raises ``AttributeError``. If\\n dynamic assignment of new variables is desired, then add\\n ``\\'__dict__\\'`` to the sequence of strings in the *__slots__*\\n declaration.\\n\\n* Without a *__weakref__* variable for each instance, classes defining\\n *__slots__* do not support weak references to its instances. If weak\\n reference support is needed, then add ``\\'__weakref__\\'`` to the\\n sequence of strings in the *__slots__* declaration.\\n\\n* *__slots__* are implemented at the class level by creating\\n descriptors (*Implementing Descriptors*) for each variable name. As\\n a result, class attributes cannot be used to set default values for\\n instance variables defined by *__slots__*; otherwise, the class\\n attribute would overwrite the descriptor assignment.\\n\\n* The action of a *__slots__* declaration is limited to the class\\n where it is defined. As a result, subclasses will have a *__dict__*\\n unless they also define *__slots__* (which must only contain names\\n of any *additional* slots).\\n\\n* If a class defines a slot also defined in a base class, the instance\\n variable defined by the base class slot is inaccessible (except by\\n retrieving its descriptor directly from the base class). This\\n renders the meaning of the program undefined. In the future, a\\n check may be added to prevent this.\\n\\n* Nonempty *__slots__* does not work for classes derived from\\n \"variable-length\" built-in types such as ``int``, ``str`` and\\n ``tuple``.\\n\\n* Any non-string iterable may be assigned to *__slots__*. Mappings may\\n also be used; however, in the future, special meaning may be\\n assigned to the values corresponding to each key.\\n\\n* *__class__* assignment works only if both classes have the same\\n *__slots__*.\\n\\n\\nCustomizing class creation\\n==========================\\n\\nBy default, classes are constructed using ``type()``. The class body\\nis executed in a new namespace and the class name is bound locally to\\nthe result of ``type(name, bases, namespace)``.\\n\\nThe class creation process can be customised by passing the\\n``metaclass`` keyword argument in the class definition line, or by\\ninheriting from an existing class that included such an argument. In\\nthe following example, both ``MyClass`` and ``MySubclass`` are\\ninstances of ``Meta``:\\n\\n class Meta(type):\\n pass\\n\\n class MyClass(metaclass=Meta):\\n pass\\n\\n class MySubclass(MyClass):\\n pass\\n\\nAny other keyword arguments that are specified in the class definition\\nare passed through to all metaclass operations described below.\\n\\nWhen a class definition is executed, the following steps occur:\\n\\n* the appropriate metaclass is determined\\n\\n* the class namespace is prepared\\n\\n* the class body is executed\\n\\n* the class object is created\\n\\n\\nDetermining the appropriate metaclass\\n-------------------------------------\\n\\nThe appropriate metaclass for a class definition is determined as\\nfollows:\\n\\n* if no bases and no explicit metaclass are given, then ``type()`` is\\n used\\n\\n* if an explicit metaclass is given and it is *not* an instance of\\n ``type()``, then it is used directly as the metaclass\\n\\n* if an instance of ``type()`` is given as the explicit metaclass, or\\n bases are defined, then the most derived metaclass is used\\n\\nThe most derived metaclass is selected from the explicitly specified\\nmetaclass (if any) and the metaclasses (i.e. ``type(cls)``) of all\\nspecified base classes. 
The most derived metaclass is one which is a\\nsubtype of *all* of these candidate metaclasses. If none of the\\ncandidate metaclasses meets that criterion, then the class definition\\nwill fail with ``TypeError``.\\n\\n\\nPreparing the class namespace\\n-----------------------------\\n\\nOnce the appropriate metaclass has been identified, then the class\\nnamespace is prepared. If the metaclass has a ``__prepare__``\\nattribute, it is called as ``namespace = metaclass.__prepare__(name,\\nbases, **kwds)`` (where the additional keyword arguments, if any, come\\nfrom the class definition).\\n\\nIf the metaclass has no ``__prepare__`` attribute, then the class\\nnamespace is initialised as an empty ``dict()`` instance.\\n\\nSee also:\\n\\n **PEP 3115** - Metaclasses in Python 3000\\n Introduced the ``__prepare__`` namespace hook\\n\\n\\nExecuting the class body\\n------------------------\\n\\nThe class body is executed (approximately) as ``exec(body, globals(),\\nnamespace)``. The key difference from a normal call to ``exec()`` is\\nthat lexical scoping allows the class body (including any methods) to\\nreference names from the current and outer scopes when the class\\ndefinition occurs inside a function.\\n\\nHowever, even when the class definition occurs inside the function,\\nmethods defined inside the class still cannot see names defined at the\\nclass scope. Class variables must be accessed through the first\\nparameter of instance or class methods, and cannot be accessed at all\\nfrom static methods.\\n\\n\\nCreating the class object\\n-------------------------\\n\\nOnce the class namespace has been populated by executing the class\\nbody, the class object is created by calling ``metaclass(name, bases,\\nnamespace, **kwds)`` (the additional keywords passed here are the same\\nas those passed to ``__prepare__``).\\n\\nThis class object is the one that will be referenced by the zero-\\nargument form of ``super()``. ``__class__`` is an implicit closure\\nreference created by the compiler if any methods in a class body refer\\nto either ``__class__`` or ``super``. This allows the zero argument\\nform of ``super()`` to correctly identify the class being defined\\nbased on lexical scoping, while the class or instance that was used to\\nmake the current call is identified based on the first argument passed\\nto the method.\\n\\nAfter the class object is created, it is passed to the class\\ndecorators included in the class definition (if any) and the resulting\\nobject is bound in the local namespace as the defined class.\\n\\nSee also:\\n\\n **PEP 3135** - New super\\n Describes the implicit ``__class__`` closure reference\\n\\n\\nMetaclass example\\n-----------------\\n\\nThe potential uses for metaclasses are boundless. 
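As a minimal sketch of the most-derived-metaclass rule described above (all of the class and metaclass names here are hypothetical), two unrelated metaclasses cannot both govern one class, and the conflict is reported at class-definition time:

    class MetaA(type):
        pass

    class MetaB(type):
        pass

    class A(metaclass=MetaA):
        pass

    class B(metaclass=MetaB):
        pass

    class C(A):            # fine: MetaA, the metaclass of A, is the most derived candidate
        pass

    try:
        class D(A, B):     # neither MetaA nor MetaB is a subtype of the other
            pass
    except TypeError as exc:
        print(exc)         # metaclass conflict raised while executing the class definition
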
Some ideas that have\\nbeen explored include logging, interface checking, automatic\\ndelegation, automatic property creation, proxies, frameworks, and\\nautomatic resource locking/synchronization.\\n\\nHere is an example of a metaclass that uses an\\n``collections.OrderedDict`` to remember the order that class members\\nwere defined:\\n\\n class OrderedClass(type):\\n\\n @classmethod\\n def __prepare__(metacls, name, bases, **kwds):\\n return collections.OrderedDict()\\n\\n def __new__(cls, name, bases, namespace, **kwds):\\n result = type.__new__(cls, name, bases, dict(namespace))\\n result.members = tuple(namespace)\\n return result\\n\\n class A(metaclass=OrderedClass):\\n def one(self): pass\\n def two(self): pass\\n def three(self): pass\\n def four(self): pass\\n\\n >>> A.members\\n (\\'__module__\\', \\'one\\', \\'two\\', \\'three\\', \\'four\\')\\n\\nWhen the class definition for *A* gets executed, the process begins\\nwith calling the metaclass\\'s ``__prepare__()`` method which returns an\\nempty ``collections.OrderedDict``. That mapping records the methods\\nand attributes of *A* as they are defined within the body of the class\\nstatement. Once those definitions are executed, the ordered dictionary\\nis fully populated and the metaclass\\'s ``__new__()`` method gets\\ninvoked. That method builds the new type and it saves the ordered\\ndictionary keys in an attribute called ``members``.\\n\\n\\nCustomizing instance and subclass checks\\n========================================\\n\\nThe following methods are used to override the default behavior of the\\n``isinstance()`` and ``issubclass()`` built-in functions.\\n\\nIn particular, the metaclass ``abc.ABCMeta`` implements these methods\\nin order to allow the addition of Abstract Base Classes (ABCs) as\\n\"virtual base classes\" to any class or type (including built-in\\ntypes), including other ABCs.\\n\\nclass.__instancecheck__(self, instance)\\n\\n Return true if *instance* should be considered a (direct or\\n indirect) instance of *class*. If defined, called to implement\\n ``isinstance(instance, class)``.\\n\\nclass.__subclasscheck__(self, subclass)\\n\\n Return true if *subclass* should be considered a (direct or\\n indirect) subclass of *class*. If defined, called to implement\\n ``issubclass(subclass, class)``.\\n\\nNote that these methods are looked up on the type (metaclass) of a\\nclass. They cannot be defined as class methods in the actual class.\\nThis is consistent with the lookup of special methods that are called\\non instances, only in this case the instance is itself a class.\\n\\nSee also:\\n\\n **PEP 3119** - Introducing Abstract Base Classes\\n Includes the specification for customizing ``isinstance()`` and\\n ``issubclass()`` behavior through ``__instancecheck__()`` and\\n ``__subclasscheck__()``, with motivation for this functionality\\n in the context of adding Abstract Base Classes (see the ``abc``\\n module) to the language.\\n\\n\\nEmulating callable objects\\n==========================\\n\\nobject.__call__(self[, args...])\\n\\n Called when the instance is \"called\" as a function; if this method\\n is defined, ``x(arg1, arg2, ...)`` is a shorthand for\\n ``x.__call__(arg1, arg2, ...)``.\\n\\n\\nEmulating container types\\n=========================\\n\\nThe following methods can be defined to implement container objects.\\nContainers usually are sequences (such as lists or tuples) or mappings\\n(like dictionaries), but can represent other containers as well. 
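The ``__call__()`` protocol described above can be sketched with a small, purely illustrative class; calling the instance is shorthand for calling its ``__call__()`` method:

    class Adder:
        """Instances behave like one-argument functions."""

        def __init__(self, amount):
            self.amount = amount

        def __call__(self, value):
            # x(arg) is equivalent to x.__call__(arg)
            return value + self.amount

    add_three = Adder(3)
    print(add_three(10))             # 13
    print(add_three.__call__(10))    # 13, the explicit spelling
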
The\\nfirst set of methods is used either to emulate a sequence or to\\nemulate a mapping; the difference is that for a sequence, the\\nallowable keys should be the integers *k* for which ``0 <= k < N``\\nwhere *N* is the length of the sequence, or slice objects, which\\ndefine a range of items. It is also recommended that mappings provide\\nthe methods ``keys()``, ``values()``, ``items()``, ``get()``,\\n``clear()``, ``setdefault()``, ``pop()``, ``popitem()``, ``copy()``,\\nand ``update()`` behaving similar to those for Python\\'s standard\\ndictionary objects. The ``collections`` module provides a\\n``MutableMapping`` abstract base class to help create those methods\\nfrom a base set of ``__getitem__()``, ``__setitem__()``,\\n``__delitem__()``, and ``keys()``. Mutable sequences should provide\\nmethods ``append()``, ``count()``, ``index()``, ``extend()``,\\n``insert()``, ``pop()``, ``remove()``, ``reverse()`` and ``sort()``,\\nlike Python standard list objects. Finally, sequence types should\\nimplement addition (meaning concatenation) and multiplication (meaning\\nrepetition) by defining the methods ``__add__()``, ``__radd__()``,\\n``__iadd__()``, ``__mul__()``, ``__rmul__()`` and ``__imul__()``\\ndescribed below; they should not define other numerical operators. It\\nis recommended that both mappings and sequences implement the\\n``__contains__()`` method to allow efficient use of the ``in``\\noperator; for mappings, ``in`` should search the mapping\\'s keys; for\\nsequences, it should search through the values. It is further\\nrecommended that both mappings and sequences implement the\\n``__iter__()`` method to allow efficient iteration through the\\ncontainer; for mappings, ``__iter__()`` should be the same as\\n``keys()``; for sequences, it should iterate through the values.\\n\\nobject.__len__(self)\\n\\n Called to implement the built-in function ``len()``. Should return\\n the length of the object, an integer ``>=`` 0. Also, an object\\n that doesn\\'t define a ``__bool__()`` method and whose ``__len__()``\\n method returns zero is considered to be false in a Boolean context.\\n\\nNote: Slicing is done exclusively with the following three methods. A\\n call like\\n\\n a[1:2] = b\\n\\n is translated to\\n\\n a[slice(1, 2, None)] = b\\n\\n and so forth. Missing slice items are always filled in with\\n ``None``.\\n\\nobject.__getitem__(self, key)\\n\\n Called to implement evaluation of ``self[key]``. For sequence\\n types, the accepted keys should be integers and slice objects.\\n Note that the special interpretation of negative indexes (if the\\n class wishes to emulate a sequence type) is up to the\\n ``__getitem__()`` method. If *key* is of an inappropriate type,\\n ``TypeError`` may be raised; if of a value outside the set of\\n indexes for the sequence (after any special interpretation of\\n negative values), ``IndexError`` should be raised. For mapping\\n types, if *key* is missing (not in the container), ``KeyError``\\n should be raised.\\n\\n Note: ``for`` loops expect that an ``IndexError`` will be raised for\\n illegal indexes to allow proper detection of the end of the\\n sequence.\\n\\nobject.__setitem__(self, key, value)\\n\\n Called to implement assignment to ``self[key]``. Same note as for\\n ``__getitem__()``. This should only be implemented for mappings if\\n the objects support changes to the values for keys, or if new keys\\n can be added, or for sequences if elements can be replaced. 
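As a rough sketch of the mapping-style methods discussed above (``__len__()``, ``__getitem__()``, ``__setitem__()``, ``__delitem__()``, plus the recommended ``__iter__()`` and ``__contains__()``), a thin wrapper around a dict might look like the following; the ``LowerDict`` name and its key-lowercasing behaviour are illustrative only and assume string keys:

    class LowerDict:
        """Mapping-like container that lowercases its (string) keys."""

        def __init__(self):
            self._data = {}

        def __len__(self):
            return len(self._data)

        def __getitem__(self, key):
            try:
                return self._data[key.lower()]
            except KeyError:
                raise KeyError(key)         # missing keys should raise KeyError

        def __setitem__(self, key, value):
            self._data[key.lower()] = value

        def __delitem__(self, key):
            del self._data[key.lower()]

        def __iter__(self):                 # mappings should iterate over their keys
            return iter(self._data)

        def __contains__(self, key):        # 'in' searches the keys, as recommended above
            return key.lower() in self._data

    d = LowerDict()
    d['Name'] = 'Guido'
    print(d['NAME'], len(d), 'name' in d)   # Guido 1 True
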
The\\n same exceptions should be raised for improper *key* values as for\\n the ``__getitem__()`` method.\\n\\nobject.__delitem__(self, key)\\n\\n Called to implement deletion of ``self[key]``. Same note as for\\n ``__getitem__()``. This should only be implemented for mappings if\\n the objects support removal of keys, or for sequences if elements\\n can be removed from the sequence. The same exceptions should be\\n raised for improper *key* values as for the ``__getitem__()``\\n method.\\n\\nobject.__iter__(self)\\n\\n This method is called when an iterator is required for a container.\\n This method should return a new iterator object that can iterate\\n over all the objects in the container. For mappings, it should\\n iterate over the keys of the container, and should also be made\\n available as the method ``keys()``.\\n\\n Iterator objects also need to implement this method; they are\\n required to return themselves. For more information on iterator\\n objects, see *Iterator Types*.\\n\\nobject.__reversed__(self)\\n\\n Called (if present) by the ``reversed()`` built-in to implement\\n reverse iteration. It should return a new iterator object that\\n iterates over all the objects in the container in reverse order.\\n\\n If the ``__reversed__()`` method is not provided, the\\n ``reversed()`` built-in will fall back to using the sequence\\n protocol (``__len__()`` and ``__getitem__()``). Objects that\\n support the sequence protocol should only provide\\n ``__reversed__()`` if they can provide an implementation that is\\n more efficient than the one provided by ``reversed()``.\\n\\nThe membership test operators (``in`` and ``not in``) are normally\\nimplemented as an iteration through a sequence. However, container\\nobjects can supply the following special method with a more efficient\\nimplementation, which also does not require the object be a sequence.\\n\\nobject.__contains__(self, item)\\n\\n Called to implement membership test operators. Should return true\\n if *item* is in *self*, false otherwise. For mapping objects, this\\n should consider the keys of the mapping rather than the values or\\n the key-item pairs.\\n\\n For objects that don\\'t define ``__contains__()``, the membership\\n test first tries iteration via ``__iter__()``, then the old\\n sequence iteration protocol via ``__getitem__()``, see *this\\n section in the language reference*.\\n\\n\\nEmulating numeric types\\n=======================\\n\\nThe following methods can be defined to emulate numeric objects.\\nMethods corresponding to operations that are not supported by the\\nparticular kind of number implemented (e.g., bitwise operations for\\nnon-integral numbers) should be left undefined.\\n\\nobject.__add__(self, other)\\nobject.__sub__(self, other)\\nobject.__mul__(self, other)\\nobject.__truediv__(self, other)\\nobject.__floordiv__(self, other)\\nobject.__mod__(self, other)\\nobject.__divmod__(self, other)\\nobject.__pow__(self, other[, modulo])\\nobject.__lshift__(self, other)\\nobject.__rshift__(self, other)\\nobject.__and__(self, other)\\nobject.__xor__(self, other)\\nobject.__or__(self, other)\\n\\n These methods are called to implement the binary arithmetic\\n operations (``+``, ``-``, ``*``, ``/``, ``//``, ``%``,\\n ``divmod()``, ``pow()``, ``**``, ``<<``, ``>>``, ``&``, ``^``,\\n ``|``). For instance, to evaluate the expression ``x + y``, where\\n *x* is an instance of a class that has an ``__add__()`` method,\\n ``x.__add__(y)`` is called. 
The ``__divmod__()`` method should be\\n the equivalent to using ``__floordiv__()`` and ``__mod__()``; it\\n should not be related to ``__truediv__()``. Note that\\n ``__pow__()`` should be defined to accept an optional third\\n argument if the ternary version of the built-in ``pow()`` function\\n is to be supported.\\n\\n If one of those methods does not support the operation with the\\n supplied arguments, it should return ``NotImplemented``.\\n\\nobject.__radd__(self, other)\\nobject.__rsub__(self, other)\\nobject.__rmul__(self, other)\\nobject.__rtruediv__(self, other)\\nobject.__rfloordiv__(self, other)\\nobject.__rmod__(self, other)\\nobject.__rdivmod__(self, other)\\nobject.__rpow__(self, other)\\nobject.__rlshift__(self, other)\\nobject.__rrshift__(self, other)\\nobject.__rand__(self, other)\\nobject.__rxor__(self, other)\\nobject.__ror__(self, other)\\n\\n These methods are called to implement the binary arithmetic\\n operations (``+``, ``-``, ``*``, ``/``, ``//``, ``%``,\\n ``divmod()``, ``pow()``, ``**``, ``<<``, ``>>``, ``&``, ``^``,\\n ``|``) with reflected (swapped) operands. These functions are only\\n called if the left operand does not support the corresponding\\n operation and the operands are of different types. [2] For\\n instance, to evaluate the expression ``x - y``, where *y* is an\\n instance of a class that has an ``__rsub__()`` method,\\n ``y.__rsub__(x)`` is called if ``x.__sub__(y)`` returns\\n *NotImplemented*.\\n\\n Note that ternary ``pow()`` will not try calling ``__rpow__()``\\n (the coercion rules would become too complicated).\\n\\n Note: If the right operand\\'s type is a subclass of the left operand\\'s\\n type and that subclass provides the reflected method for the\\n operation, this method will be called before the left operand\\'s\\n non-reflected method. This behavior allows subclasses to\\n override their ancestors\\' operations.\\n\\nobject.__iadd__(self, other)\\nobject.__isub__(self, other)\\nobject.__imul__(self, other)\\nobject.__itruediv__(self, other)\\nobject.__ifloordiv__(self, other)\\nobject.__imod__(self, other)\\nobject.__ipow__(self, other[, modulo])\\nobject.__ilshift__(self, other)\\nobject.__irshift__(self, other)\\nobject.__iand__(self, other)\\nobject.__ixor__(self, other)\\nobject.__ior__(self, other)\\n\\n These methods are called to implement the augmented arithmetic\\n assignments (``+=``, ``-=``, ``*=``, ``/=``, ``//=``, ``%=``,\\n ``**=``, ``<<=``, ``>>=``, ``&=``, ``^=``, ``|=``). These methods\\n should attempt to do the operation in-place (modifying *self*) and\\n return the result (which could be, but does not have to be,\\n *self*). If a specific method is not defined, the augmented\\n assignment falls back to the normal methods. For instance, to\\n execute the statement ``x += y``, where *x* is an instance of a\\n class that has an ``__iadd__()`` method, ``x.__iadd__(y)`` is\\n called. If *x* is an instance of a class that does not define a\\n ``__iadd__()`` method, ``x.__add__(y)`` and ``y.__radd__(x)`` are\\n considered, as with the evaluation of ``x + y``.\\n\\nobject.__neg__(self)\\nobject.__pos__(self)\\nobject.__abs__(self)\\nobject.__invert__(self)\\n\\n Called to implement the unary arithmetic operations (``-``, ``+``,\\n ``abs()`` and ``~``).\\n\\nobject.__complex__(self)\\nobject.__int__(self)\\nobject.__float__(self)\\nobject.__round__(self[, n])\\n\\n Called to implement the built-in functions ``complex()``,\\n ``int()``, ``float()`` and ``round()``. 
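A compact sketch of the binary, reflected and in-place protocols described above; ``Money`` is a hypothetical class, and returning ``NotImplemented`` is what allows Python to fall back to the other operand's method:

    class Money:
        def __init__(self, cents):
            self.cents = cents

        def __add__(self, other):
            if isinstance(other, Money):
                return Money(self.cents + other.cents)
            return NotImplemented            # let Python try other.__radd__(self)

        def __radd__(self, other):
            if other == 0:                   # makes sum() work, since sum() starts at 0
                return Money(self.cents)
            return NotImplemented

        def __iadd__(self, other):
            if isinstance(other, Money):
                self.cents += other.cents    # in-place: modify self and return it
                return self
            return NotImplemented

        def __repr__(self):
            return 'Money(%d)' % self.cents

    total = sum([Money(100), Money(250)])    # 0 + Money(100) goes through __radd__
    print(total)                             # Money(350)
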
Should return a value of\\n the appropriate type.\\n\\nobject.__index__(self)\\n\\n Called to implement ``operator.index()``. Also called whenever\\n Python needs an integer object (such as in slicing, or in the\\n built-in ``bin()``, ``hex()`` and ``oct()`` functions). Must return\\n an integer.\\n\\n\\nWith Statement Context Managers\\n===============================\\n\\nA *context manager* is an object that defines the runtime context to\\nbe established when executing a ``with`` statement. The context\\nmanager handles the entry into, and the exit from, the desired runtime\\ncontext for the execution of the block of code. Context managers are\\nnormally invoked using the ``with`` statement (described in section\\n*The with statement*), but can also be used by directly invoking their\\nmethods.\\n\\nTypical uses of context managers include saving and restoring various\\nkinds of global state, locking and unlocking resources, closing opened\\nfiles, etc.\\n\\nFor more information on context managers, see *Context Manager Types*.\\n\\nobject.__enter__(self)\\n\\n Enter the runtime context related to this object. The ``with``\\n statement will bind this method\\'s return value to the target(s)\\n specified in the ``as`` clause of the statement, if any.\\n\\nobject.__exit__(self, exc_type, exc_value, traceback)\\n\\n Exit the runtime context related to this object. The parameters\\n describe the exception that caused the context to be exited. If the\\n context was exited without an exception, all three arguments will\\n be ``None``.\\n\\n If an exception is supplied, and the method wishes to suppress the\\n exception (i.e., prevent it from being propagated), it should\\n return a true value. Otherwise, the exception will be processed\\n normally upon exit from this method.\\n\\n Note that ``__exit__()`` methods should not reraise the passed-in\\n exception; this is the caller\\'s responsibility.\\n\\nSee also:\\n\\n **PEP 0343** - The \"with\" statement\\n The specification, background, and examples for the Python\\n ``with`` statement.\\n\\n\\nSpecial method lookup\\n=====================\\n\\nFor custom classes, implicit invocations of special methods are only\\nguaranteed to work correctly if defined on an object\\'s type, not in\\nthe object\\'s instance dictionary. That behaviour is the reason why\\nthe following code raises an exception:\\n\\n >>> class C:\\n ... pass\\n ...\\n >>> c = C()\\n >>> c.__len__ = lambda: 5\\n >>> len(c)\\n Traceback (most recent call last):\\n File \"\", line 1, in \\n TypeError: object of type \\'C\\' has no len()\\n\\nThe rationale behind this behaviour lies with a number of special\\nmethods such as ``__hash__()`` and ``__repr__()`` that are implemented\\nby all objects, including type objects. 
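The ``__enter__()``/``__exit__()`` protocol described in the context manager section above can be sketched with a hypothetical timer; returning a false value from ``__exit__()`` lets any exception propagate normally:

    import time

    class Timer:
        def __enter__(self):
            self.start = time.perf_counter()
            return self                       # bound to the 'as' target, if any

        def __exit__(self, exc_type, exc_value, traceback):
            self.elapsed = time.perf_counter() - self.start
            return False                      # do not suppress exceptions

    with Timer() as t:
        sum(range(100000))
    print('took %.6f seconds' % t.elapsed)
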
If the implicit lookup of\\nthese methods used the conventional lookup process, they would fail\\nwhen invoked on the type object itself:\\n\\n >>> 1 .__hash__() == hash(1)\\n True\\n >>> int.__hash__() == hash(int)\\n Traceback (most recent call last):\\n File \"\", line 1, in \\n TypeError: descriptor \\'__hash__\\' of \\'int\\' object needs an argument\\n\\nIncorrectly attempting to invoke an unbound method of a class in this\\nway is sometimes referred to as \\'metaclass confusion\\', and is avoided\\nby bypassing the instance when looking up special methods:\\n\\n >>> type(1).__hash__(1) == hash(1)\\n True\\n >>> type(int).__hash__(int) == hash(int)\\n True\\n\\nIn addition to bypassing any instance attributes in the interest of\\ncorrectness, implicit special method lookup generally also bypasses\\nthe ``__getattribute__()`` method even of the object\\'s metaclass:\\n\\n >>> class Meta(type):\\n ... def __getattribute__(*args):\\n ... print(\"Metaclass getattribute invoked\")\\n ... return type.__getattribute__(*args)\\n ...\\n >>> class C(object, metaclass=Meta):\\n ... def __len__(self):\\n ... return 10\\n ... def __getattribute__(*args):\\n ... print(\"Class getattribute invoked\")\\n ... return object.__getattribute__(*args)\\n ...\\n >>> c = C()\\n >>> c.__len__() # Explicit lookup via instance\\n Class getattribute invoked\\n 10\\n >>> type(c).__len__(c) # Explicit lookup via type\\n Metaclass getattribute invoked\\n 10\\n >>> len(c) # Implicit lookup\\n 10\\n\\nBypassing the ``__getattribute__()`` machinery in this fashion\\nprovides significant scope for speed optimisations within the\\ninterpreter, at the cost of some flexibility in the handling of\\nspecial methods (the special method *must* be set on the class object\\nitself in order to be consistently invoked by the interpreter).\\n\\n-[ Footnotes ]-\\n\\n[1] It *is* possible in some cases to change an object\\'s type, under\\n certain controlled conditions. It generally isn\\'t a good idea\\n though, since it can lead to some very strange behaviour if it is\\n handled incorrectly.\\n\\n[2] For operands of the same type, it is assumed that if the non-\\n reflected method (such as ``__add__()``) fails the operation is\\n not supported, which is why the reflected method is not called.\\n',\n'string-methods':'\\nString Methods\\n**************\\n\\nStrings implement all of the *common* sequence operations, along with\\nthe additional methods described below.\\n\\nStrings also support two styles of string formatting, one providing a\\nlarge degree of flexibility and customization (see ``str.format()``,\\n*Format String Syntax* and *String Formatting*) and the other based on\\nC ``printf`` style formatting that handles a narrower range of types\\nand is slightly harder to use correctly, but is often faster for the\\ncases it can handle (*printf-style String Formatting*).\\n\\nThe *Text Processing Services* section of the standard library covers\\na number of other modules that provide various text related utilities\\n(including regular expression support in the ``re`` module).\\n\\nstr.capitalize()\\n\\n Return a copy of the string with its first character capitalized\\n and the rest lowercased.\\n\\nstr.casefold()\\n\\n Return a casefolded copy of the string. Casefolded strings may be\\n used for caseless matching.\\n\\n Casefolding is similar to lowercasing but more aggressive because\\n it is intended to remove all case distinctions in a string. 
For\\n example, the German lowercase letter ``\\'\\xc3\\x9f\\'`` is equivalent to\\n ``\"ss\"``. Since it is already lowercase, ``lower()`` would do\\n nothing to ``\\'\\xc3\\x9f\\'``; ``casefold()`` converts it to ``\"ss\"``.\\n\\n The casefolding algorithm is described in section 3.13 of the\\n Unicode Standard.\\n\\n New in version 3.3.\\n\\nstr.center(width[, fillchar])\\n\\n Return centered in a string of length *width*. Padding is done\\n using the specified *fillchar* (default is a space).\\n\\nstr.count(sub[, start[, end]])\\n\\n Return the number of non-overlapping occurrences of substring *sub*\\n in the range [*start*, *end*]. Optional arguments *start* and\\n *end* are interpreted as in slice notation.\\n\\nstr.encode(encoding=\"utf-8\", errors=\"strict\")\\n\\n Return an encoded version of the string as a bytes object. Default\\n encoding is ``\\'utf-8\\'``. *errors* may be given to set a different\\n error handling scheme. The default for *errors* is ``\\'strict\\'``,\\n meaning that encoding errors raise a ``UnicodeError``. Other\\n possible values are ``\\'ignore\\'``, ``\\'replace\\'``,\\n ``\\'xmlcharrefreplace\\'``, ``\\'backslashreplace\\'`` and any other name\\n registered via ``codecs.register_error()``, see section *Codec Base\\n Classes*. For a list of possible encodings, see section *Standard\\n Encodings*.\\n\\n Changed in version 3.1: Support for keyword arguments added.\\n\\nstr.endswith(suffix[, start[, end]])\\n\\n Return ``True`` if the string ends with the specified *suffix*,\\n otherwise return ``False``. *suffix* can also be a tuple of\\n suffixes to look for. With optional *start*, test beginning at\\n that position. With optional *end*, stop comparing at that\\n position.\\n\\nstr.expandtabs([tabsize])\\n\\n Return a copy of the string where all tab characters are replaced\\n by zero or more spaces, depending on the current column and the\\n given tab size. The column number is reset to zero after each\\n newline occurring in the string. If *tabsize* is not given, a tab\\n size of ``8`` characters is assumed. This doesn\\'t understand other\\n non-printing characters or escape sequences.\\n\\nstr.find(sub[, start[, end]])\\n\\n Return the lowest index in the string where substring *sub* is\\n found, such that *sub* is contained in the slice ``s[start:end]``.\\n Optional arguments *start* and *end* are interpreted as in slice\\n notation. Return ``-1`` if *sub* is not found.\\n\\n Note: The ``find()`` method should be used only if you need to know the\\n position of *sub*. To check if *sub* is a substring or not, use\\n the ``in`` operator:\\n\\n >>> \\'Py\\' in \\'Python\\'\\n True\\n\\nstr.format(*args, **kwargs)\\n\\n Perform a string formatting operation. The string on which this\\n method is called can contain literal text or replacement fields\\n delimited by braces ``{}``. Each replacement field contains either\\n the numeric index of a positional argument, or the name of a\\n keyword argument. Returns a copy of the string where each\\n replacement field is replaced with the string value of the\\n corresponding argument.\\n\\n >>> \"The sum of 1 + 2 is {0}\".format(1+2)\\n \\'The sum of 1 + 2 is 3\\'\\n\\n See *Format String Syntax* for a description of the various\\n formatting options that can be specified in format strings.\\n\\nstr.format_map(mapping)\\n\\n Similar to ``str.format(**mapping)``, except that ``mapping`` is\\n used directly and not copied to a ``dict`` . 
This is useful if for\\n example ``mapping`` is a dict subclass:\\n\\n >>> class Default(dict):\\n ... def __missing__(self, key):\\n ... return key\\n ...\\n >>> \\'{name} was born in {country}\\'.format_map(Default(name=\\'Guido\\'))\\n \\'Guido was born in country\\'\\n\\n New in version 3.2.\\n\\nstr.index(sub[, start[, end]])\\n\\n Like ``find()``, but raise ``ValueError`` when the substring is not\\n found.\\n\\nstr.isalnum()\\n\\n Return true if all characters in the string are alphanumeric and\\n there is at least one character, false otherwise. A character\\n ``c`` is alphanumeric if one of the following returns ``True``:\\n ``c.isalpha()``, ``c.isdecimal()``, ``c.isdigit()``, or\\n ``c.isnumeric()``.\\n\\nstr.isalpha()\\n\\n Return true if all characters in the string are alphabetic and\\n there is at least one character, false otherwise. Alphabetic\\n characters are those characters defined in the Unicode character\\n database as \"Letter\", i.e., those with general category property\\n being one of \"Lm\", \"Lt\", \"Lu\", \"Ll\", or \"Lo\". Note that this is\\n different from the \"Alphabetic\" property defined in the Unicode\\n Standard.\\n\\nstr.isdecimal()\\n\\n Return true if all characters in the string are decimal characters\\n and there is at least one character, false otherwise. Decimal\\n characters are those from general category \"Nd\". This category\\n includes digit characters, and all characters that can be used to\\n form decimal-radix numbers, e.g. U+0660, ARABIC-INDIC DIGIT ZERO.\\n\\nstr.isdigit()\\n\\n Return true if all characters in the string are digits and there is\\n at least one character, false otherwise. Digits include decimal\\n characters and digits that need special handling, such as the\\n compatibility superscript digits. Formally, a digit is a character\\n that has the property value Numeric_Type=Digit or\\n Numeric_Type=Decimal.\\n\\nstr.isidentifier()\\n\\n Return true if the string is a valid identifier according to the\\n language definition, section *Identifiers and keywords*.\\n\\nstr.islower()\\n\\n Return true if all cased characters [4] in the string are lowercase\\n and there is at least one cased character, false otherwise.\\n\\nstr.isnumeric()\\n\\n Return true if all characters in the string are numeric characters,\\n and there is at least one character, false otherwise. Numeric\\n characters include digit characters, and all characters that have\\n the Unicode numeric value property, e.g. U+2155, VULGAR FRACTION\\n ONE FIFTH. Formally, numeric characters are those with the\\n property value Numeric_Type=Digit, Numeric_Type=Decimal or\\n Numeric_Type=Numeric.\\n\\nstr.isprintable()\\n\\n Return true if all characters in the string are printable or the\\n string is empty, false otherwise. Nonprintable characters are\\n those characters defined in the Unicode character database as\\n \"Other\" or \"Separator\", excepting the ASCII space (0x20) which is\\n considered printable. (Note that printable characters in this\\n context are those which should not be escaped when ``repr()`` is\\n invoked on a string. It has no bearing on the handling of strings\\n written to ``sys.stdout`` or ``sys.stderr``.)\\n\\nstr.isspace()\\n\\n Return true if there are only whitespace characters in the string\\n and there is at least one character, false otherwise. 
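A short doctest-style sketch contrasting the ``isdecimal()``, ``isdigit()`` and ``isnumeric()`` predicates described above, using the superscript digit and vulgar fraction characters mentioned in their descriptions:

    >>> '123'.isdecimal(), '123'.isdigit(), '123'.isnumeric()
    (True, True, True)
    >>> '\u00b2'.isdecimal(), '\u00b2'.isdigit(), '\u00b2'.isnumeric()   # superscript two
    (False, True, True)
    >>> '\u2155'.isdecimal(), '\u2155'.isdigit(), '\u2155'.isnumeric()   # vulgar fraction one fifth
    (False, False, True)
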
Whitespace\\n characters are those characters defined in the Unicode character\\n database as \"Other\" or \"Separator\" and those with bidirectional\\n property being one of \"WS\", \"B\", or \"S\".\\n\\nstr.istitle()\\n\\n Return true if the string is a titlecased string and there is at\\n least one character, for example uppercase characters may only\\n follow uncased characters and lowercase characters only cased ones.\\n Return false otherwise.\\n\\nstr.isupper()\\n\\n Return true if all cased characters [4] in the string are uppercase\\n and there is at least one cased character, false otherwise.\\n\\nstr.join(iterable)\\n\\n Return a string which is the concatenation of the strings in the\\n *iterable* *iterable*. A ``TypeError`` will be raised if there are\\n any non-string values in *iterable*, including ``bytes`` objects.\\n The separator between elements is the string providing this method.\\n\\nstr.ljust(width[, fillchar])\\n\\n Return the string left justified in a string of length *width*.\\n Padding is done using the specified *fillchar* (default is a\\n space). The original string is returned if *width* is less than or\\n equal to ``len(s)``.\\n\\nstr.lower()\\n\\n Return a copy of the string with all the cased characters [4]\\n converted to lowercase.\\n\\n The lowercasing algorithm used is described in section 3.13 of the\\n Unicode Standard.\\n\\nstr.lstrip([chars])\\n\\n Return a copy of the string with leading characters removed. The\\n *chars* argument is a string specifying the set of characters to be\\n removed. If omitted or ``None``, the *chars* argument defaults to\\n removing whitespace. The *chars* argument is not a prefix; rather,\\n all combinations of its values are stripped:\\n\\n >>> \\' spacious \\'.lstrip()\\n \\'spacious \\'\\n >>> \\'www.example.com\\'.lstrip(\\'cmowz.\\')\\n \\'example.com\\'\\n\\nstatic str.maketrans(x[, y[, z]])\\n\\n This static method returns a translation table usable for\\n ``str.translate()``.\\n\\n If there is only one argument, it must be a dictionary mapping\\n Unicode ordinals (integers) or characters (strings of length 1) to\\n Unicode ordinals, strings (of arbitrary lengths) or None.\\n Character keys will then be converted to ordinals.\\n\\n If there are two arguments, they must be strings of equal length,\\n and in the resulting dictionary, each character in x will be mapped\\n to the character at the same position in y. If there is a third\\n argument, it must be a string, whose characters will be mapped to\\n None in the result.\\n\\nstr.partition(sep)\\n\\n Split the string at the first occurrence of *sep*, and return a\\n 3-tuple containing the part before the separator, the separator\\n itself, and the part after the separator. If the separator is not\\n found, return a 3-tuple containing the string itself, followed by\\n two empty strings.\\n\\nstr.replace(old, new[, count])\\n\\n Return a copy of the string with all occurrences of substring *old*\\n replaced by *new*. If the optional argument *count* is given, only\\n the first *count* occurrences are replaced.\\n\\nstr.rfind(sub[, start[, end]])\\n\\n Return the highest index in the string where substring *sub* is\\n found, such that *sub* is contained within ``s[start:end]``.\\n Optional arguments *start* and *end* are interpreted as in slice\\n notation. 
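A brief sketch of ``str.maketrans()`` as described above, fed to ``str.translate()`` (covered later in this section); the two-string form maps characters positionally, the optional third argument marks characters for deletion, and the one-argument form takes a dictionary of ordinals:

    >>> table = str.maketrans('abc', 'xyz', '!')
    >>> 'a big cab!'.translate(table)
    'x yig zxy'
    >>> 'coffee'.translate({ord('o'): '0', ord('e'): None})
    'c0ff'
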
Return ``-1`` on failure.\\n\\nstr.rindex(sub[, start[, end]])\\n\\n Like ``rfind()`` but raises ``ValueError`` when the substring *sub*\\n is not found.\\n\\nstr.rjust(width[, fillchar])\\n\\n Return the string right justified in a string of length *width*.\\n Padding is done using the specified *fillchar* (default is a\\n space). The original string is returned if *width* is less than or\\n equal to ``len(s)``.\\n\\nstr.rpartition(sep)\\n\\n Split the string at the last occurrence of *sep*, and return a\\n 3-tuple containing the part before the separator, the separator\\n itself, and the part after the separator. If the separator is not\\n found, return a 3-tuple containing two empty strings, followed by\\n the string itself.\\n\\nstr.rsplit(sep=None, maxsplit=-1)\\n\\n Return a list of the words in the string, using *sep* as the\\n delimiter string. If *maxsplit* is given, at most *maxsplit* splits\\n are done, the *rightmost* ones. If *sep* is not specified or\\n ``None``, any whitespace string is a separator. Except for\\n splitting from the right, ``rsplit()`` behaves like ``split()``\\n which is described in detail below.\\n\\nstr.rstrip([chars])\\n\\n Return a copy of the string with trailing characters removed. The\\n *chars* argument is a string specifying the set of characters to be\\n removed. If omitted or ``None``, the *chars* argument defaults to\\n removing whitespace. The *chars* argument is not a suffix; rather,\\n all combinations of its values are stripped:\\n\\n >>> \\' spacious \\'.rstrip()\\n \\' spacious\\'\\n >>> \\'mississippi\\'.rstrip(\\'ipz\\')\\n \\'mississ\\'\\n\\nstr.split(sep=None, maxsplit=-1)\\n\\n Return a list of the words in the string, using *sep* as the\\n delimiter string. If *maxsplit* is given, at most *maxsplit*\\n splits are done (thus, the list will have at most ``maxsplit+1``\\n elements). If *maxsplit* is not specified or ``-1``, then there is\\n no limit on the number of splits (all possible splits are made).\\n\\n If *sep* is given, consecutive delimiters are not grouped together\\n and are deemed to delimit empty strings (for example,\\n ``\\'1,,2\\'.split(\\',\\')`` returns ``[\\'1\\', \\'\\', \\'2\\']``). The *sep*\\n argument may consist of multiple characters (for example,\\n ``\\'1<>2<>3\\'.split(\\'<>\\')`` returns ``[\\'1\\', \\'2\\', \\'3\\']``). Splitting\\n an empty string with a specified separator returns ``[\\'\\']``.\\n\\n If *sep* is not specified or is ``None``, a different splitting\\n algorithm is applied: runs of consecutive whitespace are regarded\\n as a single separator, and the result will contain no empty strings\\n at the start or end if the string has leading or trailing\\n whitespace. Consequently, splitting an empty string or a string\\n consisting of just whitespace with a ``None`` separator returns\\n ``[]``.\\n\\n For example, ``\\' 1 2 3 \\'.split()`` returns ``[\\'1\\', \\'2\\', \\'3\\']``,\\n and ``\\' 1 2 3 \\'.split(None, 1)`` returns ``[\\'1\\', \\'2 3 \\']``.\\n\\nstr.splitlines([keepends])\\n\\n Return a list of the lines in the string, breaking at line\\n boundaries. This method uses the *universal newlines* approach to\\n splitting lines. 
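To complement the ``split()`` examples above, a doctest-style sketch of ``partition()``, ``rpartition()`` and ``rsplit()``:

    >>> 'www.example.com'.partition('.')
    ('www', '.', 'example.com')
    >>> 'www.example.com'.rpartition('.')
    ('www.example', '.', 'com')
    >>> '1,,2'.rsplit(',', 1)
    ['1,', '2']
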
Line breaks are not included in the resulting list\\n unless *keepends* is given and true.\\n\\n For example, ``\\'ab c\\\\n\\\\nde fg\\\\rkl\\\\r\\\\n\\'.splitlines()`` returns\\n ``[\\'ab c\\', \\'\\', \\'de fg\\', \\'kl\\']``, while the same call with\\n ``splitlines(True)`` returns ``[\\'ab c\\\\n\\', \\'\\\\n\\', \\'de fg\\\\r\\',\\n \\'kl\\\\r\\\\n\\']``.\\n\\n Unlike ``split()`` when a delimiter string *sep* is given, this\\n method returns an empty list for the empty string, and a terminal\\n line break does not result in an extra line.\\n\\nstr.startswith(prefix[, start[, end]])\\n\\n Return ``True`` if string starts with the *prefix*, otherwise\\n return ``False``. *prefix* can also be a tuple of prefixes to look\\n for. With optional *start*, test string beginning at that\\n position. With optional *end*, stop comparing string at that\\n position.\\n\\nstr.strip([chars])\\n\\n Return a copy of the string with the leading and trailing\\n characters removed. The *chars* argument is a string specifying the\\n set of characters to be removed. If omitted or ``None``, the\\n *chars* argument defaults to removing whitespace. The *chars*\\n argument is not a prefix or suffix; rather, all combinations of its\\n values are stripped:\\n\\n >>> \\' spacious \\'.strip()\\n \\'spacious\\'\\n >>> \\'www.example.com\\'.strip(\\'cmowz.\\')\\n \\'example\\'\\n\\nstr.swapcase()\\n\\n Return a copy of the string with uppercase characters converted to\\n lowercase and vice versa. Note that it is not necessarily true that\\n ``s.swapcase().swapcase() == s``.\\n\\nstr.title()\\n\\n Return a titlecased version of the string where words start with an\\n uppercase character and the remaining characters are lowercase.\\n\\n The algorithm uses a simple language-independent definition of a\\n word as groups of consecutive letters. The definition works in\\n many contexts but it means that apostrophes in contractions and\\n possessives form word boundaries, which may not be the desired\\n result:\\n\\n >>> \"they\\'re bill\\'s friends from the UK\".title()\\n \"They\\'Re Bill\\'S Friends From The Uk\"\\n\\n A workaround for apostrophes can be constructed using regular\\n expressions:\\n\\n >>> import re\\n >>> def titlecase(s):\\n ... return re.sub(r\"[A-Za-z]+(\\'[A-Za-z]+)?\",\\n ... lambda mo: mo.group(0)[0].upper() +\\n ... mo.group(0)[1:].lower(),\\n ... s)\\n ...\\n >>> titlecase(\"they\\'re bill\\'s friends.\")\\n \"They\\'re Bill\\'s Friends.\"\\n\\nstr.translate(map)\\n\\n Return a copy of the *s* where all characters have been mapped\\n through the *map* which must be a dictionary of Unicode ordinals\\n (integers) to Unicode ordinals, strings or ``None``. Unmapped\\n characters are left untouched. Characters mapped to ``None`` are\\n deleted.\\n\\n You can use ``str.maketrans()`` to create a translation map from\\n character-to-character mappings in different formats.\\n\\n Note: An even more flexible approach is to create a custom character\\n mapping codec using the ``codecs`` module (see\\n ``encodings.cp1251`` for an example).\\n\\nstr.upper()\\n\\n Return a copy of the string with all the cased characters [4]\\n converted to uppercase. Note that ``str.upper().isupper()`` might\\n be ``False`` if ``s`` contains uncased characters or if the Unicode\\n category of the resulting character(s) is not \"Lu\" (Letter,\\n uppercase), but e.g. 
\"Lt\" (Letter, titlecase).\\n\\n The uppercasing algorithm used is described in section 3.13 of the\\n Unicode Standard.\\n\\nstr.zfill(width)\\n\\n Return the numeric string left filled with zeros in a string of\\n length *width*. A sign prefix is handled correctly. The original\\n string is returned if *width* is less than or equal to ``len(s)``.\\n',\n'strings':'\\nString and Bytes literals\\n*************************\\n\\nString literals are described by the following lexical definitions:\\n\\n stringliteral ::= [stringprefix](shortstring | longstring)\\n stringprefix ::= \"r\" | \"u\" | \"R\" | \"U\"\\n shortstring ::= \"\\'\" shortstringitem* \"\\'\" | \\'\"\\' shortstringitem* \\'\"\\'\\n longstring ::= \"\\'\\'\\'\" longstringitem* \"\\'\\'\\'\" | \\'\"\"\"\\' longstringitem* \\'\"\"\"\\'\\n shortstringitem ::= shortstringchar | stringescapeseq\\n longstringitem ::= longstringchar | stringescapeseq\\n shortstringchar ::= \\n longstringchar ::= \\n stringescapeseq ::= \"\\\\\" \\n\\n bytesliteral ::= bytesprefix(shortbytes | longbytes)\\n bytesprefix ::= \"b\" | \"B\" | \"br\" | \"Br\" | \"bR\" | \"BR\" | \"rb\" | \"rB\" | \"Rb\" | \"RB\"\\n shortbytes ::= \"\\'\" shortbytesitem* \"\\'\" | \\'\"\\' shortbytesitem* \\'\"\\'\\n longbytes ::= \"\\'\\'\\'\" longbytesitem* \"\\'\\'\\'\" | \\'\"\"\"\\' longbytesitem* \\'\"\"\"\\'\\n shortbytesitem ::= shortbyteschar | bytesescapeseq\\n longbytesitem ::= longbyteschar | bytesescapeseq\\n shortbyteschar ::= \\n longbyteschar ::= \\n bytesescapeseq ::= \"\\\\\" \\n\\nOne syntactic restriction not indicated by these productions is that\\nwhitespace is not allowed between the ``stringprefix`` or\\n``bytesprefix`` and the rest of the literal. The source character set\\nis defined by the encoding declaration; it is UTF-8 if no encoding\\ndeclaration is given in the source file; see section *Encoding\\ndeclarations*.\\n\\nIn plain English: Both types of literals can be enclosed in matching\\nsingle quotes (``\\'``) or double quotes (``\"``). They can also be\\nenclosed in matching groups of three single or double quotes (these\\nare generally referred to as *triple-quoted strings*). The backslash\\n(``\\\\``) character is used to escape characters that otherwise have a\\nspecial meaning, such as newline, backslash itself, or the quote\\ncharacter.\\n\\nBytes literals are always prefixed with ``\\'b\\'`` or ``\\'B\\'``; they\\nproduce an instance of the ``bytes`` type instead of the ``str`` type.\\nThey may only contain ASCII characters; bytes with a numeric value of\\n128 or greater must be expressed with escapes.\\n\\nAs of Python 3.3 it is possible again to prefix unicode strings with a\\n``u`` prefix to simplify maintenance of dual 2.x and 3.x codebases.\\n\\nBoth string and bytes literals may optionally be prefixed with a\\nletter ``\\'r\\'`` or ``\\'R\\'``; such strings are called *raw strings* and\\ntreat backslashes as literal characters. As a result, in string\\nliterals, ``\\'\\\\U\\'`` and ``\\'\\\\u\\'`` escapes in raw strings are not treated\\nspecially. Given that Python 2.x\\'s raw unicode literals behave\\ndifferently than Python 3.x\\'s the ``\\'ur\\'`` syntax is not supported.\\n\\n New in version 3.3: The ``\\'rb\\'`` prefix of raw bytes literals has\\n been added as a synonym of ``\\'br\\'``.\\n\\n New in version 3.3: Support for the unicode legacy literal\\n (``u\\'value\\'``) was reintroduced to simplify the maintenance of dual\\n Python 2.x and 3.x codebases. 
See **PEP 414** for more information.\\n\\nIn triple-quoted strings, unescaped newlines and quotes are allowed\\n(and are retained), except that three unescaped quotes in a row\\nterminate the string. (A \"quote\" is the character used to open the\\nstring, i.e. either ``\\'`` or ``\"``.)\\n\\nUnless an ``\\'r\\'`` or ``\\'R\\'`` prefix is present, escape sequences in\\nstrings are interpreted according to rules similar to those used by\\nStandard C. The recognized escape sequences are:\\n\\n+-------------------+-----------------------------------+---------+\\n| Escape Sequence | Meaning | Notes |\\n+===================+===================================+=========+\\n| ``\\\\newline`` | Backslash and newline ignored | |\\n+-------------------+-----------------------------------+---------+\\n| ``\\\\\\\\`` | Backslash (``\\\\``) | |\\n+-------------------+-----------------------------------+---------+\\n| ``\\\\\\'`` | Single quote (``\\'``) | |\\n+-------------------+-----------------------------------+---------+\\n| ``\\\\\"`` | Double quote (``\"``) | |\\n+-------------------+-----------------------------------+---------+\\n| ``\\\\a`` | ASCII Bell (BEL) | |\\n+-------------------+-----------------------------------+---------+\\n| ``\\\\b`` | ASCII Backspace (BS) | |\\n+-------------------+-----------------------------------+---------+\\n| ``\\\\f`` | ASCII Formfeed (FF) | |\\n+-------------------+-----------------------------------+---------+\\n| ``\\\\n`` | ASCII Linefeed (LF) | |\\n+-------------------+-----------------------------------+---------+\\n| ``\\\\r`` | ASCII Carriage Return (CR) | |\\n+-------------------+-----------------------------------+---------+\\n| ``\\\\t`` | ASCII Horizontal Tab (TAB) | |\\n+-------------------+-----------------------------------+---------+\\n| ``\\\\v`` | ASCII Vertical Tab (VT) | |\\n+-------------------+-----------------------------------+---------+\\n| ``\\\\ooo`` | Character with octal value *ooo* | (1,3) |\\n+-------------------+-----------------------------------+---------+\\n| ``\\\\xhh`` | Character with hex value *hh* | (2,3) |\\n+-------------------+-----------------------------------+---------+\\n\\nEscape sequences only recognized in string literals are:\\n\\n+-------------------+-----------------------------------+---------+\\n| Escape Sequence | Meaning | Notes |\\n+===================+===================================+=========+\\n| ``\\\\N{name}`` | Character named *name* in the | (4) |\\n| | Unicode database | |\\n+-------------------+-----------------------------------+---------+\\n| ``\\\\uxxxx`` | Character with 16-bit hex value | (5) |\\n| | *xxxx* | |\\n+-------------------+-----------------------------------+---------+\\n| ``\\\\Uxxxxxxxx`` | Character with 32-bit hex value | (6) |\\n| | *xxxxxxxx* | |\\n+-------------------+-----------------------------------+---------+\\n\\nNotes:\\n\\n1. As in Standard C, up to three octal digits are accepted.\\n\\n2. Unlike in Standard C, exactly two hex digits are required.\\n\\n3. In a bytes literal, hexadecimal and octal escapes denote the byte\\n with the given value. In a string literal, these escapes denote a\\n Unicode character with the given value.\\n\\n4. Changed in version 3.3: Support for name aliases [1] has been\\n added.\\n\\n5. Individual code units which form parts of a surrogate pair can be\\n encoded using this escape sequence. Exactly four hex digits are\\n required.\\n\\n6. Any Unicode character can be encoded this way. 
Exactly eight hex\\n digits are required.\\n\\nUnlike Standard C, all unrecognized escape sequences are left in the\\nstring unchanged, i.e., *the backslash is left in the string*. (This\\nbehavior is useful when debugging: if an escape sequence is mistyped,\\nthe resulting output is more easily recognized as broken.) It is also\\nimportant to note that the escape sequences only recognized in string\\nliterals fall into the category of unrecognized escapes for bytes\\nliterals.\\n\\nEven in a raw string, string quotes can be escaped with a backslash,\\nbut the backslash remains in the string; for example, ``r\"\\\\\"\"`` is a\\nvalid string literal consisting of two characters: a backslash and a\\ndouble quote; ``r\"\\\\\"`` is not a valid string literal (even a raw\\nstring cannot end in an odd number of backslashes). Specifically, *a\\nraw string cannot end in a single backslash* (since the backslash\\nwould escape the following quote character). Note also that a single\\nbackslash followed by a newline is interpreted as those two characters\\nas part of the string, *not* as a line continuation.\\n',\n'subscriptions':'\\nSubscriptions\\n*************\\n\\nA subscription selects an item of a sequence (string, tuple or list)\\nor mapping (dictionary) object:\\n\\n subscription ::= primary \"[\" expression_list \"]\"\\n\\nThe primary must evaluate to an object that supports subscription,\\ne.g. a list or dictionary. User-defined objects can support\\nsubscription by defining a ``__getitem__()`` method.\\n\\nFor built-in objects, there are two types of objects that support\\nsubscription:\\n\\nIf the primary is a mapping, the expression list must evaluate to an\\nobject whose value is one of the keys of the mapping, and the\\nsubscription selects the value in the mapping that corresponds to that\\nkey. (The expression list is a tuple except if it has exactly one\\nitem.)\\n\\nIf the primary is a sequence, the expression (list) must evaluate to\\nan integer or a slice (as discussed in the following section).\\n\\nThe formal syntax makes no special provision for negative indices in\\nsequences; however, built-in sequences all provide a ``__getitem__()``\\nmethod that interprets negative indices by adding the length of the\\nsequence to the index (so that ``x[-1]`` selects the last item of\\n``x``). The resulting value must be a nonnegative integer less than\\nthe number of items in the sequence, and the subscription selects the\\nitem whose index is that value (counting from zero). Since the support\\nfor negative indices and slicing occurs in the object\\'s\\n``__getitem__()`` method, subclasses overriding this method will need\\nto explicitly add that support.\\n\\nA string\\'s items are characters. A character is not a separate data\\ntype but a string of exactly one character.\\n',\n'truth':\"\\nTruth Value Testing\\n*******************\\n\\nAny object can be tested for truth value, for use in an ``if`` or\\n``while`` condition or as operand of the Boolean operations below. The\\nfollowing values are considered false:\\n\\n* ``None``\\n\\n* ``False``\\n\\n* zero of any numeric type, for example, ``0``, ``0.0``, ``0j``.\\n\\n* any empty sequence, for example, ``''``, ``()``, ``[]``.\\n\\n* any empty mapping, for example, ``{}``.\\n\\n* instances of user-defined classes, if the class defines a\\n ``__bool__()`` or ``__len__()`` method, when that method returns the\\n integer zero or ``bool`` value ``False``. 
[1]\\n\\nAll other values are considered true --- so objects of many types are\\nalways true.\\n\\nOperations and built-in functions that have a Boolean result always\\nreturn ``0`` or ``False`` for false and ``1`` or ``True`` for true,\\nunless otherwise stated. (Important exception: the Boolean operations\\n``or`` and ``and`` always return one of their operands.)\\n\",\n'try':'\\nThe ``try`` statement\\n*********************\\n\\nThe ``try`` statement specifies exception handlers and/or cleanup code\\nfor a group of statements:\\n\\n try_stmt ::= try1_stmt | try2_stmt\\n try1_stmt ::= \"try\" \":\" suite\\n (\"except\" [expression [\"as\" target]] \":\" suite)+\\n [\"else\" \":\" suite]\\n [\"finally\" \":\" suite]\\n try2_stmt ::= \"try\" \":\" suite\\n \"finally\" \":\" suite\\n\\nThe ``except`` clause(s) specify one or more exception handlers. When\\nno exception occurs in the ``try`` clause, no exception handler is\\nexecuted. When an exception occurs in the ``try`` suite, a search for\\nan exception handler is started. This search inspects the except\\nclauses in turn until one is found that matches the exception. An\\nexpression-less except clause, if present, must be last; it matches\\nany exception. For an except clause with an expression, that\\nexpression is evaluated, and the clause matches the exception if the\\nresulting object is \"compatible\" with the exception. An object is\\ncompatible with an exception if it is the class or a base class of the\\nexception object or a tuple containing an item compatible with the\\nexception.\\n\\nIf no except clause matches the exception, the search for an exception\\nhandler continues in the surrounding code and on the invocation stack.\\n[1]\\n\\nIf the evaluation of an expression in the header of an except clause\\nraises an exception, the original search for a handler is canceled and\\na search starts for the new exception in the surrounding code and on\\nthe call stack (it is treated as if the entire ``try`` statement\\nraised the exception).\\n\\nWhen a matching except clause is found, the exception is assigned to\\nthe target specified after the ``as`` keyword in that except clause,\\nif present, and the except clause\\'s suite is executed. All except\\nclauses must have an executable block. When the end of this block is\\nreached, execution continues normally after the entire try statement.\\n(This means that if two nested handlers exist for the same exception,\\nand the exception occurs in the try clause of the inner handler, the\\nouter handler will not handle the exception.)\\n\\nWhen an exception has been assigned using ``as target``, it is cleared\\nat the end of the except clause. This is as if\\n\\n except E as N:\\n foo\\n\\nwas translated to\\n\\n except E as N:\\n try:\\n foo\\n finally:\\n del N\\n\\nThis means the exception must be assigned to a different name to be\\nable to refer to it after the except clause. Exceptions are cleared\\nbecause with the traceback attached to them, they form a reference\\ncycle with the stack frame, keeping all locals in that frame alive\\nuntil the next garbage collection occurs.\\n\\nBefore an except clause\\'s suite is executed, details about the\\nexception are stored in the ``sys`` module and can be access via\\n``sys.exc_info()``. ``sys.exc_info()`` returns a 3-tuple consisting of\\nthe exception class, the exception instance and a traceback object\\n(see section *The standard type hierarchy*) identifying the point in\\nthe program where the exception occurred. 
``sys.exc_info()`` values\\nare restored to their previous values (before the call) when returning\\nfrom a function that handled an exception.\\n\\nThe optional ``else`` clause is executed if and when control flows off\\nthe end of the ``try`` clause. [2] Exceptions in the ``else`` clause\\nare not handled by the preceding ``except`` clauses.\\n\\nIf ``finally`` is present, it specifies a \\'cleanup\\' handler. The\\n``try`` clause is executed, including any ``except`` and ``else``\\nclauses. If an exception occurs in any of the clauses and is not\\nhandled, the exception is temporarily saved. The ``finally`` clause is\\nexecuted. If there is a saved exception it is re-raised at the end of\\nthe ``finally`` clause. If the ``finally`` clause raises another\\nexception, the saved exception is set as the context of the new\\nexception. If the ``finally`` clause executes a ``return`` or\\n``break`` statement, the saved exception is discarded:\\n\\n def f():\\n try:\\n 1/0\\n finally:\\n return 42\\n\\n >>> f()\\n 42\\n\\nThe exception information is not available to the program during\\nexecution of the ``finally`` clause.\\n\\nWhen a ``return``, ``break`` or ``continue`` statement is executed in\\nthe ``try`` suite of a ``try``...``finally`` statement, the\\n``finally`` clause is also executed \\'on the way out.\\' A ``continue``\\nstatement is illegal in the ``finally`` clause. (The reason is a\\nproblem with the current implementation --- this restriction may be\\nlifted in the future).\\n\\nAdditional information on exceptions can be found in section\\n*Exceptions*, and information on using the ``raise`` statement to\\ngenerate exceptions may be found in section *The raise statement*.\\n',\n'types':'\\nThe standard type hierarchy\\n***************************\\n\\nBelow is a list of the types that are built into Python. Extension\\nmodules (written in C, Java, or other languages, depending on the\\nimplementation) can define additional types. Future versions of\\nPython may add types to the type hierarchy (e.g., rational numbers,\\nefficiently stored arrays of integers, etc.), although such additions\\nwill often be provided via the standard library instead.\\n\\nSome of the type descriptions below contain a paragraph listing\\n\\'special attributes.\\' These are attributes that provide access to the\\nimplementation and are not intended for general use. Their definition\\nmay change in the future.\\n\\nNone\\n This type has a single value. There is a single object with this\\n value. This object is accessed through the built-in name ``None``.\\n It is used to signify the absence of a value in many situations,\\n e.g., it is returned from functions that don\\'t explicitly return\\n anything. Its truth value is false.\\n\\nNotImplemented\\n This type has a single value. There is a single object with this\\n value. This object is accessed through the built-in name\\n ``NotImplemented``. Numeric methods and rich comparison methods may\\n return this value if they do not implement the operation for the\\n operands provided. (The interpreter will then try the reflected\\n operation, or some other fallback, depending on the operator.) Its\\n truth value is true.\\n\\nEllipsis\\n This type has a single value. There is a single object with this\\n value. This object is accessed through the literal ``...`` or the\\n built-in name ``Ellipsis``. 
Its truth value is true.\\n\\n``numbers.Number``\\n These are created by numeric literals and returned as results by\\n arithmetic operators and arithmetic built-in functions. Numeric\\n objects are immutable; once created their value never changes.\\n Python numbers are of course strongly related to mathematical\\n numbers, but subject to the limitations of numerical representation\\n in computers.\\n\\n Python distinguishes between integers, floating point numbers, and\\n complex numbers:\\n\\n ``numbers.Integral``\\n These represent elements from the mathematical set of integers\\n (positive and negative).\\n\\n There are two types of integers:\\n\\n Integers (``int``)\\n\\n These represent numbers in an unlimited range, subject to\\n available (virtual) memory only. For the purpose of shift\\n and mask operations, a binary representation is assumed, and\\n negative numbers are represented in a variant of 2\\'s\\n complement which gives the illusion of an infinite string of\\n sign bits extending to the left.\\n\\n Booleans (``bool``)\\n These represent the truth values False and True. The two\\n objects representing the values False and True are the only\\n Boolean objects. The Boolean type is a subtype of the integer\\n type, and Boolean values behave like the values 0 and 1,\\n respectively, in almost all contexts, the exception being\\n that when converted to a string, the strings ``\"False\"`` or\\n ``\"True\"`` are returned, respectively.\\n\\n The rules for integer representation are intended to give the\\n most meaningful interpretation of shift and mask operations\\n involving negative integers.\\n\\n ``numbers.Real`` (``float``)\\n These represent machine-level double precision floating point\\n numbers. You are at the mercy of the underlying machine\\n architecture (and C or Java implementation) for the accepted\\n range and handling of overflow. Python does not support single-\\n precision floating point numbers; the savings in processor and\\n memory usage that are usually the reason for using these is\\n dwarfed by the overhead of using objects in Python, so there is\\n no reason to complicate the language with two kinds of floating\\n point numbers.\\n\\n ``numbers.Complex`` (``complex``)\\n These represent complex numbers as a pair of machine-level\\n double precision floating point numbers. The same caveats apply\\n as for floating point numbers. The real and imaginary parts of a\\n complex number ``z`` can be retrieved through the read-only\\n attributes ``z.real`` and ``z.imag``.\\n\\nSequences\\n These represent finite ordered sets indexed by non-negative\\n numbers. The built-in function ``len()`` returns the number of\\n items of a sequence. When the length of a sequence is *n*, the\\n index set contains the numbers 0, 1, ..., *n*-1. Item *i* of\\n sequence *a* is selected by ``a[i]``.\\n\\n Sequences also support slicing: ``a[i:j]`` selects all items with\\n index *k* such that *i* ``<=`` *k* ``<`` *j*. When used as an\\n expression, a slice is a sequence of the same type. This implies\\n that the index set is renumbered so that it starts at 0.\\n\\n Some sequences also support \"extended slicing\" with a third \"step\"\\n parameter: ``a[i:j:k]`` selects all items of *a* with index *x*\\n where ``x = i + n*k``, *n* ``>=`` ``0`` and *i* ``<=`` *x* ``<``\\n *j*.\\n\\n Sequences are distinguished according to their mutability:\\n\\n Immutable sequences\\n An object of an immutable sequence type cannot change once it is\\n created. 
(If the object contains references to other objects,\\n these other objects may be mutable and may be changed; however,\\n the collection of objects directly referenced by an immutable\\n object cannot change.)\\n\\n The following types are immutable sequences:\\n\\n Strings\\n A string is a sequence of values that represent Unicode\\n codepoints. All the codepoints in range ``U+0000 - U+10FFFF``\\n can be represented in a string. Python doesn\\'t have a\\n ``chr`` type, and every character in the string is\\n represented as a string object with length ``1``. The built-\\n in function ``ord()`` converts a character to its codepoint\\n (as an integer); ``chr()`` converts an integer in range ``0 -\\n 10FFFF`` to the corresponding character. ``str.encode()`` can\\n be used to convert a ``str`` to ``bytes`` using the given\\n encoding, and ``bytes.decode()`` can be used to achieve the\\n opposite.\\n\\n Tuples\\n The items of a tuple are arbitrary Python objects. Tuples of\\n two or more items are formed by comma-separated lists of\\n expressions. A tuple of one item (a \\'singleton\\') can be\\n formed by affixing a comma to an expression (an expression by\\n itself does not create a tuple, since parentheses must be\\n usable for grouping of expressions). An empty tuple can be\\n formed by an empty pair of parentheses.\\n\\n Bytes\\n A bytes object is an immutable array. The items are 8-bit\\n bytes, represented by integers in the range 0 <= x < 256.\\n Bytes literals (like ``b\\'abc\\'``) and the built-in function\\n ``bytes()`` can be used to construct bytes objects. Also,\\n bytes objects can be decoded to strings via the ``decode()``\\n method.\\n\\n Mutable sequences\\n Mutable sequences can be changed after they are created. The\\n subscription and slicing notations can be used as the target of\\n assignment and ``del`` (delete) statements.\\n\\n There are currently two intrinsic mutable sequence types:\\n\\n Lists\\n The items of a list are arbitrary Python objects. Lists are\\n formed by placing a comma-separated list of expressions in\\n square brackets. (Note that there are no special cases needed\\n to form lists of length 0 or 1.)\\n\\n Byte Arrays\\n A bytearray object is a mutable array. They are created by\\n the built-in ``bytearray()`` constructor. Aside from being\\n mutable (and hence unhashable), byte arrays otherwise provide\\n the same interface and functionality as immutable bytes\\n objects.\\n\\n The extension module ``array`` provides an additional example of\\n a mutable sequence type, as does the ``collections`` module.\\n\\nSet types\\n These represent unordered, finite sets of unique, immutable\\n objects. As such, they cannot be indexed by any subscript. However,\\n they can be iterated over, and the built-in function ``len()``\\n returns the number of items in a set. Common uses for sets are fast\\n membership testing, removing duplicates from a sequence, and\\n computing mathematical operations such as intersection, union,\\n difference, and symmetric difference.\\n\\n For set elements, the same immutability rules apply as for\\n dictionary keys. Note that numeric types obey the normal rules for\\n numeric comparison: if two numbers compare equal (e.g., ``1`` and\\n ``1.0``), only one of them can be contained in a set.\\n\\n There are currently two intrinsic set types:\\n\\n Sets\\n These represent a mutable set. 
They are created by the built-in\\n ``set()`` constructor and can be modified afterwards by several\\n methods, such as ``add()``.\\n\\n Frozen sets\\n These represent an immutable set. They are created by the\\n built-in ``frozenset()`` constructor. As a frozenset is\\n immutable and *hashable*, it can be used again as an element of\\n another set, or as a dictionary key.\\n\\nMappings\\n These represent finite sets of objects indexed by arbitrary index\\n sets. The subscript notation ``a[k]`` selects the item indexed by\\n ``k`` from the mapping ``a``; this can be used in expressions and\\n as the target of assignments or ``del`` statements. The built-in\\n function ``len()`` returns the number of items in a mapping.\\n\\n There is currently a single intrinsic mapping type:\\n\\n Dictionaries\\n These represent finite sets of objects indexed by nearly\\n arbitrary values. The only types of values not acceptable as\\n keys are values containing lists or dictionaries or other\\n mutable types that are compared by value rather than by object\\n identity, the reason being that the efficient implementation of\\n dictionaries requires a key\\'s hash value to remain constant.\\n Numeric types used for keys obey the normal rules for numeric\\n comparison: if two numbers compare equal (e.g., ``1`` and\\n ``1.0``) then they can be used interchangeably to index the same\\n dictionary entry.\\n\\n Dictionaries are mutable; they can be created by the ``{...}``\\n notation (see section *Dictionary displays*).\\n\\n The extension modules ``dbm.ndbm`` and ``dbm.gnu`` provide\\n additional examples of mapping types, as does the\\n ``collections`` module.\\n\\nCallable types\\n These are the types to which the function call operation (see\\n section *Calls*) can be applied:\\n\\n User-defined functions\\n A user-defined function object is created by a function\\n definition (see section *Function definitions*). It should be\\n called with an argument list containing the same number of items\\n as the function\\'s formal parameter list.\\n\\n Special attributes:\\n\\n +---------------------------+---------------------------------+-------------+\\n | Attribute | Meaning | |\\n +===========================+=================================+=============+\\n | ``__doc__`` | The function\\'s documentation | Writable |\\n | | string, or ``None`` if | |\\n | | unavailable | |\\n +---------------------------+---------------------------------+-------------+\\n | ``__name__`` | The function\\'s name | Writable |\\n +---------------------------+---------------------------------+-------------+\\n | ``__qualname__`` | The function\\'s *qualified name* | Writable |\\n | | New in version 3.3. | |\\n +---------------------------+---------------------------------+-------------+\\n | ``__module__`` | The name of the module the | Writable |\\n | | function was defined in, or | |\\n | | ``None`` if unavailable. | |\\n +---------------------------+---------------------------------+-------------+\\n | ``__defaults__`` | A tuple containing default | Writable |\\n | | argument values for those | |\\n | | arguments that have defaults, | |\\n | | or ``None`` if no arguments | |\\n | | have a default value | |\\n +---------------------------+---------------------------------+-------------+\\n | ``__code__`` | The code object representing | Writable |\\n | | the compiled function body. 
| |\\n +---------------------------+---------------------------------+-------------+\\n | ``__globals__`` | A reference to the dictionary | Read-only |\\n | | that holds the function\\'s | |\\n | | global variables --- the global | |\\n | | namespace of the module in | |\\n | | which the function was defined. | |\\n +---------------------------+---------------------------------+-------------+\\n | ``__dict__`` | The namespace supporting | Writable |\\n | | arbitrary function attributes. | |\\n +---------------------------+---------------------------------+-------------+\\n | ``__closure__`` | ``None`` or a tuple of cells | Read-only |\\n | | that contain bindings for the | |\\n | | function\\'s free variables. | |\\n +---------------------------+---------------------------------+-------------+\\n | ``__annotations__`` | A dict containing annotations | Writable |\\n | | of parameters. The keys of the | |\\n | | dict are the parameter names, | |\\n | | or ``\\'return\\'`` for the return | |\\n | | annotation, if provided. | |\\n +---------------------------+---------------------------------+-------------+\\n | ``__kwdefaults__`` | A dict containing defaults for | Writable |\\n | | keyword-only parameters. | |\\n +---------------------------+---------------------------------+-------------+\\n\\n Most of the attributes labelled \"Writable\" check the type of the\\n assigned value.\\n\\n Function objects also support getting and setting arbitrary\\n attributes, which can be used, for example, to attach metadata\\n to functions. Regular attribute dot-notation is used to get and\\n set such attributes. *Note that the current implementation only\\n supports function attributes on user-defined functions. Function\\n attributes on built-in functions may be supported in the\\n future.*\\n\\n Additional information about a function\\'s definition can be\\n retrieved from its code object; see the description of internal\\n types below.\\n\\n Instance methods\\n An instance method object combines a class, a class instance and\\n any callable object (normally a user-defined function).\\n\\n Special read-only attributes: ``__self__`` is the class instance\\n object, ``__func__`` is the function object; ``__doc__`` is the\\n method\\'s documentation (same as ``__func__.__doc__``);\\n ``__name__`` is the method name (same as ``__func__.__name__``);\\n ``__module__`` is the name of the module the method was defined\\n in, or ``None`` if unavailable.\\n\\n Methods also support accessing (but not setting) the arbitrary\\n function attributes on the underlying function object.\\n\\n User-defined method objects may be created when getting an\\n attribute of a class (perhaps via an instance of that class), if\\n that attribute is a user-defined function object or a class\\n method object.\\n\\n When an instance method object is created by retrieving a user-\\n defined function object from a class via one of its instances,\\n its ``__self__`` attribute is the instance, and the method\\n object is said to be bound. 
The new method\\'s ``__func__``\\n attribute is the original function object.\\n\\n When a user-defined method object is created by retrieving\\n another method object from a class or instance, the behaviour is\\n the same as for a function object, except that the ``__func__``\\n attribute of the new instance is not the original method object\\n but its ``__func__`` attribute.\\n\\n When an instance method object is created by retrieving a class\\n method object from a class or instance, its ``__self__``\\n attribute is the class itself, and its ``__func__`` attribute is\\n the function object underlying the class method.\\n\\n When an instance method object is called, the underlying\\n function (``__func__``) is called, inserting the class instance\\n (``__self__``) in front of the argument list. For instance,\\n when ``C`` is a class which contains a definition for a function\\n ``f()``, and ``x`` is an instance of ``C``, calling ``x.f(1)``\\n is equivalent to calling ``C.f(x, 1)``.\\n\\n When an instance method object is derived from a class method\\n object, the \"class instance\" stored in ``__self__`` will\\n actually be the class itself, so that calling either ``x.f(1)``\\n or ``C.f(1)`` is equivalent to calling ``f(C,1)`` where ``f`` is\\n the underlying function.\\n\\n Note that the transformation from function object to instance\\n method object happens each time the attribute is retrieved from\\n the instance. In some cases, a fruitful optimization is to\\n assign the attribute to a local variable and call that local\\n variable. Also notice that this transformation only happens for\\n user-defined functions; other callable objects (and all non-\\n callable objects) are retrieved without transformation. It is\\n also important to note that user-defined functions which are\\n attributes of a class instance are not converted to bound\\n methods; this *only* happens when the function is an attribute\\n of the class.\\n\\n Generator functions\\n A function or method which uses the ``yield`` statement (see\\n section *The yield statement*) is called a *generator function*.\\n Such a function, when called, always returns an iterator object\\n which can be used to execute the body of the function: calling\\n the iterator\\'s ``iterator__next__()`` method will cause the\\n function to execute until it provides a value using the\\n ``yield`` statement. When the function executes a ``return``\\n statement or falls off the end, a ``StopIteration`` exception is\\n raised and the iterator will have reached the end of the set of\\n values to be returned.\\n\\n Built-in functions\\n A built-in function object is a wrapper around a C function.\\n Examples of built-in functions are ``len()`` and ``math.sin()``\\n (``math`` is a standard built-in module). The number and type of\\n the arguments are determined by the C function. Special read-\\n only attributes: ``__doc__`` is the function\\'s documentation\\n string, or ``None`` if unavailable; ``__name__`` is the\\n function\\'s name; ``__self__`` is set to ``None`` (but see the\\n next item); ``__module__`` is the name of the module the\\n function was defined in or ``None`` if unavailable.\\n\\n Built-in methods\\n This is really a different disguise of a built-in function, this\\n time containing an object passed to the C function as an\\n implicit extra argument. An example of a built-in method is\\n ``alist.append()``, assuming *alist* is a list object. 
In this\\n case, the special read-only attribute ``__self__`` is set to the\\n object denoted by *alist*.\\n\\n Classes\\n Classes are callable. These objects normally act as factories\\n for new instances of themselves, but variations are possible for\\n class types that override ``__new__()``. The arguments of the\\n call are passed to ``__new__()`` and, in the typical case, to\\n ``__init__()`` to initialize the new instance.\\n\\n Class Instances\\n Instances of arbitrary classes can be made callable by defining\\n a ``__call__()`` method in their class.\\n\\nModules\\n Modules are a basic organizational unit of Python code, and are\\n created by the *import system* as invoked either by the ``import``\\n statement (see ``import``), or by calling functions such as\\n ``importlib.import_module()`` and built-in ``__import__()``. A\\n module object has a namespace implemented by a dictionary object\\n (this is the dictionary referenced by the ``__globals__`` attribute\\n of functions defined in the module). Attribute references are\\n translated to lookups in this dictionary, e.g., ``m.x`` is\\n equivalent to ``m.__dict__[\"x\"]``. A module object does not contain\\n the code object used to initialize the module (since it isn\\'t\\n needed once the initialization is done).\\n\\n Attribute assignment updates the module\\'s namespace dictionary,\\n e.g., ``m.x = 1`` is equivalent to ``m.__dict__[\"x\"] = 1``.\\n\\n Special read-only attribute: ``__dict__`` is the module\\'s namespace\\n as a dictionary object.\\n\\n **CPython implementation detail:** Because of the way CPython\\n clears module dictionaries, the module dictionary will be cleared\\n when the module falls out of scope even if the dictionary still has\\n live references. To avoid this, copy the dictionary or keep the\\n module around while using its dictionary directly.\\n\\n Predefined (writable) attributes: ``__name__`` is the module\\'s\\n name; ``__doc__`` is the module\\'s documentation string, or ``None``\\n if unavailable; ``__file__`` is the pathname of the file from which\\n the module was loaded, if it was loaded from a file. The\\n ``__file__`` attribute may be missing for certain types of modules,\\n such as C modules that are statically linked into the interpreter;\\n for extension modules loaded dynamically from a shared library, it\\n is the pathname of the shared library file.\\n\\nCustom classes\\n Custom class types are typically created by class definitions (see\\n section *Class definitions*). A class has a namespace implemented\\n by a dictionary object. Class attribute references are translated\\n to lookups in this dictionary, e.g., ``C.x`` is translated to\\n ``C.__dict__[\"x\"]`` (although there are a number of hooks which\\n allow for other means of locating attributes). When the attribute\\n name is not found there, the attribute search continues in the base\\n classes. This search of the base classes uses the C3 method\\n resolution order which behaves correctly even in the presence of\\n \\'diamond\\' inheritance structures where there are multiple\\n inheritance paths leading back to a common ancestor. Additional\\n details on the C3 MRO used by Python can be found in the\\n documentation accompanying the 2.3 release at\\n http://www.python.org/download/releases/2.3/mro/.\\n\\n When a class attribute reference (for class ``C``, say) would yield\\n a class method object, it is transformed into an instance method\\n object whose ``__self__`` attributes is ``C``. 
When it would yield\\n a static method object, it is transformed into the object wrapped\\n by the static method object. See section *Implementing Descriptors*\\n for another way in which attributes retrieved from a class may\\n differ from those actually contained in its ``__dict__``.\\n\\n Class attribute assignments update the class\\'s dictionary, never\\n the dictionary of a base class.\\n\\n A class object can be called (see above) to yield a class instance\\n (see below).\\n\\n Special attributes: ``__name__`` is the class name; ``__module__``\\n is the module name in which the class was defined; ``__dict__`` is\\n the dictionary containing the class\\'s namespace; ``__bases__`` is a\\n tuple (possibly empty or a singleton) containing the base classes,\\n in the order of their occurrence in the base class list;\\n ``__doc__`` is the class\\'s documentation string, or None if\\n undefined.\\n\\nClass instances\\n A class instance is created by calling a class object (see above).\\n A class instance has a namespace implemented as a dictionary which\\n is the first place in which attribute references are searched.\\n When an attribute is not found there, and the instance\\'s class has\\n an attribute by that name, the search continues with the class\\n attributes. If a class attribute is found that is a user-defined\\n function object, it is transformed into an instance method object\\n whose ``__self__`` attribute is the instance. Static method and\\n class method objects are also transformed; see above under\\n \"Classes\". See section *Implementing Descriptors* for another way\\n in which attributes of a class retrieved via its instances may\\n differ from the objects actually stored in the class\\'s\\n ``__dict__``. If no class attribute is found, and the object\\'s\\n class has a ``__getattr__()`` method, that is called to satisfy the\\n lookup.\\n\\n Attribute assignments and deletions update the instance\\'s\\n dictionary, never a class\\'s dictionary. If the class has a\\n ``__setattr__()`` or ``__delattr__()`` method, this is called\\n instead of updating the instance dictionary directly.\\n\\n Class instances can pretend to be numbers, sequences, or mappings\\n if they have methods with certain special names. See section\\n *Special method names*.\\n\\n Special attributes: ``__dict__`` is the attribute dictionary;\\n ``__class__`` is the instance\\'s class.\\n\\nI/O objects (also known as file objects)\\n A *file object* represents an open file. Various shortcuts are\\n available to create file objects: the ``open()`` built-in function,\\n and also ``os.popen()``, ``os.fdopen()``, and the ``makefile()``\\n method of socket objects (and perhaps by other functions or methods\\n provided by extension modules).\\n\\n The objects ``sys.stdin``, ``sys.stdout`` and ``sys.stderr`` are\\n initialized to file objects corresponding to the interpreter\\'s\\n standard input, output and error streams; they are all open in text\\n mode and therefore follow the interface defined by the\\n ``io.TextIOBase`` abstract class.\\n\\nInternal types\\n A few types used internally by the interpreter are exposed to the\\n user. Their definitions may change with future versions of the\\n interpreter, but they are mentioned here for completeness.\\n\\n Code objects\\n Code objects represent *byte-compiled* executable Python code,\\n or *bytecode*. 
The difference between a code object and a\\n function object is that the function object contains an explicit\\n reference to the function\\'s globals (the module in which it was\\n defined), while a code object contains no context; also the\\n default argument values are stored in the function object, not\\n in the code object (because they represent values calculated at\\n run-time). Unlike function objects, code objects are immutable\\n and contain no references (directly or indirectly) to mutable\\n objects.\\n\\n Special read-only attributes: ``co_name`` gives the function\\n name; ``co_argcount`` is the number of positional arguments\\n (including arguments with default values); ``co_nlocals`` is the\\n number of local variables used by the function (including\\n arguments); ``co_varnames`` is a tuple containing the names of\\n the local variables (starting with the argument names);\\n ``co_cellvars`` is a tuple containing the names of local\\n variables that are referenced by nested functions;\\n ``co_freevars`` is a tuple containing the names of free\\n variables; ``co_code`` is a string representing the sequence of\\n bytecode instructions; ``co_consts`` is a tuple containing the\\n literals used by the bytecode; ``co_names`` is a tuple\\n containing the names used by the bytecode; ``co_filename`` is\\n the filename from which the code was compiled;\\n ``co_firstlineno`` is the first line number of the function;\\n ``co_lnotab`` is a string encoding the mapping from bytecode\\n offsets to line numbers (for details see the source code of the\\n interpreter); ``co_stacksize`` is the required stack size\\n (including local variables); ``co_flags`` is an integer encoding\\n a number of flags for the interpreter.\\n\\n The following flag bits are defined for ``co_flags``: bit\\n ``0x04`` is set if the function uses the ``*arguments`` syntax\\n to accept an arbitrary number of positional arguments; bit\\n ``0x08`` is set if the function uses the ``**keywords`` syntax\\n to accept arbitrary keyword arguments; bit ``0x20`` is set if\\n the function is a generator.\\n\\n Future feature declarations (``from __future__ import\\n division``) also use bits in ``co_flags`` to indicate whether a\\n code object was compiled with a particular feature enabled: bit\\n ``0x2000`` is set if the function was compiled with future\\n division enabled; bits ``0x10`` and ``0x1000`` were used in\\n earlier versions of Python.\\n\\n Other bits in ``co_flags`` are reserved for internal use.\\n\\n If a code object represents a function, the first item in\\n ``co_consts`` is the documentation string of the function, or\\n ``None`` if undefined.\\n\\n Frame objects\\n Frame objects represent execution frames. 
They may occur in\\n traceback objects (see below).\\n\\n Special read-only attributes: ``f_back`` is to the previous\\n stack frame (towards the caller), or ``None`` if this is the\\n bottom stack frame; ``f_code`` is the code object being executed\\n in this frame; ``f_locals`` is the dictionary used to look up\\n local variables; ``f_globals`` is used for global variables;\\n ``f_builtins`` is used for built-in (intrinsic) names;\\n ``f_lasti`` gives the precise instruction (this is an index into\\n the bytecode string of the code object).\\n\\n Special writable attributes: ``f_trace``, if not ``None``, is a\\n function called at the start of each source code line (this is\\n used by the debugger); ``f_lineno`` is the current line number\\n of the frame --- writing to this from within a trace function\\n jumps to the given line (only for the bottom-most frame). A\\n debugger can implement a Jump command (aka Set Next Statement)\\n by writing to f_lineno.\\n\\n Traceback objects\\n Traceback objects represent a stack trace of an exception. A\\n traceback object is created when an exception occurs. When the\\n search for an exception handler unwinds the execution stack, at\\n each unwound level a traceback object is inserted in front of\\n the current traceback. When an exception handler is entered,\\n the stack trace is made available to the program. (See section\\n *The try statement*.) It is accessible as the third item of the\\n tuple returned by ``sys.exc_info()``. When the program contains\\n no suitable handler, the stack trace is written (nicely\\n formatted) to the standard error stream; if the interpreter is\\n interactive, it is also made available to the user as\\n ``sys.last_traceback``.\\n\\n Special read-only attributes: ``tb_next`` is the next level in\\n the stack trace (towards the frame where the exception\\n occurred), or ``None`` if there is no next level; ``tb_frame``\\n points to the execution frame of the current level;\\n ``tb_lineno`` gives the line number where the exception\\n occurred; ``tb_lasti`` indicates the precise instruction. The\\n line number and last instruction in the traceback may differ\\n from the line number of its frame object if the exception\\n occurred in a ``try`` statement with no matching except clause\\n or with a finally clause.\\n\\n Slice objects\\n Slice objects are used to represent slices for ``__getitem__()``\\n methods. They are also created by the built-in ``slice()``\\n function.\\n\\n Special read-only attributes: ``start`` is the lower bound;\\n ``stop`` is the upper bound; ``step`` is the step value; each is\\n ``None`` if omitted. These attributes can have any type.\\n\\n Slice objects support one method:\\n\\n slice.indices(self, length)\\n\\n This method takes a single integer argument *length* and\\n computes information about the slice that the slice object\\n would describe if applied to a sequence of *length* items.\\n It returns a tuple of three integers; respectively these are\\n the *start* and *stop* indices and the *step* or stride\\n length of the slice. Missing or out-of-bounds indices are\\n handled in a manner consistent with regular slices.\\n\\n Static method objects\\n Static method objects provide a way of defeating the\\n transformation of function objects to method objects described\\n above. A static method object is a wrapper around any other\\n object, usually a user-defined method object. 
When a static\\n method object is retrieved from a class or a class instance, the\\n object actually returned is the wrapped object, which is not\\n subject to any further transformation. Static method objects are\\n not themselves callable, although the objects they wrap usually\\n are. Static method objects are created by the built-in\\n ``staticmethod()`` constructor.\\n\\n Class method objects\\n A class method object, like a static method object, is a wrapper\\n around another object that alters the way in which that object\\n is retrieved from classes and class instances. The behaviour of\\n class method objects upon such retrieval is described above,\\n under \"User-defined methods\". Class method objects are created\\n by the built-in ``classmethod()`` constructor.\\n',\n'typesfunctions':'\\nFunctions\\n*********\\n\\nFunction objects are created by function definitions. The only\\noperation on a function object is to call it: ``func(argument-list)``.\\n\\nThere are really two flavors of function objects: built-in functions\\nand user-defined functions. Both support the same operation (to call\\nthe function), but the implementation is different, hence the\\ndifferent object types.\\n\\nSee *Function definitions* for more information.\\n',\n'typesmapping':'\\nMapping Types --- ``dict``\\n**************************\\n\\nA *mapping* object maps *hashable* values to arbitrary objects.\\nMappings are mutable objects. There is currently only one standard\\nmapping type, the *dictionary*. (For other containers see the built-\\nin ``list``, ``set``, and ``tuple`` classes, and the ``collections``\\nmodule.)\\n\\nA dictionary\\'s keys are *almost* arbitrary values. Values that are\\nnot *hashable*, that is, values containing lists, dictionaries or\\nother mutable types (that are compared by value rather than by object\\nidentity) may not be used as keys. Numeric types used for keys obey\\nthe normal rules for numeric comparison: if two numbers compare equal\\n(such as ``1`` and ``1.0``) then they can be used interchangeably to\\nindex the same dictionary entry. (Note however, that since computers\\nstore floating-point numbers as approximations it is usually unwise to\\nuse them as dictionary keys.)\\n\\nDictionaries can be created by placing a comma-separated list of\\n``key: value`` pairs within braces, for example: ``{\\'jack\\': 4098,\\n\\'sjoerd\\': 4127}`` or ``{4098: \\'jack\\', 4127: \\'sjoerd\\'}``, or by the\\n``dict`` constructor.\\n\\nclass class dict(**kwarg)\\nclass class dict(mapping, **kwarg)\\nclass class dict(iterable, **kwarg)\\n\\n Return a new dictionary initialized from an optional positional\\n argument and a possibly empty set of keyword arguments.\\n\\n If no positional argument is given, an empty dictionary is created.\\n If a positional argument is given and it is a mapping object, a\\n dictionary is created with the same key-value pairs as the mapping\\n object. Otherwise, the positional argument must be an *iterator*\\n object. Each item in the iterable must itself be an iterator with\\n exactly two objects. The first object of each item becomes a key\\n in the new dictionary, and the second object the corresponding\\n value. If a key occurs more than once, the last value for that key\\n becomes the corresponding value in the new dictionary.\\n\\n If keyword arguments are given, the keyword arguments and their\\n values are added to the dictionary created from the positional\\n argument. 
If a key being added is already present, the value from\\n the keyword argument replaces the value from the positional\\n argument.\\n\\n To illustrate, the following examples all return a dictionary equal\\n to ``{\"one\": 1, \"two\": 2, \"three\": 3}``:\\n\\n >>> a = dict(one=1, two=2, three=3)\\n >>> b = {\\'one\\': 1, \\'two\\': 2, \\'three\\': 3}\\n >>> c = dict(zip([\\'one\\', \\'two\\', \\'three\\'], [1, 2, 3]))\\n >>> d = dict([(\\'two\\', 2), (\\'one\\', 1), (\\'three\\', 3)])\\n >>> e = dict({\\'three\\': 3, \\'one\\': 1, \\'two\\': 2})\\n >>> a == b == c == d == e\\n True\\n\\n Providing keyword arguments as in the first example only works for\\n keys that are valid Python identifiers. Otherwise, any valid keys\\n can be used.\\n\\n These are the operations that dictionaries support (and therefore,\\n custom mapping types should support too):\\n\\n len(d)\\n\\n Return the number of items in the dictionary *d*.\\n\\n d[key]\\n\\n Return the item of *d* with key *key*. Raises a ``KeyError`` if\\n *key* is not in the map.\\n\\n If a subclass of dict defines a method ``__missing__()``, if the\\n key *key* is not present, the ``d[key]`` operation calls that\\n method with the key *key* as argument. The ``d[key]`` operation\\n then returns or raises whatever is returned or raised by the\\n ``__missing__(key)`` call if the key is not present. No other\\n operations or methods invoke ``__missing__()``. If\\n ``__missing__()`` is not defined, ``KeyError`` is raised.\\n ``__missing__()`` must be a method; it cannot be an instance\\n variable:\\n\\n >>> class Counter(dict):\\n ... def __missing__(self, key):\\n ... return 0\\n >>> c = Counter()\\n >>> c[\\'red\\']\\n 0\\n >>> c[\\'red\\'] += 1\\n >>> c[\\'red\\']\\n 1\\n\\n See ``collections.Counter`` for a complete implementation\\n including other methods helpful for accumulating and managing\\n tallies.\\n\\n d[key] = value\\n\\n Set ``d[key]`` to *value*.\\n\\n del d[key]\\n\\n Remove ``d[key]`` from *d*. Raises a ``KeyError`` if *key* is\\n not in the map.\\n\\n key in d\\n\\n Return ``True`` if *d* has a key *key*, else ``False``.\\n\\n key not in d\\n\\n Equivalent to ``not key in d``.\\n\\n iter(d)\\n\\n Return an iterator over the keys of the dictionary. This is a\\n shortcut for ``iter(d.keys())``.\\n\\n clear()\\n\\n Remove all items from the dictionary.\\n\\n copy()\\n\\n Return a shallow copy of the dictionary.\\n\\n classmethod fromkeys(seq[, value])\\n\\n Create a new dictionary with keys from *seq* and values set to\\n *value*.\\n\\n ``fromkeys()`` is a class method that returns a new dictionary.\\n *value* defaults to ``None``.\\n\\n get(key[, default])\\n\\n Return the value for *key* if *key* is in the dictionary, else\\n *default*. If *default* is not given, it defaults to ``None``,\\n so that this method never raises a ``KeyError``.\\n\\n items()\\n\\n Return a new view of the dictionary\\'s items (``(key, value)``\\n pairs). See the *documentation of view objects*.\\n\\n keys()\\n\\n Return a new view of the dictionary\\'s keys. See the\\n *documentation of view objects*.\\n\\n pop(key[, default])\\n\\n If *key* is in the dictionary, remove it and return its value,\\n else return *default*. If *default* is not given and *key* is\\n not in the dictionary, a ``KeyError`` is raised.\\n\\n popitem()\\n\\n Remove and return an arbitrary ``(key, value)`` pair from the\\n dictionary.\\n\\n ``popitem()`` is useful to destructively iterate over a\\n dictionary, as often used in set algorithms. 
If the dictionary\\n is empty, calling ``popitem()`` raises a ``KeyError``.\\n\\n setdefault(key[, default])\\n\\n If *key* is in the dictionary, return its value. If not, insert\\n *key* with a value of *default* and return *default*. *default*\\n defaults to ``None``.\\n\\n update([other])\\n\\n Update the dictionary with the key/value pairs from *other*,\\n overwriting existing keys. Return ``None``.\\n\\n ``update()`` accepts either another dictionary object or an\\n iterable of key/value pairs (as tuples or other iterables of\\n length two). If keyword arguments are specified, the dictionary\\n is then updated with those key/value pairs: ``d.update(red=1,\\n blue=2)``.\\n\\n values()\\n\\n Return a new view of the dictionary\\'s values. See the\\n *documentation of view objects*.\\n\\nSee also:\\n\\n ``types.MappingProxyType`` can be used to create a read-only view\\n of a ``dict``.\\n\\n\\nDictionary view objects\\n=======================\\n\\nThe objects returned by ``dict.keys()``, ``dict.values()`` and\\n``dict.items()`` are *view objects*. They provide a dynamic view on\\nthe dictionary\\'s entries, which means that when the dictionary\\nchanges, the view reflects these changes.\\n\\nDictionary views can be iterated over to yield their respective data,\\nand support membership tests:\\n\\nlen(dictview)\\n\\n Return the number of entries in the dictionary.\\n\\niter(dictview)\\n\\n Return an iterator over the keys, values or items (represented as\\n tuples of ``(key, value)``) in the dictionary.\\n\\n Keys and values are iterated over in an arbitrary order which is\\n non-random, varies across Python implementations, and depends on\\n the dictionary\\'s history of insertions and deletions. If keys,\\n values and items views are iterated over with no intervening\\n modifications to the dictionary, the order of items will directly\\n correspond. This allows the creation of ``(value, key)`` pairs\\n using ``zip()``: ``pairs = zip(d.values(), d.keys())``. Another\\n way to create the same list is ``pairs = [(v, k) for (k, v) in\\n d.items()]``.\\n\\n Iterating views while adding or deleting entries in the dictionary\\n may raise a ``RuntimeError`` or fail to iterate over all entries.\\n\\nx in dictview\\n\\n Return ``True`` if *x* is in the underlying dictionary\\'s keys,\\n values or items (in the latter case, *x* should be a ``(key,\\n value)`` tuple).\\n\\nKeys views are set-like since their entries are unique and hashable.\\nIf all values are hashable, so that ``(key, value)`` pairs are unique\\nand hashable, then the items view is also set-like. (Values views are\\nnot treated as set-like since the entries are generally not unique.)\\nFor set-like views, all of the operations defined for the abstract\\nbase class ``collections.abc.Set`` are available (for example, ``==``,\\n``<``, or ``^``).\\n\\nAn example of dictionary view usage:\\n\\n >>> dishes = {\\'eggs\\': 2, \\'sausage\\': 1, \\'bacon\\': 1, \\'spam\\': 500}\\n >>> keys = dishes.keys()\\n >>> values = dishes.values()\\n\\n >>> # iteration\\n >>> n = 0\\n >>> for val in values:\\n ... 
n += val\\n >>> print(n)\\n 504\\n\\n >>> # keys and values are iterated over in the same order\\n >>> list(keys)\\n [\\'eggs\\', \\'bacon\\', \\'sausage\\', \\'spam\\']\\n >>> list(values)\\n [2, 1, 1, 500]\\n\\n >>> # view objects are dynamic and reflect dict changes\\n >>> del dishes[\\'eggs\\']\\n >>> del dishes[\\'sausage\\']\\n >>> list(keys)\\n [\\'spam\\', \\'bacon\\']\\n\\n >>> # set operations\\n >>> keys & {\\'eggs\\', \\'bacon\\', \\'salad\\'}\\n {\\'bacon\\'}\\n >>> keys ^ {\\'sausage\\', \\'juice\\'}\\n {\\'juice\\', \\'sausage\\', \\'bacon\\', \\'spam\\'}\\n',\n'typesmethods':'\\nMethods\\n*******\\n\\nMethods are functions that are called using the attribute notation.\\nThere are two flavors: built-in methods (such as ``append()`` on\\nlists) and class instance methods. Built-in methods are described\\nwith the types that support them.\\n\\nIf you access a method (a function defined in a class namespace)\\nthrough an instance, you get a special object: a *bound method* (also\\ncalled *instance method*) object. When called, it will add the\\n``self`` argument to the argument list. Bound methods have two\\nspecial read-only attributes: ``m.__self__`` is the object on which\\nthe method operates, and ``m.__func__`` is the function implementing\\nthe method. Calling ``m(arg-1, arg-2, ..., arg-n)`` is completely\\nequivalent to calling ``m.__func__(m.__self__, arg-1, arg-2, ...,\\narg-n)``.\\n\\nLike function objects, bound method objects support getting arbitrary\\nattributes. However, since method attributes are actually stored on\\nthe underlying function object (``meth.__func__``), setting method\\nattributes on bound methods is disallowed. Attempting to set an\\nattribute on a method results in an ``AttributeError`` being raised.\\nIn order to set a method attribute, you need to explicitly set it on\\nthe underlying function object:\\n\\n >>> class C:\\n ... def method(self):\\n ... pass\\n ...\\n >>> c = C()\\n >>> c.method.whoami = \\'my name is method\\' # can\\'t set on the method\\n Traceback (most recent call last):\\n File \"\", line 1, in \\n AttributeError: \\'method\\' object has no attribute \\'whoami\\'\\n >>> c.method.__func__.whoami = \\'my name is method\\'\\n >>> c.method.whoami\\n \\'my name is method\\'\\n\\nSee *The standard type hierarchy* for more information.\\n',\n'typesmodules':\"\\nModules\\n*******\\n\\nThe only special operation on a module is attribute access:\\n``m.name``, where *m* is a module and *name* accesses a name defined\\nin *m*'s symbol table. Module attributes can be assigned to. (Note\\nthat the ``import`` statement is not, strictly speaking, an operation\\non a module object; ``import foo`` does not require a module object\\nnamed *foo* to exist, rather it requires an (external) *definition*\\nfor a module named *foo* somewhere.)\\n\\nA special attribute of every module is ``__dict__``. This is the\\ndictionary containing the module's symbol table. Modifying this\\ndictionary will actually change the module's symbol table, but direct\\nassignment to the ``__dict__`` attribute is not possible (you can\\nwrite ``m.__dict__['a'] = 1``, which defines ``m.a`` to be ``1``, but\\nyou can't write ``m.__dict__ = {}``). Modifying ``__dict__`` directly\\nis not recommended.\\n\\nModules built into the interpreter are written like this: ````. 
If loaded from a file, they are written as\\n````.\\n\",\n'typesseq':'\\nSequence Types --- ``list``, ``tuple``, ``range``\\n*************************************************\\n\\nThere are three basic sequence types: lists, tuples, and range\\nobjects. Additional sequence types tailored for processing of *binary\\ndata* and *text strings* are described in dedicated sections.\\n\\n\\nCommon Sequence Operations\\n==========================\\n\\nThe operations in the following table are supported by most sequence\\ntypes, both mutable and immutable. The ``collections.abc.Sequence``\\nABC is provided to make it easier to correctly implement these\\noperations on custom sequence types.\\n\\nThis table lists the sequence operations sorted in ascending priority\\n(operations in the same box have the same priority). In the table,\\n*s* and *t* are sequences of the same type, *n*, *i*, *j* and *k* are\\nintegers and *x* is an arbitrary object that meets any type and value\\nrestrictions imposed by *s*.\\n\\nThe ``in`` and ``not in`` operations have the same priorities as the\\ncomparison operations. The ``+`` (concatenation) and ``*``\\n(repetition) operations have the same priority as the corresponding\\nnumeric operations.\\n\\n+----------------------------+----------------------------------+------------+\\n| Operation | Result | Notes |\\n+============================+==================================+============+\\n| ``x in s`` | ``True`` if an item of *s* is | (1) |\\n| | equal to *x*, else ``False`` | |\\n+----------------------------+----------------------------------+------------+\\n| ``x not in s`` | ``False`` if an item of *s* is | (1) |\\n| | equal to *x*, else ``True`` | |\\n+----------------------------+----------------------------------+------------+\\n| ``s + t`` | the concatenation of *s* and *t* | (6)(7) |\\n+----------------------------+----------------------------------+------------+\\n| ``s * n`` or ``n * s`` | *n* shallow copies of *s* | (2)(7) |\\n| | concatenated | |\\n+----------------------------+----------------------------------+------------+\\n| ``s[i]`` | *i*th item of *s*, origin 0 | (3) |\\n+----------------------------+----------------------------------+------------+\\n| ``s[i:j]`` | slice of *s* from *i* to *j* | (3)(4) |\\n+----------------------------+----------------------------------+------------+\\n| ``s[i:j:k]`` | slice of *s* from *i* to *j* | (3)(5) |\\n| | with step *k* | |\\n+----------------------------+----------------------------------+------------+\\n| ``len(s)`` | length of *s* | |\\n+----------------------------+----------------------------------+------------+\\n| ``min(s)`` | smallest item of *s* | |\\n+----------------------------+----------------------------------+------------+\\n| ``max(s)`` | largest item of *s* | |\\n+----------------------------+----------------------------------+------------+\\n| ``s.index(x[, i[, j]])`` | index of the first occurence of | (8) |\\n| | *x* in *s* (at or after index | |\\n| | *i* and before index *j*) | |\\n+----------------------------+----------------------------------+------------+\\n| ``s.count(x)`` | total number of occurences of | |\\n| | *x* in *s* | |\\n+----------------------------+----------------------------------+------------+\\n\\nSequences of the same type also support comparisons. In particular,\\ntuples and lists are compared lexicographically by comparing\\ncorresponding elements. 
This means that to compare equal, every\\nelement must compare equal and the two sequences must be of the same\\ntype and have the same length. (For full details see *Comparisons* in\\nthe language reference.)\\n\\nNotes:\\n\\n1. While the ``in`` and ``not in`` operations are used only for simple\\n containment testing in the general case, some specialised sequences\\n (such as ``str``, ``bytes`` and ``bytearray``) also use them for\\n subsequence testing:\\n\\n >>> \"gg\" in \"eggs\"\\n True\\n\\n2. Values of *n* less than ``0`` are treated as ``0`` (which yields an\\n empty sequence of the same type as *s*). Note also that the copies\\n are shallow; nested structures are not copied. This often haunts\\n new Python programmers; consider:\\n\\n >>> lists = [[]] * 3\\n >>> lists\\n [[], [], []]\\n >>> lists[0].append(3)\\n >>> lists\\n [[3], [3], [3]]\\n\\n What has happened is that ``[[]]`` is a one-element list containing\\n an empty list, so all three elements of ``[[]] * 3`` are (pointers\\n to) this single empty list. Modifying any of the elements of\\n ``lists`` modifies this single list. You can create a list of\\n different lists this way:\\n\\n >>> lists = [[] for i in range(3)]\\n >>> lists[0].append(3)\\n >>> lists[1].append(5)\\n >>> lists[2].append(7)\\n >>> lists\\n [[3], [5], [7]]\\n\\n3. If *i* or *j* is negative, the index is relative to the end of the\\n string: ``len(s) + i`` or ``len(s) + j`` is substituted. But note\\n that ``-0`` is still ``0``.\\n\\n4. The slice of *s* from *i* to *j* is defined as the sequence of\\n items with index *k* such that ``i <= k < j``. If *i* or *j* is\\n greater than ``len(s)``, use ``len(s)``. If *i* is omitted or\\n ``None``, use ``0``. If *j* is omitted or ``None``, use\\n ``len(s)``. If *i* is greater than or equal to *j*, the slice is\\n empty.\\n\\n5. The slice of *s* from *i* to *j* with step *k* is defined as the\\n sequence of items with index ``x = i + n*k`` such that ``0 <= n <\\n (j-i)/k``. In other words, the indices are ``i``, ``i+k``,\\n ``i+2*k``, ``i+3*k`` and so on, stopping when *j* is reached (but\\n never including *j*). If *i* or *j* is greater than ``len(s)``,\\n use ``len(s)``. If *i* or *j* are omitted or ``None``, they become\\n \"end\" values (which end depends on the sign of *k*). Note, *k*\\n cannot be zero. If *k* is ``None``, it is treated like ``1``.\\n\\n6. Concatenating immutable sequences always results in a new object.\\n This means that building up a sequence by repeated concatenation\\n will have a quadratic runtime cost in the total sequence length.\\n To get a linear runtime cost, you must switch to one of the\\n alternatives below:\\n\\n * if concatenating ``str`` objects, you can build a list and use\\n ``str.join()`` at the end or else write to a ``io.StringIO``\\n instance and retrieve its value when complete\\n\\n * if concatenating ``bytes`` objects, you can similarly use\\n ``bytes.join()`` or ``io.BytesIO``, or you can do in-place\\n concatenation with a ``bytearray`` object. ``bytearray`` objects\\n are mutable and have an efficient overallocation mechanism\\n\\n * if concatenating ``tuple`` objects, extend a ``list`` instead\\n\\n * for other types, investigate the relevant class documentation\\n\\n7. Some sequence types (such as ``range``) only support item sequences\\n that follow specific patterns, and hence don\\'t support sequence\\n concatenation or repetition.\\n\\n8. ``index`` raises ``ValueError`` when *x* is not found in *s*. 
When\\n supported, the additional arguments to the index method allow\\n efficient searching of subsections of the sequence. Passing the\\n extra arguments is roughly equivalent to using ``s[i:j].index(x)``,\\n only without copying any data and with the returned index being\\n relative to the start of the sequence rather than the start of the\\n slice.\\n\\n\\nImmutable Sequence Types\\n========================\\n\\nThe only operation that immutable sequence types generally implement\\nthat is not also implemented by mutable sequence types is support for\\nthe ``hash()`` built-in.\\n\\nThis support allows immutable sequences, such as ``tuple`` instances,\\nto be used as ``dict`` keys and stored in ``set`` and ``frozenset``\\ninstances.\\n\\nAttempting to hash an immutable sequence that contains unhashable\\nvalues will result in ``TypeError``.\\n\\n\\nMutable Sequence Types\\n======================\\n\\nThe operations in the following table are defined on mutable sequence\\ntypes. The ``collections.abc.MutableSequence`` ABC is provided to make\\nit easier to correctly implement these operations on custom sequence\\ntypes.\\n\\nIn the table *s* is an instance of a mutable sequence type, *t* is any\\niterable object and *x* is an arbitrary object that meets any type and\\nvalue restrictions imposed by *s* (for example, ``bytearray`` only\\naccepts integers that meet the value restriction ``0 <= x <= 255``).\\n\\n+--------------------------------+----------------------------------+-----------------------+\\n| Operation | Result | Notes |\\n+================================+==================================+=======================+\\n| ``s[i] = x`` | item *i* of *s* is replaced by | |\\n| | *x* | |\\n+--------------------------------+----------------------------------+-----------------------+\\n| ``s[i:j] = t`` | slice of *s* from *i* to *j* is | |\\n| | replaced by the contents of the | |\\n| | iterable *t* | |\\n+--------------------------------+----------------------------------+-----------------------+\\n| ``del s[i:j]`` | same as ``s[i:j] = []`` | |\\n+--------------------------------+----------------------------------+-----------------------+\\n| ``s[i:j:k] = t`` | the elements of ``s[i:j:k]`` are | (1) |\\n| | replaced by those of *t* | |\\n+--------------------------------+----------------------------------+-----------------------+\\n| ``del s[i:j:k]`` | removes the elements of | |\\n| | ``s[i:j:k]`` from the list | |\\n+--------------------------------+----------------------------------+-----------------------+\\n| ``s.append(x)`` | appends *x* to the end of the | |\\n| | sequence (same as | |\\n| | ``s[len(s):len(s)] = [x]``) | |\\n+--------------------------------+----------------------------------+-----------------------+\\n| ``s.clear()`` | removes all items from ``s`` | (5) |\\n| | (same as ``del s[:]``) | |\\n+--------------------------------+----------------------------------+-----------------------+\\n| ``s.copy()`` | creates a shallow copy of ``s`` | (5) |\\n| | (same as ``s[:]``) | |\\n+--------------------------------+----------------------------------+-----------------------+\\n| ``s.extend(t)`` | extends *s* with the contents of | |\\n| | *t* (same as ``s[len(s):len(s)] | |\\n| | = t``) | |\\n+--------------------------------+----------------------------------+-----------------------+\\n| ``s.insert(i, x)`` | inserts *x* into *s* at the | |\\n| | index given by *i* (same as | |\\n| | ``s[i:i] = [x]``) | 
|\\n+--------------------------------+----------------------------------+-----------------------+\\n| ``s.pop([i])`` | retrieves the item at *i* and | (2) |\\n| | also removes it from *s* | |\\n+--------------------------------+----------------------------------+-----------------------+\\n| ``s.remove(x)`` | remove the first item from *s* | (3) |\\n| | where ``s[i] == x`` | |\\n+--------------------------------+----------------------------------+-----------------------+\\n| ``s.reverse()`` | reverses the items of *s* in | (4) |\\n| | place | |\\n+--------------------------------+----------------------------------+-----------------------+\\n\\nNotes:\\n\\n1. *t* must have the same length as the slice it is replacing.\\n\\n2. The optional argument *i* defaults to ``-1``, so that by default\\n the last item is removed and returned.\\n\\n3. ``remove`` raises ``ValueError`` when *x* is not found in *s*.\\n\\n4. The ``reverse()`` method modifies the sequence in place for economy\\n of space when reversing a large sequence. To remind users that it\\n operates by side effect, it does not return the reversed sequence.\\n\\n5. ``clear()`` and ``copy()`` are included for consistency with the\\n interfaces of mutable containers that don\\'t support slicing\\n operations (such as ``dict`` and ``set``)\\n\\n New in version 3.3: ``clear()`` and ``copy()`` methods.\\n\\n\\nLists\\n=====\\n\\nLists are mutable sequences, typically used to store collections of\\nhomogeneous items (where the precise degree of similarity will vary by\\napplication).\\n\\nclass class list([iterable])\\n\\n Lists may be constructed in several ways:\\n\\n * Using a pair of square brackets to denote the empty list: ``[]``\\n\\n * Using square brackets, separating items with commas: ``[a]``,\\n ``[a, b, c]``\\n\\n * Using a list comprehension: ``[x for x in iterable]``\\n\\n * Using the type constructor: ``list()`` or ``list(iterable)``\\n\\n The constructor builds a list whose items are the same and in the\\n same order as *iterable*\\'s items. *iterable* may be either a\\n sequence, a container that supports iteration, or an iterator\\n object. If *iterable* is already a list, a copy is made and\\n returned, similar to ``iterable[:]``. For example, ``list(\\'abc\\')``\\n returns ``[\\'a\\', \\'b\\', \\'c\\']`` and ``list( (1, 2, 3) )`` returns ``[1,\\n 2, 3]``. If no argument is given, the constructor creates a new\\n empty list, ``[]``.\\n\\n Many other operations also produce lists, including the\\n ``sorted()`` built-in.\\n\\n Lists implement all of the *common* and *mutable* sequence\\n operations. Lists also provide the following additional method:\\n\\n sort(*, key=None, reverse=None)\\n\\n This method sorts the list in place, using only ``<``\\n comparisons between items. Exceptions are not suppressed - if\\n any comparison operations fail, the entire sort operation will\\n fail (and the list will likely be left in a partially modified\\n state).\\n\\n *key* specifies a function of one argument that is used to\\n extract a comparison key from each list element (for example,\\n ``key=str.lower``). The key corresponding to each item in the\\n list is calculated once and then used for the entire sorting\\n process. The default value of ``None`` means that list items are\\n sorted directly without calculating a separate key value.\\n\\n The ``functools.cmp_to_key()`` utility is available to convert a\\n 2.x style *cmp* function to a *key* function.\\n\\n *reverse* is a boolean value. 
If set to ``True``, then the list\\n elements are sorted as if each comparison were reversed.\\n\\n This method modifies the sequence in place for economy of space\\n when sorting a large sequence. To remind users that it operates\\n by side effect, it does not return the sorted sequence (use\\n ``sorted()`` to explicitly request a new sorted list instance).\\n\\n The ``sort()`` method is guaranteed to be stable. A sort is\\n stable if it guarantees not to change the relative order of\\n elements that compare equal --- this is helpful for sorting in\\n multiple passes (for example, sort by department, then by salary\\n grade).\\n\\n **CPython implementation detail:** While a list is being sorted,\\n the effect of attempting to mutate, or even inspect, the list is\\n undefined. The C implementation of Python makes the list appear\\n empty for the duration, and raises ``ValueError`` if it can\\n detect that the list has been mutated during a sort.\\n\\n\\nTuples\\n======\\n\\nTuples are immutable sequences, typically used to store collections of\\nheterogeneous data (such as the 2-tuples produced by the\\n``enumerate()`` built-in). Tuples are also used for cases where an\\nimmutable sequence of homogeneous data is needed (such as allowing\\nstorage in a ``set`` or ``dict`` instance).\\n\\nclass class tuple([iterable])\\n\\n Tuples may be constructed in a number of ways:\\n\\n * Using a pair of parentheses to denote the empty tuple: ``()``\\n\\n * Using a trailing comma for a singleton tuple: ``a,`` or ``(a,)``\\n\\n * Separating items with commas: ``a, b, c`` or ``(a, b, c)``\\n\\n * Using the ``tuple()`` built-in: ``tuple()`` or\\n ``tuple(iterable)``\\n\\n The constructor builds a tuple whose items are the same and in the\\n same order as *iterable*\\'s items. *iterable* may be either a\\n sequence, a container that supports iteration, or an iterator\\n object. If *iterable* is already a tuple, it is returned\\n unchanged. For example, ``tuple(\\'abc\\')`` returns ``(\\'a\\', \\'b\\',\\n \\'c\\')`` and ``tuple( [1, 2, 3] )`` returns ``(1, 2, 3)``. If no\\n argument is given, the constructor creates a new empty tuple,\\n ``()``.\\n\\n Note that it is actually the comma which makes a tuple, not the\\n parentheses. The parentheses are optional, except in the empty\\n tuple case, or when they are needed to avoid syntactic ambiguity.\\n For example, ``f(a, b, c)`` is a function call with three\\n arguments, while ``f((a, b, c))`` is a function call with a 3-tuple\\n as the sole argument.\\n\\n Tuples implement all of the *common* sequence operations.\\n\\nFor heterogeneous collections of data where access by name is clearer\\nthan access by index, ``collections.namedtuple()`` may be a more\\nappropriate choice than a simple tuple object.\\n\\n\\nRanges\\n======\\n\\nThe ``range`` type represents an immutable sequence of numbers and is\\ncommonly used for looping a specific number of times in ``for`` loops.\\n\\nclass class range(stop)\\nclass class range(start, stop[, step])\\n\\n The arguments to the range constructor must be integers (either\\n built-in ``int`` or any object that implements the ``__index__``\\n special method). If the *step* argument is omitted, it defaults to\\n ``1``. If the *start* argument is omitted, it defaults to ``0``. 
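The point that it is the comma, not the parentheses, which makes a tuple can be shown with a short sketch; the helper ``f`` below is purely illustrative and only counts the arguments it receives:

    empty = ()            # the empty tuple: parentheses are required here
    single = 1,           # a one-item tuple; (1,) is the same value
    not_a_tuple = (1)     # no comma, so this is just the integer 1
    point = 1, 2, 3       # parentheses are optional: equal to (1, 2, 3)

    def f(*args):         # illustrative helper
        return len(args)

    f(1, 2, 3)     # three positional arguments -> 3
    f((1, 2, 3))   # one argument, a single 3-tuple -> 1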
If\\n *step* is zero, ``ValueError`` is raised.\\n\\n For a positive *step*, the contents of a range ``r`` are determined\\n by the formula ``r[i] = start + step*i`` where ``i >= 0`` and\\n ``r[i] < stop``.\\n\\n For a negative *step*, the contents of the range are still\\n determined by the formula ``r[i] = start + step*i``, but the\\n constraints are ``i >= 0`` and ``r[i] > stop``.\\n\\n A range object will be empty if ``r[0]`` does not meet the value\\n constraint. Ranges do support negative indices, but these are\\n interpreted as indexing from the end of the sequence determined by\\n the positive indices.\\n\\n Ranges containing absolute values larger than ``sys.maxsize`` are\\n permitted but some features (such as ``len()``) may raise\\n ``OverflowError``.\\n\\n Range examples:\\n\\n >>> list(range(10))\\n [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]\\n >>> list(range(1, 11))\\n [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]\\n >>> list(range(0, 30, 5))\\n [0, 5, 10, 15, 20, 25]\\n >>> list(range(0, 10, 3))\\n [0, 3, 6, 9]\\n >>> list(range(0, -10, -1))\\n [0, -1, -2, -3, -4, -5, -6, -7, -8, -9]\\n >>> list(range(0))\\n []\\n >>> list(range(1, 0))\\n []\\n\\n Ranges implement all of the *common* sequence operations except\\n concatenation and repetition (due to the fact that range objects\\n can only represent sequences that follow a strict pattern and\\n repetition and concatenation will usually violate that pattern).\\n\\nThe advantage of the ``range`` type over a regular ``list`` or\\n``tuple`` is that a ``range`` object will always take the same (small)\\namount of memory, no matter the size of the range it represents (as it\\nonly stores the ``start``, ``stop`` and ``step`` values, calculating\\nindividual items and subranges as needed).\\n\\nRange objects implement the ``collections.Sequence`` ABC, and provide\\nfeatures such as containment tests, element index lookup, slicing and\\nsupport for negative indices (see *Sequence Types --- list, tuple,\\nrange*):\\n\\n>>> r = range(0, 20, 2)\\n>>> r\\nrange(0, 20, 2)\\n>>> 11 in r\\nFalse\\n>>> 10 in r\\nTrue\\n>>> r.index(10)\\n5\\n>>> r[5]\\n10\\n>>> r[:5]\\nrange(0, 10, 2)\\n>>> r[-1]\\n18\\n\\nTesting range objects for equality with ``==`` and ``!=`` compares\\nthem as sequences. That is, two range objects are considered equal if\\nthey represent the same sequence of values. (Note that two range\\nobjects that compare equal might have different ``start``, ``stop``\\nand ``step`` attributes, for example ``range(0) == range(2, 1, 3)`` or\\n``range(0, 3, 2) == range(0, 4, 2)``.)\\n\\nChanged in version 3.2: Implement the Sequence ABC. Support slicing\\nand negative indices. Test ``int`` objects for membership in constant\\ntime instead of iterating through all items.\\n\\nChanged in version 3.3: Define \\'==\\' and \\'!=\\' to compare range objects\\nbased on the sequence of values they define (instead of comparing\\nbased on object identity).\\n\\nNew in version 3.3: The ``start``, ``stop`` and ``step`` attributes.\\n',\n'typesseq-mutable':\"\\nMutable Sequence Types\\n**********************\\n\\nThe operations in the following table are defined on mutable sequence\\ntypes. 
The ``collections.abc.MutableSequence`` ABC is provided to make\\nit easier to correctly implement these operations on custom sequence\\ntypes.\\n\\nIn the table *s* is an instance of a mutable sequence type, *t* is any\\niterable object and *x* is an arbitrary object that meets any type and\\nvalue restrictions imposed by *s* (for example, ``bytearray`` only\\naccepts integers that meet the value restriction ``0 <= x <= 255``).\\n\\n+--------------------------------+----------------------------------+-----------------------+\\n| Operation | Result | Notes |\\n+================================+==================================+=======================+\\n| ``s[i] = x`` | item *i* of *s* is replaced by | |\\n| | *x* | |\\n+--------------------------------+----------------------------------+-----------------------+\\n| ``s[i:j] = t`` | slice of *s* from *i* to *j* is | |\\n| | replaced by the contents of the | |\\n| | iterable *t* | |\\n+--------------------------------+----------------------------------+-----------------------+\\n| ``del s[i:j]`` | same as ``s[i:j] = []`` | |\\n+--------------------------------+----------------------------------+-----------------------+\\n| ``s[i:j:k] = t`` | the elements of ``s[i:j:k]`` are | (1) |\\n| | replaced by those of *t* | |\\n+--------------------------------+----------------------------------+-----------------------+\\n| ``del s[i:j:k]`` | removes the elements of | |\\n| | ``s[i:j:k]`` from the list | |\\n+--------------------------------+----------------------------------+-----------------------+\\n| ``s.append(x)`` | appends *x* to the end of the | |\\n| | sequence (same as | |\\n| | ``s[len(s):len(s)] = [x]``) | |\\n+--------------------------------+----------------------------------+-----------------------+\\n| ``s.clear()`` | removes all items from ``s`` | (5) |\\n| | (same as ``del s[:]``) | |\\n+--------------------------------+----------------------------------+-----------------------+\\n| ``s.copy()`` | creates a shallow copy of ``s`` | (5) |\\n| | (same as ``s[:]``) | |\\n+--------------------------------+----------------------------------+-----------------------+\\n| ``s.extend(t)`` | extends *s* with the contents of | |\\n| | *t* (same as ``s[len(s):len(s)] | |\\n| | = t``) | |\\n+--------------------------------+----------------------------------+-----------------------+\\n| ``s.insert(i, x)`` | inserts *x* into *s* at the | |\\n| | index given by *i* (same as | |\\n| | ``s[i:i] = [x]``) | |\\n+--------------------------------+----------------------------------+-----------------------+\\n| ``s.pop([i])`` | retrieves the item at *i* and | (2) |\\n| | also removes it from *s* | |\\n+--------------------------------+----------------------------------+-----------------------+\\n| ``s.remove(x)`` | remove the first item from *s* | (3) |\\n| | where ``s[i] == x`` | |\\n+--------------------------------+----------------------------------+-----------------------+\\n| ``s.reverse()`` | reverses the items of *s* in | (4) |\\n| | place | |\\n+--------------------------------+----------------------------------+-----------------------+\\n\\nNotes:\\n\\n1. *t* must have the same length as the slice it is replacing.\\n\\n2. The optional argument *i* defaults to ``-1``, so that by default\\n the last item is removed and returned.\\n\\n3. ``remove`` raises ``ValueError`` when *x* is not found in *s*.\\n\\n4. The ``reverse()`` method modifies the sequence in place for economy\\n of space when reversing a large sequence. 
To remind users that it\\n operates by side effect, it does not return the reversed sequence.\\n\\n5. ``clear()`` and ``copy()`` are included for consistency with the\\n interfaces of mutable containers that don't support slicing\\n operations (such as ``dict`` and ``set``)\\n\\n New in version 3.3: ``clear()`` and ``copy()`` methods.\\n\",\n'unary':'\\nUnary arithmetic and bitwise operations\\n***************************************\\n\\nAll unary arithmetic and bitwise operations have the same priority:\\n\\n u_expr ::= power | \"-\" u_expr | \"+\" u_expr | \"~\" u_expr\\n\\nThe unary ``-`` (minus) operator yields the negation of its numeric\\nargument.\\n\\nThe unary ``+`` (plus) operator yields its numeric argument unchanged.\\n\\nThe unary ``~`` (invert) operator yields the bitwise inversion of its\\ninteger argument. The bitwise inversion of ``x`` is defined as\\n``-(x+1)``. It only applies to integral numbers.\\n\\nIn all three cases, if the argument does not have the proper type, a\\n``TypeError`` exception is raised.\\n',\n'while':'\\nThe ``while`` statement\\n***********************\\n\\nThe ``while`` statement is used for repeated execution as long as an\\nexpression is true:\\n\\n while_stmt ::= \"while\" expression \":\" suite\\n [\"else\" \":\" suite]\\n\\nThis repeatedly tests the expression and, if it is true, executes the\\nfirst suite; if the expression is false (which may be the first time\\nit is tested) the suite of the ``else`` clause, if present, is\\nexecuted and the loop terminates.\\n\\nA ``break`` statement executed in the first suite terminates the loop\\nwithout executing the ``else`` clause\\'s suite. A ``continue``\\nstatement executed in the first suite skips the rest of the suite and\\ngoes back to testing the expression.\\n',\n'with':'\\nThe ``with`` statement\\n**********************\\n\\nThe ``with`` statement is used to wrap the execution of a block with\\nmethods defined by a context manager (see section *With Statement\\nContext Managers*). This allows common\\n``try``...``except``...``finally`` usage patterns to be encapsulated\\nfor convenient reuse.\\n\\n with_stmt ::= \"with\" with_item (\",\" with_item)* \":\" suite\\n with_item ::= expression [\"as\" target]\\n\\nThe execution of the ``with`` statement with one \"item\" proceeds as\\nfollows:\\n\\n1. The context expression (the expression given in the ``with_item``)\\n is evaluated to obtain a context manager.\\n\\n2. The context manager\\'s ``__exit__()`` is loaded for later use.\\n\\n3. The context manager\\'s ``__enter__()`` method is invoked.\\n\\n4. If a target was included in the ``with`` statement, the return\\n value from ``__enter__()`` is assigned to it.\\n\\n Note: The ``with`` statement guarantees that if the ``__enter__()``\\n method returns without an error, then ``__exit__()`` will always\\n be called. Thus, if an error occurs during the assignment to the\\n target list, it will be treated the same as an error occurring\\n within the suite would be. See step 6 below.\\n\\n5. The suite is executed.\\n\\n6. The context manager\\'s ``__exit__()`` method is invoked. If an\\n exception caused the suite to be exited, its type, value, and\\n traceback are passed as arguments to ``__exit__()``. Otherwise,\\n three ``None`` arguments are supplied.\\n\\n If the suite was exited due to an exception, and the return value\\n from the ``__exit__()`` method was false, the exception is\\n reraised. 
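A short sketch of the ``while`` ... ``else`` behaviour described above: the ``else`` suite runs only when the condition becomes false, and a ``break`` skips it (the values and target here are arbitrary):

    values = [4, 8, 15]
    target = 15
    i = 0
    while i < len(values):
        if values[i] == target:
            print("found at index", i)
            break           # leaving the loop this way skips the else clause
        i += 1
    else:
        print("not found")  # runs only if the condition became false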
If the return value was true, the exception is\\n suppressed, and execution continues with the statement following\\n the ``with`` statement.\\n\\n If the suite was exited for any reason other than an exception, the\\n return value from ``__exit__()`` is ignored, and execution proceeds\\n at the normal location for the kind of exit that was taken.\\n\\nWith more than one item, the context managers are processed as if\\nmultiple ``with`` statements were nested:\\n\\n with A() as a, B() as b:\\n suite\\n\\nis equivalent to\\n\\n with A() as a:\\n with B() as b:\\n suite\\n\\nChanged in version 3.1: Support for multiple context expressions.\\n\\nSee also:\\n\\n **PEP 0343** - The \"with\" statement\\n The specification, background, and examples for the Python\\n ``with`` statement.\\n',\n'yield':'\\nThe ``yield`` statement\\n***********************\\n\\n yield_stmt ::= yield_expression\\n\\nThe ``yield`` statement is only used when defining a generator\\nfunction, and is only used in the body of the generator function.\\nUsing a ``yield`` statement in a function definition is sufficient to\\ncause that definition to create a generator function instead of a\\nnormal function.\\n\\nWhen a generator function is called, it returns an iterator known as a\\ngenerator iterator, or more commonly, a generator. The body of the\\ngenerator function is executed by calling the ``next()`` function on\\nthe generator repeatedly until it raises an exception.\\n\\nWhen a ``yield`` statement is executed, the state of the generator is\\nfrozen and the value of ``expression_list`` is returned to\\n``next()``\\'s caller. By \"frozen\" we mean that all local state is\\nretained, including the current bindings of local variables, the\\ninstruction pointer, and the internal evaluation stack: enough\\ninformation is saved so that the next time ``next()`` is invoked, the\\nfunction can proceed exactly as if the ``yield`` statement were just\\nanother external call.\\n\\nThe ``yield`` statement is allowed in the ``try`` clause of a ``try``\\n... ``finally`` construct. 
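A minimal sketch of the ``__enter__()``/``__exit__()`` protocol outlined in the steps above; the ``Suppress`` class is purely illustrative and exists only to show that a true return value from ``__exit__()`` suppresses the exception raised in the suite:

    class Suppress:
        def __init__(self, exc_type):
            self.exc_type = exc_type
        def __enter__(self):
            return self    # bound to the "as" target, if one is given
        def __exit__(self, exc_type, exc_value, tb):
            # Returning a true value suppresses the exception (step 6);
            # returning None or False lets it propagate.
            return exc_type is not None and issubclass(exc_type, self.exc_type)

    with Suppress(ZeroDivisionError):
        1 / 0
    print("execution continues after the with block")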
If the generator is not resumed before it\\nis finalized (by reaching a zero reference count or by being garbage\\ncollected), the generator-iterator\\'s ``close()`` method will be\\ncalled, allowing any pending ``finally`` clauses to execute.\\n\\nWhen ``yield from `` is used, it treats the supplied expression\\nas a subiterator, producing values from it until the underlying\\niterator is exhausted.\\n\\n Changed in version 3.3: Added ``yield from `` to delegate\\n control flow to a subiterator\\n\\nFor full details of ``yield`` semantics, refer to the *Yield\\nexpressions* section.\\n\\nSee also:\\n\\n **PEP 0255** - Simple Generators\\n The proposal for adding generators and the ``yield`` statement\\n to Python.\\n\\n **PEP 0342** - Coroutines via Enhanced Generators\\n The proposal to enhance the API and syntax of generators, making\\n them usable as simple coroutines.\\n\\n **PEP 0380** - Syntax for Delegating to a Subgenerator\\n The proposal to introduce the ``yield_from`` syntax, making\\n delegation to sub-generators easy.\\n'}\n", []], "pydoc_data": [".py", "", [], 1], "unittest.async_case": [".py", "\nimport browser.aio as asyncio\nimport inspect\n\nfrom .case import TestCase\n\n\n\nclass IsolatedAsyncioTestCase(TestCase):\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n def __init__(self,methodName='runTest'):\n super().__init__(methodName)\n self._asyncioTestLoop=None\n self._asyncioCallsQueue=None\n \n async def asyncSetUp(self):\n pass\n \n async def asyncTearDown(self):\n pass\n \n def addAsyncCleanup(self,func,/,*args,**kwargs):\n \n \n \n \n \n \n \n \n \n \n \n \n self.addCleanup(*(func,*args),**kwargs)\n \n def _callSetUp(self):\n self.setUp()\n self._callAsync(self.asyncSetUp)\n \n def _callTestMethod(self,method):\n self._callMaybeAsync(method)\n \n def _callTearDown(self):\n self._callAsync(self.asyncTearDown)\n self.tearDown()\n \n def _callCleanup(self,function,*args,**kwargs):\n self._callMaybeAsync(function,*args,**kwargs)\n \n def _callAsync(self,func,/,*args,**kwargs):\n assert self._asyncioTestLoop is not None\n ret=func(*args,**kwargs)\n assert inspect.isawaitable(ret)\n fut=self._asyncioTestLoop.create_future()\n self._asyncioCallsQueue.put_nowait((fut,ret))\n return self._asyncioTestLoop.run_until_complete(fut)\n \n def _callMaybeAsync(self,func,/,*args,**kwargs):\n assert self._asyncioTestLoop is not None\n ret=func(*args,**kwargs)\n if inspect.isawaitable(ret):\n fut=self._asyncioTestLoop.create_future()\n self._asyncioCallsQueue.put_nowait((fut,ret))\n return self._asyncioTestLoop.run_until_complete(fut)\n else :\n return ret\n \n async def _asyncioLoopRunner(self,fut):\n self._asyncioCallsQueue=queue=asyncio.Queue()\n fut.set_result(None )\n while True :\n query=await queue.get()\n queue.task_done()\n if query is None :\n return\n fut,awaitable=query\n try :\n ret=await awaitable\n if not fut.cancelled():\n fut.set_result(ret)\n except asyncio.CancelledError:\n raise\n except Exception as ex:\n if not fut.cancelled():\n fut.set_exception(ex)\n \n def _setupAsyncioLoop(self):\n assert self._asyncioTestLoop is None\n loop=asyncio.new_event_loop()\n asyncio.set_event_loop(loop)\n loop.set_debug(True )\n self._asyncioTestLoop=loop\n fut=loop.create_future()\n self._asyncioCallsTask=loop.create_task(self._asyncioLoopRunner(fut))\n loop.run_until_complete(fut)\n \n def _tearDownAsyncioLoop(self):\n assert self._asyncioTestLoop is not None\n loop=self._asyncioTestLoop\n self._asyncioTestLoop=None\n self._asyncioCallsQueue.put_nowait(None )\n 
loop.run_until_complete(self._asyncioCallsQueue.join())\n \n try :\n \n to_cancel=asyncio.all_tasks(loop)\n if not to_cancel:\n return\n \n for task in to_cancel:\n task.cancel()\n \n loop.run_until_complete(\n asyncio.gather(*to_cancel,loop=loop,return_exceptions=True ))\n \n for task in to_cancel:\n if task.cancelled():\n continue\n if task.exception()is not None :\n loop.call_exception_handler({\n 'message':'unhandled exception during test shutdown',\n 'exception':task.exception(),\n 'task':task,\n })\n \n loop.run_until_complete(loop.shutdown_asyncgens())\n finally :\n asyncio.set_event_loop(None )\n loop.close()\n \n def run(self,result=None ):\n self._setupAsyncioLoop()\n try :\n return super().run(result)\n finally :\n self._tearDownAsyncioLoop()\n", ["browser.aio", "inspect", "unittest.case"]], "unittest.case": [".py", "''\n\nimport sys\nimport functools\nimport difflib\nimport logging\nimport pprint\nimport re\nimport warnings\nimport collections\nimport contextlib\nimport traceback\nimport types\n\nfrom . import result\nfrom .util import (strclass,safe_repr,_count_diff_all_purpose,\n_count_diff_hashable,_common_shorten_repr)\n\n__unittest=True\n\n_subtest_msg_sentinel=object()\n\nDIFF_OMITTED=('\\nDiff is %s characters long. '\n'Set self.maxDiff to None to see it.')\n\nclass SkipTest(Exception):\n ''\n\n\n\n\n \n \nclass _ShouldStop(Exception):\n ''\n\n \n \nclass _UnexpectedSuccess(Exception):\n ''\n\n \n \n \nclass _Outcome(object):\n def __init__(self,result=None ):\n self.expecting_failure=False\n self.result=result\n self.result_supports_subtests=hasattr(result,\"addSubTest\")\n self.success=True\n self.skipped=[]\n self.expectedFailure=None\n self.errors=[]\n \n @contextlib.contextmanager\n def testPartExecutor(self,test_case,isTest=False ):\n old_success=self.success\n self.success=True\n try :\n yield\n except KeyboardInterrupt:\n raise\n except SkipTest as e:\n self.success=False\n self.skipped.append((test_case,str(e)))\n except _ShouldStop:\n pass\n except :\n exc_info=sys.exc_info()\n if self.expecting_failure:\n self.expectedFailure=exc_info\n else :\n self.success=False\n self.errors.append((test_case,exc_info))\n \n \n exc_info=None\n else :\n if self.result_supports_subtests and self.success:\n self.errors.append((test_case,None ))\n finally :\n self.success=self.success and old_success\n \n \ndef _id(obj):\n return obj\n \n \n_module_cleanups=[]\ndef addModuleCleanup(function,/,*args,**kwargs):\n ''\n \n _module_cleanups.append((function,args,kwargs))\n \n \ndef doModuleCleanups():\n ''\n \n exceptions=[]\n while _module_cleanups:\n function,args,kwargs=_module_cleanups.pop()\n try :\n function(*args,**kwargs)\n except Exception as exc:\n exceptions.append(exc)\n if exceptions:\n \n \n raise exceptions[0]\n \n \ndef skip(reason):\n ''\n\n \n def decorator(test_item):\n if not isinstance(test_item,type):\n @functools.wraps(test_item)\n def skip_wrapper(*args,**kwargs):\n raise SkipTest(reason)\n test_item=skip_wrapper\n \n test_item.__unittest_skip__=True\n test_item.__unittest_skip_why__=reason\n return test_item\n if isinstance(reason,types.FunctionType):\n test_item=reason\n reason=''\n return decorator(test_item)\n return decorator\n \ndef skipIf(condition,reason):\n ''\n\n \n if condition:\n return skip(reason)\n return _id\n \ndef skipUnless(condition,reason):\n ''\n\n \n if not condition:\n return skip(reason)\n return _id\n \ndef expectedFailure(test_item):\n test_item.__unittest_expecting_failure__=True\n return test_item\n \ndef 
_is_subtype(expected,basetype):\n if isinstance(expected,tuple):\n return all(_is_subtype(e,basetype)for e in expected)\n return isinstance(expected,type)and issubclass(expected,basetype)\n \nclass _BaseTestCaseContext:\n\n def __init__(self,test_case):\n self.test_case=test_case\n \n def _raiseFailure(self,standardMsg):\n msg=self.test_case._formatMessage(self.msg,standardMsg)\n raise self.test_case.failureException(msg)\n \nclass _AssertRaisesBaseContext(_BaseTestCaseContext):\n\n def __init__(self,expected,test_case,expected_regex=None ):\n _BaseTestCaseContext.__init__(self,test_case)\n self.expected=expected\n self.test_case=test_case\n if expected_regex is not None :\n expected_regex=re.compile(expected_regex)\n self.expected_regex=expected_regex\n self.obj_name=None\n self.msg=None\n \n def handle(self,name,args,kwargs):\n ''\n\n\n\n\n \n try :\n if not _is_subtype(self.expected,self._base_type):\n raise TypeError('%s() arg 1 must be %s'%\n (name,self._base_type_str))\n if not args:\n self.msg=kwargs.pop('msg',None )\n if kwargs:\n raise TypeError('%r is an invalid keyword argument for '\n 'this function'%(next(iter(kwargs)),))\n return self\n \n callable_obj,*args=args\n try :\n self.obj_name=callable_obj.__name__\n except AttributeError:\n self.obj_name=str(callable_obj)\n with self:\n callable_obj(*args,**kwargs)\n finally :\n \n self=None\n \n \nclass _AssertRaisesContext(_AssertRaisesBaseContext):\n ''\n \n _base_type=BaseException\n _base_type_str='an exception type or tuple of exception types'\n \n def __enter__(self):\n return self\n \n def __exit__(self,exc_type,exc_value,tb):\n if exc_type is None :\n try :\n exc_name=self.expected.__name__\n except AttributeError:\n exc_name=str(self.expected)\n if self.obj_name:\n self._raiseFailure(\"{} not raised by {}\".format(exc_name,\n self.obj_name))\n else :\n self._raiseFailure(\"{} not raised\".format(exc_name))\n else :\n traceback.clear_frames(tb)\n if not issubclass(exc_type,self.expected):\n \n return False\n \n self.exception=exc_value.with_traceback(None )\n if self.expected_regex is None :\n return True\n \n expected_regex=self.expected_regex\n if not expected_regex.search(str(exc_value)):\n self._raiseFailure('\"{}\" does not match \"{}\"'.format(\n expected_regex.pattern,str(exc_value)))\n return True\n \n \nclass _AssertWarnsContext(_AssertRaisesBaseContext):\n ''\n \n _base_type=Warning\n _base_type_str='a warning type or tuple of warning types'\n \n def __enter__(self):\n \n \n for v in sys.modules.values():\n if getattr(v,'__warningregistry__',None ):\n v.__warningregistry__={}\n self.warnings_manager=warnings.catch_warnings(record=True )\n self.warnings=self.warnings_manager.__enter__()\n warnings.simplefilter(\"always\",self.expected)\n return self\n \n def __exit__(self,exc_type,exc_value,tb):\n self.warnings_manager.__exit__(exc_type,exc_value,tb)\n if exc_type is not None :\n \n return\n try :\n exc_name=self.expected.__name__\n except AttributeError:\n exc_name=str(self.expected)\n first_matching=None\n for m in self.warnings:\n w=m.message\n if not isinstance(w,self.expected):\n continue\n if first_matching is None :\n first_matching=w\n if (self.expected_regex is not None and\n not self.expected_regex.search(str(w))):\n continue\n \n self.warning=w\n self.filename=m.filename\n self.lineno=m.lineno\n return\n \n if first_matching is not None :\n self._raiseFailure('\"{}\" does not match \"{}\"'.format(\n self.expected_regex.pattern,str(first_matching)))\n if self.obj_name:\n self._raiseFailure(\"{} not 
triggered by {}\".format(exc_name,\n self.obj_name))\n else :\n self._raiseFailure(\"{} not triggered\".format(exc_name))\n \n \n \n_LoggingWatcher=collections.namedtuple(\"_LoggingWatcher\",\n[\"records\",\"output\"])\n\n\nclass _CapturingHandler(logging.Handler):\n ''\n\n \n \n def __init__(self):\n logging.Handler.__init__(self)\n self.watcher=_LoggingWatcher([],[])\n \n def flush(self):\n pass\n \n def emit(self,record):\n self.watcher.records.append(record)\n msg=self.format(record)\n self.watcher.output.append(msg)\n \n \n \nclass _AssertLogsContext(_BaseTestCaseContext):\n ''\n \n LOGGING_FORMAT=\"%(levelname)s:%(name)s:%(message)s\"\n \n def __init__(self,test_case,logger_name,level):\n _BaseTestCaseContext.__init__(self,test_case)\n self.logger_name=logger_name\n if level:\n self.level=logging._nameToLevel.get(level,level)\n else :\n self.level=logging.INFO\n self.msg=None\n \n def __enter__(self):\n if isinstance(self.logger_name,logging.Logger):\n logger=self.logger=self.logger_name\n else :\n logger=self.logger=logging.getLogger(self.logger_name)\n formatter=logging.Formatter(self.LOGGING_FORMAT)\n handler=_CapturingHandler()\n handler.setFormatter(formatter)\n self.watcher=handler.watcher\n self.old_handlers=logger.handlers[:]\n self.old_level=logger.level\n self.old_propagate=logger.propagate\n logger.handlers=[handler]\n logger.setLevel(self.level)\n logger.propagate=False\n return handler.watcher\n \n def __exit__(self,exc_type,exc_value,tb):\n self.logger.handlers=self.old_handlers\n self.logger.propagate=self.old_propagate\n self.logger.setLevel(self.old_level)\n if exc_type is not None :\n \n return False\n if len(self.watcher.records)==0:\n self._raiseFailure(\n \"no logs of level {} or higher triggered on {}\"\n .format(logging.getLevelName(self.level),self.logger.name))\n \n \nclass _OrderedChainMap(collections.ChainMap):\n def __iter__(self):\n seen=set()\n for mapping in self.maps:\n for k in mapping:\n if k not in seen:\n seen.add(k)\n yield k\n \n \nclass TestCase(object):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n failureException=AssertionError\n \n longMessage=True\n \n maxDiff=80 *8\n \n \n \n _diffThreshold=2 **16\n \n \n \n _classSetupFailed=False\n \n _class_cleanups=[]\n \n def __init__(self,methodName='runTest'):\n ''\n\n\n \n self._testMethodName=methodName\n self._outcome=None\n self._testMethodDoc='No test'\n try :\n testMethod=getattr(self,methodName)\n except AttributeError:\n if methodName !='runTest':\n \n \n raise ValueError(\"no such test method in %s: %s\"%\n (self.__class__,methodName))\n else :\n self._testMethodDoc=testMethod.__doc__\n self._cleanups=[]\n self._subtest=None\n \n \n \n \n self._type_equality_funcs={}\n self.addTypeEqualityFunc(dict,'assertDictEqual')\n self.addTypeEqualityFunc(list,'assertListEqual')\n self.addTypeEqualityFunc(tuple,'assertTupleEqual')\n self.addTypeEqualityFunc(set,'assertSetEqual')\n self.addTypeEqualityFunc(frozenset,'assertSetEqual')\n self.addTypeEqualityFunc(str,'assertMultiLineEqual')\n \n def addTypeEqualityFunc(self,typeobj,function):\n ''\n\n\n\n\n\n\n\n\n\n\n \n self._type_equality_funcs[typeobj]=function\n \n def addCleanup(*args,**kwargs):\n ''\n\n\n\n \n if len(args)>=2:\n self,function,*args=args\n elif not args:\n raise TypeError(\"descriptor 'addCleanup' of 'TestCase' object \"\n \"needs an argument\")\n elif 'function'in kwargs:\n function=kwargs.pop('function')\n self,*args=args\n import warnings\n warnings.warn(\"Passing 'function' as keyword argument is 
deprecated\",\n DeprecationWarning,stacklevel=2)\n else :\n raise TypeError('addCleanup expected at least 1 positional '\n 'argument, got %d'%(len(args)-1))\n args=tuple(args)\n \n self._cleanups.append((function,args,kwargs))\n addCleanup.__text_signature__='($self, function, /, *args, **kwargs)'\n \n @classmethod\n def addClassCleanup(cls,function,/,*args,**kwargs):\n ''\n \n cls._class_cleanups.append((function,args,kwargs))\n \n def setUp(self):\n ''\n pass\n \n def tearDown(self):\n ''\n pass\n \n @classmethod\n def setUpClass(cls):\n ''\n \n @classmethod\n def tearDownClass(cls):\n ''\n \n def countTestCases(self):\n return 1\n \n def defaultTestResult(self):\n return result.TestResult()\n \n def shortDescription(self):\n ''\n\n\n\n\n \n doc=self._testMethodDoc\n return doc and doc.split(\"\\n\")[0].strip()or None\n \n \n def id(self):\n return \"%s.%s\"%(strclass(self.__class__),self._testMethodName)\n \n def __eq__(self,other):\n if type(self)is not type(other):\n return NotImplemented\n \n return self._testMethodName ==other._testMethodName\n \n def __hash__(self):\n return hash((type(self),self._testMethodName))\n \n def __str__(self):\n return \"%s (%s)\"%(self._testMethodName,strclass(self.__class__))\n \n def __repr__(self):\n return \"<%s testMethod=%s>\"%\\\n (strclass(self.__class__),self._testMethodName)\n \n def _addSkip(self,result,test_case,reason):\n addSkip=getattr(result,'addSkip',None )\n if addSkip is not None :\n addSkip(test_case,reason)\n else :\n warnings.warn(\"TestResult has no addSkip method, skips not reported\",\n RuntimeWarning,2)\n result.addSuccess(test_case)\n \n @contextlib.contextmanager\n def subTest(self,msg=_subtest_msg_sentinel,**params):\n ''\n\n\n\n\n \n if self._outcome is None or not self._outcome.result_supports_subtests:\n yield\n return\n parent=self._subtest\n if parent is None :\n params_map=_OrderedChainMap(params)\n else :\n params_map=parent.params.new_child(params)\n self._subtest=_SubTest(self,msg,params_map)\n try :\n with self._outcome.testPartExecutor(self._subtest,isTest=True ):\n yield\n if not self._outcome.success:\n result=self._outcome.result\n if result is not None and result.failfast:\n raise _ShouldStop\n elif self._outcome.expectedFailure:\n \n \n raise _ShouldStop\n finally :\n self._subtest=parent\n \n def _feedErrorsToResult(self,result,errors):\n for test,exc_info in errors:\n if isinstance(test,_SubTest):\n result.addSubTest(test.test_case,test,exc_info)\n elif exc_info is not None :\n if issubclass(exc_info[0],self.failureException):\n result.addFailure(test,exc_info)\n else :\n result.addError(test,exc_info)\n \n def _addExpectedFailure(self,result,exc_info):\n try :\n addExpectedFailure=result.addExpectedFailure\n except AttributeError:\n warnings.warn(\"TestResult has no addExpectedFailure method, reporting as passes\",\n RuntimeWarning)\n result.addSuccess(self)\n else :\n addExpectedFailure(self,exc_info)\n \n def _addUnexpectedSuccess(self,result):\n try :\n addUnexpectedSuccess=result.addUnexpectedSuccess\n except AttributeError:\n warnings.warn(\"TestResult has no addUnexpectedSuccess method, reporting as failure\",\n RuntimeWarning)\n \n \n try :\n raise _UnexpectedSuccess from None\n except _UnexpectedSuccess:\n result.addFailure(self,sys.exc_info())\n else :\n addUnexpectedSuccess(self)\n \n def _callSetUp(self):\n self.setUp()\n \n def _callTestMethod(self,method):\n method()\n \n def _callTearDown(self):\n self.tearDown()\n \n def _callCleanup(self,function,/,*args,**kwargs):\n 
function(*args,**kwargs)\n \n def run(self,result=None ):\n orig_result=result\n if result is None :\n result=self.defaultTestResult()\n startTestRun=getattr(result,'startTestRun',None )\n if startTestRun is not None :\n startTestRun()\n \n result.startTest(self)\n \n testMethod=getattr(self,self._testMethodName)\n if (getattr(self.__class__,\"__unittest_skip__\",False )or\n getattr(testMethod,\"__unittest_skip__\",False )):\n \n try :\n skip_why=(getattr(self.__class__,'__unittest_skip_why__','')\n or getattr(testMethod,'__unittest_skip_why__',''))\n self._addSkip(result,self,skip_why)\n finally :\n result.stopTest(self)\n return\n expecting_failure_method=getattr(testMethod,\n \"__unittest_expecting_failure__\",False )\n expecting_failure_class=getattr(self,\n \"__unittest_expecting_failure__\",False )\n expecting_failure=expecting_failure_class or expecting_failure_method\n outcome=_Outcome(result)\n try :\n self._outcome=outcome\n \n with outcome.testPartExecutor(self):\n self._callSetUp()\n if outcome.success:\n outcome.expecting_failure=expecting_failure\n with outcome.testPartExecutor(self,isTest=True ):\n self._callTestMethod(testMethod)\n outcome.expecting_failure=False\n with outcome.testPartExecutor(self):\n self._callTearDown()\n \n self.doCleanups()\n for test,reason in outcome.skipped:\n self._addSkip(result,test,reason)\n self._feedErrorsToResult(result,outcome.errors)\n if outcome.success:\n if expecting_failure:\n if outcome.expectedFailure:\n self._addExpectedFailure(result,outcome.expectedFailure)\n else :\n self._addUnexpectedSuccess(result)\n else :\n result.addSuccess(self)\n return result\n finally :\n result.stopTest(self)\n if orig_result is None :\n stopTestRun=getattr(result,'stopTestRun',None )\n if stopTestRun is not None :\n stopTestRun()\n \n \n \n \n outcome.errors.clear()\n outcome.expectedFailure=None\n \n \n self._outcome=None\n \n def doCleanups(self):\n ''\n \n outcome=self._outcome or _Outcome()\n while self._cleanups:\n function,args,kwargs=self._cleanups.pop()\n with outcome.testPartExecutor(self):\n self._callCleanup(function,*args,**kwargs)\n \n \n \n return outcome.success\n \n @classmethod\n def doClassCleanups(cls):\n ''\n \n cls.tearDown_exceptions=[]\n while cls._class_cleanups:\n function,args,kwargs=cls._class_cleanups.pop()\n try :\n function(*args,**kwargs)\n except Exception as exc:\n cls.tearDown_exceptions.append(sys.exc_info())\n \n def __call__(self,*args,**kwds):\n return self.run(*args,**kwds)\n \n def debug(self):\n ''\n self.setUp()\n getattr(self,self._testMethodName)()\n self.tearDown()\n while self._cleanups:\n function,args,kwargs=self._cleanups.pop(-1)\n function(*args,**kwargs)\n \n def skipTest(self,reason):\n ''\n raise SkipTest(reason)\n \n def fail(self,msg=None ):\n ''\n raise self.failureException(msg)\n \n def assertFalse(self,expr,msg=None ):\n ''\n if expr:\n msg=self._formatMessage(msg,\"%s is not false\"%safe_repr(expr))\n raise self.failureException(msg)\n \n def assertTrue(self,expr,msg=None ):\n ''\n if not expr:\n msg=self._formatMessage(msg,\"%s is not true\"%safe_repr(expr))\n raise self.failureException(msg)\n \n def _formatMessage(self,msg,standardMsg):\n ''\n\n\n\n\n\n\n\n \n if not self.longMessage:\n return msg or standardMsg\n if msg is None :\n return standardMsg\n try :\n \n \n return '%s : %s'%(standardMsg,msg)\n except UnicodeDecodeError:\n return '%s : %s'%(safe_repr(standardMsg),safe_repr(msg))\n \n def assertRaises(self,expected_exception,*args,**kwargs):\n 
''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n context=_AssertRaisesContext(expected_exception,self)\n try :\n return context.handle('assertRaises',args,kwargs)\n finally :\n \n context=None\n \n def assertWarns(self,expected_warning,*args,**kwargs):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n context=_AssertWarnsContext(expected_warning,self)\n return context.handle('assertWarns',args,kwargs)\n \n def assertLogs(self,logger=None ,level=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n return _AssertLogsContext(self,logger,level)\n \n def _getAssertEqualityFunc(self,first,second):\n ''\n\n\n\n\n \n \n \n \n \n \n \n \n \n \n \n if type(first)is type(second):\n asserter=self._type_equality_funcs.get(type(first))\n if asserter is not None :\n if isinstance(asserter,str):\n asserter=getattr(self,asserter)\n return asserter\n \n return self._baseAssertEqual\n \n def _baseAssertEqual(self,first,second,msg=None ):\n ''\n if not first ==second:\n standardMsg='%s != %s'%_common_shorten_repr(first,second)\n msg=self._formatMessage(msg,standardMsg)\n raise self.failureException(msg)\n \n def assertEqual(self,first,second,msg=None ):\n ''\n\n \n assertion_func=self._getAssertEqualityFunc(first,second)\n assertion_func(first,second,msg=msg)\n \n def assertNotEqual(self,first,second,msg=None ):\n ''\n\n \n if not first !=second:\n msg=self._formatMessage(msg,'%s == %s'%(safe_repr(first),\n safe_repr(second)))\n raise self.failureException(msg)\n \n def assertAlmostEqual(self,first,second,places=None ,msg=None ,\n delta=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n \n if first ==second:\n \n return\n if delta is not None and places is not None :\n raise TypeError(\"specify delta or places not both\")\n \n diff=abs(first -second)\n if delta is not None :\n if diff <=delta:\n return\n \n standardMsg='%s != %s within %s delta (%s difference)'%(\n safe_repr(first),\n safe_repr(second),\n safe_repr(delta),\n safe_repr(diff))\n else :\n if places is None :\n places=7\n \n if round(diff,places)==0:\n return\n \n standardMsg='%s != %s within %r places (%s difference)'%(\n safe_repr(first),\n safe_repr(second),\n places,\n safe_repr(diff))\n msg=self._formatMessage(msg,standardMsg)\n raise self.failureException(msg)\n \n def assertNotAlmostEqual(self,first,second,places=None ,msg=None ,\n delta=None ):\n ''\n\n\n\n\n\n\n\n\n \n if delta is not None and places is not None :\n raise TypeError(\"specify delta or places not both\")\n diff=abs(first -second)\n if delta is not None :\n if not (first ==second)and diff >delta:\n return\n standardMsg='%s == %s within %s delta (%s difference)'%(\n safe_repr(first),\n safe_repr(second),\n safe_repr(delta),\n safe_repr(diff))\n else :\n if places is None :\n places=7\n if not (first ==second)and round(diff,places)!=0:\n return\n standardMsg='%s == %s within %r places'%(safe_repr(first),\n safe_repr(second),\n places)\n \n msg=self._formatMessage(msg,standardMsg)\n raise self.failureException(msg)\n \n def assertSequenceEqual(self,seq1,seq2,msg=None ,seq_type=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n if seq_type is not None :\n seq_type_name=seq_type.__name__\n if not isinstance(seq1,seq_type):\n raise self.failureException('First sequence is not a %s: %s'\n %(seq_type_name,safe_repr(seq1)))\n if not isinstance(seq2,seq_type):\n raise self.failureException('Second sequence is not a %s: %s'\n %(seq_type_name,safe_repr(seq2)))\n else :\n seq_type_name=\"sequence\"\n \n differing=None\n try :\n len1=len(seq1)\n except (TypeError,NotImplementedError):\n 
differing='First %s has no length. Non-sequence?'%(\n seq_type_name)\n \n if differing is None :\n try :\n len2=len(seq2)\n except (TypeError,NotImplementedError):\n differing='Second %s has no length. Non-sequence?'%(\n seq_type_name)\n \n if differing is None :\n if seq1 ==seq2:\n return\n \n differing='%ss differ: %s != %s\\n'%(\n (seq_type_name.capitalize(),)+\n _common_shorten_repr(seq1,seq2))\n \n for i in range(min(len1,len2)):\n try :\n item1=seq1[i]\n except (TypeError,IndexError,NotImplementedError):\n differing +=('\\nUnable to index element %d of first %s\\n'%\n (i,seq_type_name))\n break\n \n try :\n item2=seq2[i]\n except (TypeError,IndexError,NotImplementedError):\n differing +=('\\nUnable to index element %d of second %s\\n'%\n (i,seq_type_name))\n break\n \n if item1 !=item2:\n differing +=('\\nFirst differing element %d:\\n%s\\n%s\\n'%\n ((i,)+_common_shorten_repr(item1,item2)))\n break\n else :\n if (len1 ==len2 and seq_type is None and\n type(seq1)!=type(seq2)):\n \n return\n \n if len1 >len2:\n differing +=('\\nFirst %s contains %d additional '\n 'elements.\\n'%(seq_type_name,len1 -len2))\n try :\n differing +=('First extra element %d:\\n%s\\n'%\n (len2,safe_repr(seq1[len2])))\n except (TypeError,IndexError,NotImplementedError):\n differing +=('Unable to index element %d '\n 'of first %s\\n'%(len2,seq_type_name))\n elif len1 self._diffThreshold or\n len(second)>self._diffThreshold):\n self._baseAssertEqual(first,second,msg)\n firstlines=first.splitlines(keepends=True )\n secondlines=second.splitlines(keepends=True )\n if len(firstlines)==1 and first.strip('\\r\\n')==first:\n firstlines=[first+'\\n']\n secondlines=[second+'\\n']\n standardMsg='%s != %s'%_common_shorten_repr(first,second)\n diff='\\n'+''.join(difflib.ndiff(firstlines,secondlines))\n standardMsg=self._truncateMessage(standardMsg,diff)\n self.fail(self._formatMessage(msg,standardMsg))\n \n def assertLess(self,a,b,msg=None ):\n ''\n if not a b:\n standardMsg='%s not greater than %s'%(safe_repr(a),safe_repr(b))\n self.fail(self._formatMessage(msg,standardMsg))\n \n def assertGreaterEqual(self,a,b,msg=None ):\n ''\n if not a >=b:\n standardMsg='%s not greater than or equal to %s'%(safe_repr(a),safe_repr(b))\n self.fail(self._formatMessage(msg,standardMsg))\n \n def assertIsNone(self,obj,msg=None ):\n ''\n if obj is not None :\n standardMsg='%s is not None'%(safe_repr(obj),)\n self.fail(self._formatMessage(msg,standardMsg))\n \n def assertIsNotNone(self,obj,msg=None ):\n ''\n if obj is None :\n standardMsg='unexpectedly None'\n self.fail(self._formatMessage(msg,standardMsg))\n \n def assertIsInstance(self,obj,cls,msg=None ):\n ''\n \n if not isinstance(obj,cls):\n standardMsg='%s is not an instance of %r'%(safe_repr(obj),cls)\n self.fail(self._formatMessage(msg,standardMsg))\n \n def assertNotIsInstance(self,obj,cls,msg=None ):\n ''\n if isinstance(obj,cls):\n standardMsg='%s is an instance of %r'%(safe_repr(obj),cls)\n self.fail(self._formatMessage(msg,standardMsg))\n \n def assertRaisesRegex(self,expected_exception,expected_regex,\n *args,**kwargs):\n ''\n\n\n\n\n\n\n\n\n\n \n context=_AssertRaisesContext(expected_exception,self,expected_regex)\n return context.handle('assertRaisesRegex',args,kwargs)\n \n def assertWarnsRegex(self,expected_warning,expected_regex,\n *args,**kwargs):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n \n context=_AssertWarnsContext(expected_warning,self,expected_regex)\n return context.handle('assertWarnsRegex',args,kwargs)\n \n def assertRegex(self,text,expected_regex,msg=None ):\n ''\n if 
isinstance(expected_regex,(str,bytes)):\n assert expected_regex,\"expected_regex must not be empty.\"\n expected_regex=re.compile(expected_regex)\n if not expected_regex.search(text):\n standardMsg=\"Regex didn't match: %r not found in %r\"%(\n expected_regex.pattern,text)\n \n msg=self._formatMessage(msg,standardMsg)\n raise self.failureException(msg)\n \n def assertNotRegex(self,text,unexpected_regex,msg=None ):\n ''\n if isinstance(unexpected_regex,(str,bytes)):\n unexpected_regex=re.compile(unexpected_regex)\n match=unexpected_regex.search(text)\n if match:\n standardMsg='Regex matched: %r matches %r in %r'%(\n text[match.start():match.end()],\n unexpected_regex.pattern,\n text)\n \n msg=self._formatMessage(msg,standardMsg)\n raise self.failureException(msg)\n \n \n def _deprecate(original_func):\n def deprecated_func(*args,**kwargs):\n warnings.warn(\n 'Please use {0} instead.'.format(original_func.__name__),\n DeprecationWarning,2)\n return original_func(*args,**kwargs)\n return deprecated_func\n \n \n failUnlessEqual=assertEquals=_deprecate(assertEqual)\n failIfEqual=assertNotEquals=_deprecate(assertNotEqual)\n failUnlessAlmostEqual=assertAlmostEquals=_deprecate(assertAlmostEqual)\n failIfAlmostEqual=assertNotAlmostEquals=_deprecate(assertNotAlmostEqual)\n failUnless=assert_=_deprecate(assertTrue)\n failUnlessRaises=_deprecate(assertRaises)\n failIf=_deprecate(assertFalse)\n assertRaisesRegexp=_deprecate(assertRaisesRegex)\n assertRegexpMatches=_deprecate(assertRegex)\n assertNotRegexpMatches=_deprecate(assertNotRegex)\n \n \n \nclass FunctionTestCase(TestCase):\n ''\n\n\n\n\n\n \n \n def __init__(self,testFunc,setUp=None ,tearDown=None ,description=None ):\n super(FunctionTestCase,self).__init__()\n self._setUpFunc=setUp\n self._tearDownFunc=tearDown\n self._testFunc=testFunc\n self._description=description\n \n def setUp(self):\n if self._setUpFunc is not None :\n self._setUpFunc()\n \n def tearDown(self):\n if self._tearDownFunc is not None :\n self._tearDownFunc()\n \n def runTest(self):\n self._testFunc()\n \n def id(self):\n return self._testFunc.__name__\n \n def __eq__(self,other):\n if not isinstance(other,self.__class__):\n return NotImplemented\n \n return self._setUpFunc ==other._setUpFunc and\\\n self._tearDownFunc ==other._tearDownFunc and\\\n self._testFunc ==other._testFunc and\\\n self._description ==other._description\n \n def __hash__(self):\n return hash((type(self),self._setUpFunc,self._tearDownFunc,\n self._testFunc,self._description))\n \n def __str__(self):\n return \"%s (%s)\"%(strclass(self.__class__),\n self._testFunc.__name__)\n \n def __repr__(self):\n return \"<%s tec=%s>\"%(strclass(self.__class__),\n self._testFunc)\n \n def shortDescription(self):\n if self._description is not None :\n return self._description\n doc=self._testFunc.__doc__\n return doc and doc.split(\"\\n\")[0].strip()or None\n \n \nclass _SubTest(TestCase):\n\n def __init__(self,test_case,message,params):\n super().__init__()\n self._message=message\n self.test_case=test_case\n self.params=params\n self.failureException=test_case.failureException\n \n def runTest(self):\n raise NotImplementedError(\"subtests cannot be run directly\")\n \n def _subDescription(self):\n parts=[]\n if self._message is not _subtest_msg_sentinel:\n parts.append(\"[{}]\".format(self._message))\n if self.params:\n params_desc=', '.join(\n \"{}={!r}\".format(k,v)\n for (k,v)in self.params.items())\n parts.append(\"({})\".format(params_desc))\n return \" \".join(parts)or '()'\n \n def id(self):\n return \"{} 
{}\".format(self.test_case.id(),self._subDescription())\n \n def shortDescription(self):\n ''\n\n \n return self.test_case.shortDescription()\n \n def __str__(self):\n return \"{} {}\".format(self.test_case,self._subDescription())\n", ["collections", "contextlib", "difflib", "functools", "logging", "pprint", "re", "sys", "traceback", "types", "unittest", "unittest.result", "unittest.util", "warnings"]], "unittest.loader": [".py", "''\n\nimport os\nimport re\nimport sys\nimport traceback\nimport types\nimport functools\nimport warnings\n\nfrom fnmatch import fnmatch,fnmatchcase\n\nfrom . import case,suite,util\n\n__unittest=True\n\n\n\n\nVALID_MODULE_NAME=re.compile(r'[_a-z]\\w*\\.py$',re.IGNORECASE)\n\n\nclass _FailedTest(case.TestCase):\n _testMethodName=None\n \n def __init__(self,method_name,exception):\n self._exception=exception\n super(_FailedTest,self).__init__(method_name)\n \n def __getattr__(self,name):\n if name !=self._testMethodName:\n return super(_FailedTest,self).__getattr__(name)\n def testFailure():\n raise self._exception\n return testFailure\n \n \ndef _make_failed_import_test(name,suiteClass):\n message='Failed to import test module: %s\\n%s'%(\n name,traceback.format_exc())\n return _make_failed_test(name,ImportError(message),suiteClass,message)\n \ndef _make_failed_load_tests(name,exception,suiteClass):\n message='Failed to call load_tests:\\n%s'%(traceback.format_exc(),)\n return _make_failed_test(\n name,exception,suiteClass,message)\n \ndef _make_failed_test(methodname,exception,suiteClass,message):\n test=_FailedTest(methodname,exception)\n return suiteClass((test,)),message\n \ndef _make_skipped_test(methodname,exception,suiteClass):\n @case.skip(str(exception))\n def testSkipped(self):\n pass\n attrs={methodname:testSkipped}\n TestClass=type(\"ModuleSkipped\",(case.TestCase,),attrs)\n return suiteClass((TestClass(methodname),))\n \ndef _jython_aware_splitext(path):\n if path.lower().endswith('$py.class'):\n return path[:-9]\n return os.path.splitext(path)[0]\n \n \nclass TestLoader(object):\n ''\n\n\n \n testMethodPrefix='test'\n sortTestMethodsUsing=staticmethod(util.three_way_cmp)\n testNamePatterns=None\n suiteClass=suite.TestSuite\n _top_level_dir=None\n \n def __init__(self):\n super(TestLoader,self).__init__()\n self.errors=[]\n \n \n self._loading_packages=set()\n \n def loadTestsFromTestCase(self,testCaseClass):\n ''\n if issubclass(testCaseClass,suite.TestSuite):\n raise TypeError(\"Test cases should not be derived from \"\n \"TestSuite. 
Maybe you meant to derive from \"\n \"TestCase?\")\n testCaseNames=self.getTestCaseNames(testCaseClass)\n if not testCaseNames and hasattr(testCaseClass,'runTest'):\n testCaseNames=['runTest']\n loaded_suite=self.suiteClass(map(testCaseClass,testCaseNames))\n return loaded_suite\n \n \n \n def loadTestsFromModule(self,module,*args,pattern=None ,**kws):\n ''\n \n \n \n \n if len(args)>0 or 'use_load_tests'in kws:\n warnings.warn('use_load_tests is deprecated and ignored',\n DeprecationWarning)\n kws.pop('use_load_tests',None )\n if len(args)>1:\n \n \n complaint=len(args)+1\n raise TypeError('loadTestsFromModule() takes 1 positional argument but {} were given'.format(complaint))\n if len(kws)!=0:\n \n \n \n \n complaint=sorted(kws)[0]\n raise TypeError(\"loadTestsFromModule() got an unexpected keyword argument '{}'\".format(complaint))\n tests=[]\n for name in dir(module):\n obj=getattr(module,name)\n if isinstance(obj,type)and issubclass(obj,case.TestCase):\n tests.append(self.loadTestsFromTestCase(obj))\n \n load_tests=getattr(module,'load_tests',None )\n tests=self.suiteClass(tests)\n if load_tests is not None :\n try :\n return load_tests(self,tests,pattern)\n except Exception as e:\n error_case,error_message=_make_failed_load_tests(\n module.__name__,e,self.suiteClass)\n self.errors.append(error_message)\n return error_case\n return tests\n \n def loadTestsFromName(self,name,module=None ):\n ''\n\n\n\n\n\n\n \n parts=name.split('.')\n error_case,error_message=None ,None\n if module is None :\n parts_copy=parts[:]\n while parts_copy:\n try :\n module_name='.'.join(parts_copy)\n module=__import__(module_name)\n break\n except ImportError:\n next_attribute=parts_copy.pop()\n \n error_case,error_message=_make_failed_import_test(\n next_attribute,self.suiteClass)\n if not parts_copy:\n \n self.errors.append(error_message)\n return error_case\n parts=parts[1:]\n obj=module\n for part in parts:\n try :\n parent,obj=obj,getattr(obj,part)\n except AttributeError as e:\n \n if (getattr(obj,'__path__',None )is not None\n and error_case is not None ):\n \n \n \n \n \n self.errors.append(error_message)\n return error_case\n else :\n \n error_case,error_message=_make_failed_test(\n part,e,self.suiteClass,\n 'Failed to access attribute:\\n%s'%(\n traceback.format_exc(),))\n self.errors.append(error_message)\n return error_case\n \n if isinstance(obj,types.ModuleType):\n return self.loadTestsFromModule(obj)\n elif isinstance(obj,type)and issubclass(obj,case.TestCase):\n return self.loadTestsFromTestCase(obj)\n elif (isinstance(obj,types.FunctionType)and\n isinstance(parent,type)and\n issubclass(parent,case.TestCase)):\n name=parts[-1]\n inst=parent(name)\n \n if not isinstance(getattr(inst,name),types.FunctionType):\n return self.suiteClass([inst])\n elif isinstance(obj,suite.TestSuite):\n return obj\n if callable(obj):\n test=obj()\n if isinstance(test,suite.TestSuite):\n return test\n elif isinstance(test,case.TestCase):\n return self.suiteClass([test])\n else :\n raise TypeError(\"calling %s returned %s, not a test\"%\n (obj,test))\n else :\n raise TypeError(\"don't know how to make test from: %s\"%obj)\n \n def loadTestsFromNames(self,names,module=None ):\n ''\n\n \n suites=[self.loadTestsFromName(name,module)for name in names]\n return self.suiteClass(suites)\n \n def getTestCaseNames(self,testCaseClass):\n ''\n \n def shouldIncludeMethod(attrname):\n if not attrname.startswith(self.testMethodPrefix):\n return False\n testFunc=getattr(testCaseClass,attrname)\n if not callable(testFunc):\n return 
False\n fullName=f'%s.%s.%s'%(\n testCaseClass.__module__,testCaseClass.__qualname__,attrname\n )\n return self.testNamePatterns is None or\\\n any(fnmatchcase(fullName,pattern)for pattern in self.testNamePatterns)\n testFnNames=list(filter(shouldIncludeMethod,dir(testCaseClass)))\n if self.sortTestMethodsUsing:\n testFnNames.sort(key=functools.cmp_to_key(self.sortTestMethodsUsing))\n return testFnNames\n \n def discover(self,start_dir,pattern='test*.py',top_level_dir=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n set_implicit_top=False\n if top_level_dir is None and self._top_level_dir is not None :\n \n top_level_dir=self._top_level_dir\n elif top_level_dir is None :\n set_implicit_top=True\n top_level_dir=start_dir\n \n top_level_dir=os.path.abspath(top_level_dir)\n \n if not top_level_dir in sys.path:\n \n \n \n \n sys.path.insert(0,top_level_dir)\n self._top_level_dir=top_level_dir\n \n is_not_importable=False\n is_namespace=False\n tests=[]\n if os.path.isdir(os.path.abspath(start_dir)):\n start_dir=os.path.abspath(start_dir)\n if start_dir !=top_level_dir:\n is_not_importable=not os.path.isfile(os.path.join(start_dir,'__init__.py'))\n else :\n \n try :\n __import__(start_dir)\n except ImportError:\n is_not_importable=True\n else :\n the_module=sys.modules[start_dir]\n top_part=start_dir.split('.')[0]\n try :\n start_dir=os.path.abspath(\n os.path.dirname((the_module.__file__)))\n except AttributeError:\n \n try :\n spec=the_module.__spec__\n except AttributeError:\n spec=None\n \n if spec and spec.loader is None :\n if spec.submodule_search_locations is not None :\n is_namespace=True\n \n for path in the_module.__path__:\n if (not set_implicit_top and\n not path.startswith(top_level_dir)):\n continue\n self._top_level_dir=\\\n (path.split(the_module.__name__\n .replace(\".\",os.path.sep))[0])\n tests.extend(self._find_tests(path,\n pattern,\n namespace=True ))\n elif the_module.__name__ in sys.builtin_module_names:\n \n raise TypeError('Can not use builtin modules '\n 'as dotted module names')from None\n else :\n raise TypeError(\n 'don\\'t know how to discover from {!r}'\n .format(the_module))from None\n \n if set_implicit_top:\n if not is_namespace:\n self._top_level_dir=\\\n self._get_directory_containing_module(top_part)\n sys.path.remove(top_level_dir)\n else :\n sys.path.remove(top_level_dir)\n \n if is_not_importable:\n raise ImportError('Start directory is not importable: %r'%start_dir)\n \n if not is_namespace:\n tests=list(self._find_tests(start_dir,pattern))\n return self.suiteClass(tests)\n \n def _get_directory_containing_module(self,module_name):\n module=sys.modules[module_name]\n full_path=os.path.abspath(module.__file__)\n \n if os.path.basename(full_path).lower().startswith('__init__.py'):\n return os.path.dirname(os.path.dirname(full_path))\n else :\n \n \n \n return os.path.dirname(full_path)\n \n def _get_name_from_path(self,path):\n if path ==self._top_level_dir:\n return '.'\n path=_jython_aware_splitext(os.path.normpath(path))\n \n _relpath=os.path.relpath(path,self._top_level_dir)\n assert not os.path.isabs(_relpath),\"Path must be within the project\"\n assert not _relpath.startswith('..'),\"Path must be within the project\"\n \n name=_relpath.replace(os.path.sep,'.')\n return name\n \n def _get_module_from_name(self,name):\n __import__(name)\n return sys.modules[name]\n \n def _match_path(self,path,full_path,pattern):\n \n return fnmatch(path,pattern)\n \n def _find_tests(self,start_dir,pattern,namespace=False ):\n ''\n \n 
name=self._get_name_from_path(start_dir)\n \n \n if name !='.'and name not in self._loading_packages:\n \n \n tests,should_recurse=self._find_test_path(\n start_dir,pattern,namespace)\n if tests is not None :\n yield tests\n if not should_recurse:\n \n \n return\n \n paths=sorted(os.listdir(start_dir))\n for path in paths:\n full_path=os.path.join(start_dir,path)\n tests,should_recurse=self._find_test_path(\n full_path,pattern,namespace)\n if tests is not None :\n yield tests\n if should_recurse:\n \n name=self._get_name_from_path(full_path)\n self._loading_packages.add(name)\n try :\n yield from self._find_tests(full_path,pattern,namespace)\n finally :\n self._loading_packages.discard(name)\n \n def _find_test_path(self,full_path,pattern,namespace=False ):\n ''\n\n\n\n\n\n \n basename=os.path.basename(full_path)\n if os.path.isfile(full_path):\n if not VALID_MODULE_NAME.match(basename):\n \n return None ,False\n if not self._match_path(basename,full_path,pattern):\n return None ,False\n \n name=self._get_name_from_path(full_path)\n try :\n module=self._get_module_from_name(name)\n except case.SkipTest as e:\n return _make_skipped_test(name,e,self.suiteClass),False\n except :\n error_case,error_message=\\\n _make_failed_import_test(name,self.suiteClass)\n self.errors.append(error_message)\n return error_case,False\n else :\n mod_file=os.path.abspath(\n getattr(module,'__file__',full_path))\n realpath=_jython_aware_splitext(\n os.path.realpath(mod_file))\n fullpath_noext=_jython_aware_splitext(\n os.path.realpath(full_path))\n if realpath.lower()!=fullpath_noext.lower():\n module_dir=os.path.dirname(realpath)\n mod_name=_jython_aware_splitext(\n os.path.basename(full_path))\n expected_dir=os.path.dirname(full_path)\n msg=(\"%r module incorrectly imported from %r. Expected \"\n \"%r. 
Is this module globally installed?\")\n raise ImportError(\n msg %(mod_name,module_dir,expected_dir))\n return self.loadTestsFromModule(module,pattern=pattern),False\n elif os.path.isdir(full_path):\n if (not namespace and\n not os.path.isfile(os.path.join(full_path,'__init__.py'))):\n return None ,False\n \n load_tests=None\n tests=None\n name=self._get_name_from_path(full_path)\n try :\n package=self._get_module_from_name(name)\n except case.SkipTest as e:\n return _make_skipped_test(name,e,self.suiteClass),False\n except :\n error_case,error_message=\\\n _make_failed_import_test(name,self.suiteClass)\n self.errors.append(error_message)\n return error_case,False\n else :\n load_tests=getattr(package,'load_tests',None )\n \n self._loading_packages.add(name)\n try :\n tests=self.loadTestsFromModule(package,pattern=pattern)\n if load_tests is not None :\n \n return tests,False\n return tests,True\n finally :\n self._loading_packages.discard(name)\n else :\n return None ,False\n \n \ndefaultTestLoader=TestLoader()\n\n\ndef _makeLoader(prefix,sortUsing,suiteClass=None ,testNamePatterns=None ):\n loader=TestLoader()\n loader.sortTestMethodsUsing=sortUsing\n loader.testMethodPrefix=prefix\n loader.testNamePatterns=testNamePatterns\n if suiteClass:\n loader.suiteClass=suiteClass\n return loader\n \ndef getTestCaseNames(testCaseClass,prefix,sortUsing=util.three_way_cmp,testNamePatterns=None ):\n return _makeLoader(prefix,sortUsing,testNamePatterns=testNamePatterns).getTestCaseNames(testCaseClass)\n \ndef makeSuite(testCaseClass,prefix='test',sortUsing=util.three_way_cmp,\nsuiteClass=suite.TestSuite):\n return _makeLoader(prefix,sortUsing,suiteClass).loadTestsFromTestCase(\n testCaseClass)\n \ndef findTestCases(module,prefix='test',sortUsing=util.three_way_cmp,\nsuiteClass=suite.TestSuite):\n return _makeLoader(prefix,sortUsing,suiteClass).loadTestsFromModule(\\\n module)\n", ["fnmatch", "functools", "os", "re", "sys", "traceback", "types", "unittest", "unittest.case", "unittest.suite", "unittest.util", "warnings"]], "unittest.main": [".py", "''\n\nimport sys\nimport argparse\nimport os\n\nfrom . 
import loader,runner\nfrom .signals import installHandler\n\n__unittest=True\n\nMAIN_EXAMPLES=\"\"\"\\\nExamples:\n %(prog)s test_module - run tests from test_module\n %(prog)s module.TestClass - run tests from module.TestClass\n %(prog)s module.Class.test_method - run specified test method\n %(prog)s path/to/test_file.py - run tests from test_file.py\n\"\"\"\n\nMODULE_EXAMPLES=\"\"\"\\\nExamples:\n %(prog)s - run default set of tests\n %(prog)s MyTestSuite - run suite 'MyTestSuite'\n %(prog)s MyTestCase.testSomething - run MyTestCase.testSomething\n %(prog)s MyTestCase - run all 'test*' test methods\n in MyTestCase\n\"\"\"\n\ndef _convert_name(name):\n\n\n\n\n if os.path.isfile(name)and name.lower().endswith('.py'):\n if os.path.isabs(name):\n rel_path=os.path.relpath(name,os.getcwd())\n if os.path.isabs(rel_path)or rel_path.startswith(os.pardir):\n return name\n name=rel_path\n \n \n return name[:-3].replace('\\\\','.').replace('/','.')\n return name\n \ndef _convert_names(names):\n return [_convert_name(name)for name in names]\n \n \ndef _convert_select_pattern(pattern):\n if not '*'in pattern:\n pattern='*%s*'%pattern\n return pattern\n \n \nclass TestProgram(object):\n ''\n\n \n \n module=None\n verbosity=1\n failfast=catchbreak=buffer=progName=warnings=testNamePatterns=None\n _discovery_parser=None\n \n def __init__(self,module='__main__',defaultTest=None ,argv=None ,\n testRunner=None ,testLoader=loader.defaultTestLoader,\n exit=True ,verbosity=1,failfast=None ,catchbreak=None ,\n buffer=None ,warnings=None ,*,tb_locals=False ):\n if isinstance(module,str):\n self.module=__import__(module)\n for part in module.split('.')[1:]:\n self.module=getattr(self.module,part)\n else :\n self.module=module\n if argv is None :\n argv=sys.argv\n \n self.exit=exit\n self.failfast=failfast\n self.catchbreak=catchbreak\n self.verbosity=verbosity\n self.buffer=buffer\n self.tb_locals=tb_locals\n if warnings is None and not sys.warnoptions:\n \n \n \n self.warnings='default'\n else :\n \n \n \n \n \n self.warnings=warnings\n self.defaultTest=defaultTest\n self.testRunner=testRunner\n self.testLoader=testLoader\n self.progName=os.path.basename(argv[0])\n self.parseArgs(argv)\n self.runTests()\n \n def usageExit(self,msg=None ):\n if msg:\n print(msg)\n if self._discovery_parser is None :\n self._initArgParsers()\n self._print_help()\n sys.exit(2)\n \n def _print_help(self,*args,**kwargs):\n if self.module is None :\n print(self._main_parser.format_help())\n print(MAIN_EXAMPLES %{'prog':self.progName})\n self._discovery_parser.print_help()\n else :\n print(self._main_parser.format_help())\n print(MODULE_EXAMPLES %{'prog':self.progName})\n \n def parseArgs(self,argv):\n self._initArgParsers()\n if self.module is None :\n if len(argv)>1 and argv[1].lower()=='discover':\n self._do_discovery(argv[2:])\n return\n self._main_parser.parse_args(argv[1:],self)\n if not self.tests:\n \n \n self._do_discovery([])\n return\n else :\n self._main_parser.parse_args(argv[1:],self)\n \n if self.tests:\n self.testNames=_convert_names(self.tests)\n if __name__ =='__main__':\n \n self.module=None\n elif self.defaultTest is None :\n \n self.testNames=None\n elif isinstance(self.defaultTest,str):\n self.testNames=(self.defaultTest,)\n else :\n self.testNames=list(self.defaultTest)\n self.createTests()\n \n def createTests(self,from_discovery=False ,Loader=None ):\n if self.testNamePatterns:\n self.testLoader.testNamePatterns=self.testNamePatterns\n if from_discovery:\n loader=self.testLoader if Loader is None else Loader()\n 
self.test=loader.discover(self.start,self.pattern,self.top)\n elif self.testNames is None :\n self.test=self.testLoader.loadTestsFromModule(self.module)\n else :\n self.test=self.testLoader.loadTestsFromNames(self.testNames,\n self.module)\n \n def _initArgParsers(self):\n parent_parser=self._getParentArgParser()\n self._main_parser=self._getMainArgParser(parent_parser)\n self._discovery_parser=self._getDiscoveryArgParser(parent_parser)\n \n def _getParentArgParser(self):\n parser=argparse.ArgumentParser(add_help=False )\n \n parser.add_argument('-v','--verbose',dest='verbosity',\n action='store_const',const=2,\n help='Verbose output')\n parser.add_argument('-q','--quiet',dest='verbosity',\n action='store_const',const=0,\n help='Quiet output')\n parser.add_argument('--locals',dest='tb_locals',\n action='store_true',\n help='Show local variables in tracebacks')\n if self.failfast is None :\n parser.add_argument('-f','--failfast',dest='failfast',\n action='store_true',\n help='Stop on first fail or error')\n self.failfast=False\n if self.catchbreak is None :\n parser.add_argument('-c','--catch',dest='catchbreak',\n action='store_true',\n help='Catch Ctrl-C and display results so far')\n self.catchbreak=False\n if self.buffer is None :\n parser.add_argument('-b','--buffer',dest='buffer',\n action='store_true',\n help='Buffer stdout and stderr during tests')\n self.buffer=False\n if self.testNamePatterns is None :\n parser.add_argument('-k',dest='testNamePatterns',\n action='append',type=_convert_select_pattern,\n help='Only run tests which match the given substring')\n self.testNamePatterns=[]\n \n return parser\n \n def _getMainArgParser(self,parent):\n parser=argparse.ArgumentParser(parents=[parent])\n parser.prog=self.progName\n parser.print_help=self._print_help\n \n parser.add_argument('tests',nargs='*',\n help='a list of any number of test modules, '\n 'classes and test methods.')\n \n return parser\n \n def _getDiscoveryArgParser(self,parent):\n parser=argparse.ArgumentParser(parents=[parent])\n parser.prog='%s discover'%self.progName\n parser.epilog=('For test discovery all test modules must be '\n 'importable from the top level directory of the '\n 'project.')\n \n parser.add_argument('-s','--start-directory',dest='start',\n help=\"Directory to start discovery ('.' 
default)\")\n parser.add_argument('-p','--pattern',dest='pattern',\n help=\"Pattern to match tests ('test*.py' default)\")\n parser.add_argument('-t','--top-level-directory',dest='top',\n help='Top level directory of project (defaults to '\n 'start directory)')\n for arg in ('start','pattern','top'):\n parser.add_argument(arg,nargs='?',\n default=argparse.SUPPRESS,\n help=argparse.SUPPRESS)\n \n return parser\n \n def _do_discovery(self,argv,Loader=None ):\n self.start='.'\n self.pattern='test*.py'\n self.top=None\n if argv is not None :\n \n if self._discovery_parser is None :\n \n self._initArgParsers()\n self._discovery_parser.parse_args(argv,self)\n \n self.createTests(from_discovery=True ,Loader=Loader)\n \n def runTests(self):\n if self.catchbreak:\n installHandler()\n if self.testRunner is None :\n self.testRunner=runner.TextTestRunner\n if isinstance(self.testRunner,type):\n try :\n try :\n testRunner=self.testRunner(verbosity=self.verbosity,\n failfast=self.failfast,\n buffer=self.buffer,\n warnings=self.warnings,\n tb_locals=self.tb_locals)\n except TypeError:\n \n testRunner=self.testRunner(verbosity=self.verbosity,\n failfast=self.failfast,\n buffer=self.buffer,\n warnings=self.warnings)\n except TypeError:\n \n testRunner=self.testRunner()\n else :\n \n testRunner=self.testRunner\n self.result=testRunner.run(self.test)\n if self.exit:\n sys.exit(not self.result.wasSuccessful())\n \nmain=TestProgram\n", ["argparse", "os", "sys", "unittest", "unittest.loader", "unittest.runner", "unittest.signals"]], "unittest.mock": [".py", "\n\n\n\n\n\n__all__=(\n'Mock',\n'MagicMock',\n'patch',\n'sentinel',\n'DEFAULT',\n'ANY',\n'call',\n'create_autospec',\n'AsyncMock',\n'FILTER_DIR',\n'NonCallableMock',\n'NonCallableMagicMock',\n'mock_open',\n'PropertyMock',\n'seal',\n)\n\n\nimport asyncio\nimport contextlib\nimport io\nimport inspect\nimport pprint\nimport sys\nimport builtins\nfrom asyncio import iscoroutinefunction\nfrom types import CodeType,ModuleType,MethodType\nfrom unittest.util import safe_repr\nfrom functools import wraps,partial\n\n\n_builtins={name for name in dir(builtins)if not name.startswith('_')}\n\nFILTER_DIR=True\n\n\n\n_safe_super=super\n\ndef _is_async_obj(obj):\n if _is_instance_mock(obj)and not isinstance(obj,AsyncMock):\n return False\n if hasattr(obj,'__func__'):\n obj=getattr(obj,'__func__')\n return iscoroutinefunction(obj)or inspect.isawaitable(obj)\n \n \ndef _is_async_func(func):\n if getattr(func,'__code__',None ):\n return iscoroutinefunction(func)\n else :\n return False\n \n \ndef _is_instance_mock(obj):\n\n\n return issubclass(type(obj),NonCallableMock)\n \n \ndef _is_exception(obj):\n return (\n isinstance(obj,BaseException)or\n isinstance(obj,type)and issubclass(obj,BaseException)\n )\n \n \ndef _extract_mock(obj):\n\n\n if isinstance(obj,FunctionTypes)and hasattr(obj,'mock'):\n return obj.mock\n else :\n return obj\n \n \ndef _get_signature_object(func,as_instance,eat_self):\n ''\n\n\n\n \n if isinstance(func,type)and not as_instance:\n \n func=func.__init__\n \n eat_self=True\n elif not isinstance(func,FunctionTypes):\n \n \n try :\n func=func.__call__\n except AttributeError:\n return None\n if eat_self:\n sig_func=partial(func,None )\n else :\n sig_func=func\n try :\n return func,inspect.signature(sig_func)\n except ValueError:\n \n return None\n \n \ndef _check_signature(func,mock,skipfirst,instance=False ):\n sig=_get_signature_object(func,instance,skipfirst)\n if sig is None :\n return\n func,sig=sig\n def checksig(self,/,*args,**kwargs):\n 
sig.bind(*args,**kwargs)\n _copy_func_details(func,checksig)\n type(mock)._mock_check_sig=checksig\n type(mock).__signature__=sig\n \n \ndef _copy_func_details(func,funcopy):\n\n\n for attribute in (\n '__name__','__doc__','__text_signature__',\n '__module__','__defaults__','__kwdefaults__',\n ):\n try :\n setattr(funcopy,attribute,getattr(func,attribute))\n except AttributeError:\n pass\n \n \ndef _callable(obj):\n if isinstance(obj,type):\n return True\n if isinstance(obj,(staticmethod,classmethod,MethodType)):\n return _callable(obj.__func__)\n if getattr(obj,'__call__',None )is not None :\n return True\n return False\n \n \ndef _is_list(obj):\n\n\n return type(obj)in (list,tuple)\n \n \ndef _instance_callable(obj):\n ''\n \n if not isinstance(obj,type):\n \n return getattr(obj,'__call__',None )is not None\n \n \n \n for base in (obj,)+obj.__mro__:\n if base.__dict__.get('__call__')is not None :\n return True\n return False\n \n \ndef _set_signature(mock,original,instance=False ):\n\n\n\n\n skipfirst=isinstance(original,type)\n result=_get_signature_object(original,instance,skipfirst)\n if result is None :\n return mock\n func,sig=result\n def checksig(*args,**kwargs):\n sig.bind(*args,**kwargs)\n _copy_func_details(func,checksig)\n \n name=original.__name__\n if not name.isidentifier():\n name='funcopy'\n context={'_checksig_':checksig,'mock':mock}\n src=\"\"\"def %s(*args, **kwargs):\n _checksig_(*args, **kwargs)\n return mock(*args, **kwargs)\"\"\"%name\n exec(src,context)\n funcopy=context[name]\n _setup_func(funcopy,mock,sig)\n return funcopy\n \n \ndef _setup_func(funcopy,mock,sig):\n funcopy.mock=mock\n \n def assert_called_with(*args,**kwargs):\n return mock.assert_called_with(*args,**kwargs)\n def assert_called(*args,**kwargs):\n return mock.assert_called(*args,**kwargs)\n def assert_not_called(*args,**kwargs):\n return mock.assert_not_called(*args,**kwargs)\n def assert_called_once(*args,**kwargs):\n return mock.assert_called_once(*args,**kwargs)\n def assert_called_once_with(*args,**kwargs):\n return mock.assert_called_once_with(*args,**kwargs)\n def assert_has_calls(*args,**kwargs):\n return mock.assert_has_calls(*args,**kwargs)\n def assert_any_call(*args,**kwargs):\n return mock.assert_any_call(*args,**kwargs)\n def reset_mock():\n funcopy.method_calls=_CallList()\n funcopy.mock_calls=_CallList()\n mock.reset_mock()\n ret=funcopy.return_value\n if _is_instance_mock(ret)and not ret is mock:\n ret.reset_mock()\n \n funcopy.called=False\n funcopy.call_count=0\n funcopy.call_args=None\n funcopy.call_args_list=_CallList()\n funcopy.method_calls=_CallList()\n funcopy.mock_calls=_CallList()\n \n funcopy.return_value=mock.return_value\n funcopy.side_effect=mock.side_effect\n funcopy._mock_children=mock._mock_children\n \n funcopy.assert_called_with=assert_called_with\n funcopy.assert_called_once_with=assert_called_once_with\n funcopy.assert_has_calls=assert_has_calls\n funcopy.assert_any_call=assert_any_call\n funcopy.reset_mock=reset_mock\n funcopy.assert_called=assert_called\n funcopy.assert_not_called=assert_not_called\n funcopy.assert_called_once=assert_called_once\n funcopy.__signature__=sig\n \n mock._mock_delegate=funcopy\n \n \ndef _setup_async_mock(mock):\n mock._is_coroutine=asyncio.coroutines._is_coroutine\n mock.await_count=0\n mock.await_args=None\n mock.await_args_list=_CallList()\n \n \n \n \n def wrapper(attr,/,*args,**kwargs):\n return getattr(mock.mock,attr)(*args,**kwargs)\n \n for attribute in ('assert_awaited',\n 'assert_awaited_once',\n 
'assert_awaited_with',\n 'assert_awaited_once_with',\n 'assert_any_await',\n 'assert_has_awaits',\n 'assert_not_awaited'):\n \n \n \n \n \n setattr(mock,attribute,partial(wrapper,attribute))\n \n \ndef _is_magic(name):\n return '__%s__'%name[2:-2]==name\n \n \nclass _SentinelObject(object):\n ''\n def __init__(self,name):\n self.name=name\n \n def __repr__(self):\n return 'sentinel.%s'%self.name\n \n def __reduce__(self):\n return 'sentinel.%s'%self.name\n \n \nclass _Sentinel(object):\n ''\n def __init__(self):\n self._sentinels={}\n \n def __getattr__(self,name):\n if name =='__bases__':\n \n raise AttributeError\n return self._sentinels.setdefault(name,_SentinelObject(name))\n \n def __reduce__(self):\n return 'sentinel'\n \n \nsentinel=_Sentinel()\n\nDEFAULT=sentinel.DEFAULT\n_missing=sentinel.MISSING\n_deleted=sentinel.DELETED\n\n\n_allowed_names={\n'return_value','_mock_return_value','side_effect',\n'_mock_side_effect','_mock_parent','_mock_new_parent',\n'_mock_name','_mock_new_name'\n}\n\n\ndef _delegating_property(name):\n _allowed_names.add(name)\n _the_name='_mock_'+name\n def _get(self,name=name,_the_name=_the_name):\n sig=self._mock_delegate\n if sig is None :\n return getattr(self,_the_name)\n return getattr(sig,name)\n def _set(self,value,name=name,_the_name=_the_name):\n sig=self._mock_delegate\n if sig is None :\n self.__dict__[_the_name]=value\n else :\n setattr(sig,name,value)\n \n return property(_get,_set)\n \n \n \nclass _CallList(list):\n\n def __contains__(self,value):\n if not isinstance(value,list):\n return list.__contains__(self,value)\n len_value=len(value)\n len_self=len(self)\n if len_value >len_self:\n return False\n \n for i in range(0,len_self -len_value+1):\n sub_list=self[i:i+len_value]\n if sub_list ==value:\n return True\n return False\n \n def __repr__(self):\n return pprint.pformat(list(self))\n \n \ndef _check_and_set_parent(parent,value,name,new_name):\n value=_extract_mock(value)\n \n if not _is_instance_mock(value):\n return False\n if ((value._mock_name or value._mock_new_name)or\n (value._mock_parent is not None )or\n (value._mock_new_parent is not None )):\n return False\n \n _parent=parent\n while _parent is not None :\n \n \n if _parent is value:\n return False\n _parent=_parent._mock_new_parent\n \n if new_name:\n value._mock_new_parent=parent\n value._mock_new_name=new_name\n if name:\n value._mock_parent=parent\n value._mock_name=name\n return True\n \n \nclass _MockIter(object):\n def __init__(self,obj):\n self.obj=iter(obj)\n def __next__(self):\n return next(self.obj)\n \nclass Base(object):\n _mock_return_value=DEFAULT\n _mock_side_effect=None\n def __init__(self,/,*args,**kwargs):\n pass\n \n \n \nclass NonCallableMock(Base):\n ''\n \n def __new__(cls,/,*args,**kw):\n \n \n \n bases=(cls,)\n if not issubclass(cls,AsyncMockMixin):\n \n bound_args=_MOCK_SIG.bind_partial(cls,*args,**kw).arguments\n spec_arg=bound_args.get('spec_set',bound_args.get('spec'))\n if spec_arg and _is_async_obj(spec_arg):\n bases=(AsyncMockMixin,cls)\n new=type(cls.__name__,bases,{'__doc__':cls.__doc__})\n instance=_safe_super(NonCallableMock,cls).__new__(new)\n return instance\n \n \n def __init__(\n self,spec=None ,wraps=None ,name=None ,spec_set=None ,\n parent=None ,_spec_state=None ,_new_name='',_new_parent=None ,\n _spec_as_instance=False ,_eat_self=None ,unsafe=False ,**kwargs\n ):\n if _new_parent is None :\n _new_parent=parent\n \n __dict__=self.__dict__\n __dict__['_mock_parent']=parent\n __dict__['_mock_name']=name\n 
__dict__['_mock_new_name']=_new_name\n __dict__['_mock_new_parent']=_new_parent\n __dict__['_mock_sealed']=False\n \n if spec_set is not None :\n spec=spec_set\n spec_set=True\n if _eat_self is None :\n _eat_self=parent is not None\n \n self._mock_add_spec(spec,spec_set,_spec_as_instance,_eat_self)\n \n __dict__['_mock_children']={}\n __dict__['_mock_wraps']=wraps\n __dict__['_mock_delegate']=None\n \n __dict__['_mock_called']=False\n __dict__['_mock_call_args']=None\n __dict__['_mock_call_count']=0\n __dict__['_mock_call_args_list']=_CallList()\n __dict__['_mock_mock_calls']=_CallList()\n \n __dict__['method_calls']=_CallList()\n __dict__['_mock_unsafe']=unsafe\n \n if kwargs:\n self.configure_mock(**kwargs)\n \n _safe_super(NonCallableMock,self).__init__(\n spec,wraps,name,spec_set,parent,\n _spec_state\n )\n \n \n def attach_mock(self,mock,attribute):\n ''\n\n\n \n inner_mock=_extract_mock(mock)\n \n inner_mock._mock_parent=None\n inner_mock._mock_new_parent=None\n inner_mock._mock_name=''\n inner_mock._mock_new_name=None\n \n setattr(self,attribute,mock)\n \n \n def mock_add_spec(self,spec,spec_set=False ):\n ''\n\n\n\n \n self._mock_add_spec(spec,spec_set)\n \n \n def _mock_add_spec(self,spec,spec_set,_spec_as_instance=False ,\n _eat_self=False ):\n _spec_class=None\n _spec_signature=None\n _spec_asyncs=[]\n \n for attr in dir(spec):\n if iscoroutinefunction(getattr(spec,attr,None )):\n _spec_asyncs.append(attr)\n \n if spec is not None and not _is_list(spec):\n if isinstance(spec,type):\n _spec_class=spec\n else :\n _spec_class=type(spec)\n res=_get_signature_object(spec,\n _spec_as_instance,_eat_self)\n _spec_signature=res and res[1]\n \n spec=dir(spec)\n \n __dict__=self.__dict__\n __dict__['_spec_class']=_spec_class\n __dict__['_spec_set']=spec_set\n __dict__['_spec_signature']=_spec_signature\n __dict__['_mock_methods']=spec\n __dict__['_spec_asyncs']=_spec_asyncs\n \n def __get_return_value(self):\n ret=self._mock_return_value\n if self._mock_delegate is not None :\n ret=self._mock_delegate.return_value\n \n if ret is DEFAULT:\n ret=self._get_child_mock(\n _new_parent=self,_new_name='()'\n )\n self.return_value=ret\n return ret\n \n \n def __set_return_value(self,value):\n if self._mock_delegate is not None :\n self._mock_delegate.return_value=value\n else :\n self._mock_return_value=value\n _check_and_set_parent(self,value,None ,'()')\n \n __return_value_doc=\"The value to be returned when the mock is called.\"\n return_value=property(__get_return_value,__set_return_value,\n __return_value_doc)\n \n \n @property\n def __class__(self):\n if self._spec_class is None :\n return type(self)\n return self._spec_class\n \n called=_delegating_property('called')\n call_count=_delegating_property('call_count')\n call_args=_delegating_property('call_args')\n call_args_list=_delegating_property('call_args_list')\n mock_calls=_delegating_property('mock_calls')\n \n \n def __get_side_effect(self):\n delegated=self._mock_delegate\n if delegated is None :\n return self._mock_side_effect\n sf=delegated.side_effect\n if (sf is not None and not callable(sf)\n and not isinstance(sf,_MockIter)and not _is_exception(sf)):\n sf=_MockIter(sf)\n delegated.side_effect=sf\n return sf\n \n def __set_side_effect(self,value):\n value=_try_iter(value)\n delegated=self._mock_delegate\n if delegated is None :\n self._mock_side_effect=value\n else :\n delegated.side_effect=value\n \n side_effect=property(__get_side_effect,__set_side_effect)\n \n \n def reset_mock(self,visited=None ,*,return_value=False 
,side_effect=False ):\n ''\n if visited is None :\n visited=[]\n if id(self)in visited:\n return\n visited.append(id(self))\n \n self.called=False\n self.call_args=None\n self.call_count=0\n self.mock_calls=_CallList()\n self.call_args_list=_CallList()\n self.method_calls=_CallList()\n \n if return_value:\n self._mock_return_value=DEFAULT\n if side_effect:\n self._mock_side_effect=None\n \n for child in self._mock_children.values():\n if isinstance(child,_SpecState)or child is _deleted:\n continue\n child.reset_mock(visited,return_value=return_value,side_effect=side_effect)\n \n ret=self._mock_return_value\n if _is_instance_mock(ret)and ret is not self:\n ret.reset_mock(visited)\n \n \n def configure_mock(self,/,**kwargs):\n ''\n\n\n\n\n\n\n \n for arg,val in sorted(kwargs.items(),\n \n \n \n key=lambda entry:entry[0].count('.')):\n args=arg.split('.')\n final=args.pop()\n obj=self\n for entry in args:\n obj=getattr(obj,entry)\n setattr(obj,final,val)\n \n \n def __getattr__(self,name):\n if name in {'_mock_methods','_mock_unsafe'}:\n raise AttributeError(name)\n elif self._mock_methods is not None :\n if name not in self._mock_methods or name in _all_magics:\n raise AttributeError(\"Mock object has no attribute %r\"%name)\n elif _is_magic(name):\n raise AttributeError(name)\n if not self._mock_unsafe:\n if name.startswith(('assert','assret')):\n raise AttributeError(\"Attributes cannot start with 'assert' \"\n \"or 'assret'\")\n \n result=self._mock_children.get(name)\n if result is _deleted:\n raise AttributeError(name)\n elif result is None :\n wraps=None\n if self._mock_wraps is not None :\n \n \n wraps=getattr(self._mock_wraps,name)\n \n result=self._get_child_mock(\n parent=self,name=name,wraps=wraps,_new_name=name,\n _new_parent=self\n )\n self._mock_children[name]=result\n \n elif isinstance(result,_SpecState):\n result=create_autospec(\n result.spec,result.spec_set,result.instance,\n result.parent,result.name\n )\n self._mock_children[name]=result\n \n return result\n \n \n def _extract_mock_name(self):\n _name_list=[self._mock_new_name]\n _parent=self._mock_new_parent\n last=self\n \n dot='.'\n if _name_list ==['()']:\n dot=''\n \n while _parent is not None :\n last=_parent\n \n _name_list.append(_parent._mock_new_name+dot)\n dot='.'\n if _parent._mock_new_name =='()':\n dot=''\n \n _parent=_parent._mock_new_parent\n \n _name_list=list(reversed(_name_list))\n _first=last._mock_name or 'mock'\n if len(_name_list)>1:\n if _name_list[1]not in ('()','().'):\n _first +='.'\n _name_list[0]=_first\n return ''.join(_name_list)\n \n def __repr__(self):\n name=self._extract_mock_name()\n \n name_string=''\n if name not in ('mock','mock.'):\n name_string=' name=%r'%name\n \n spec_string=''\n if self._spec_class is not None :\n spec_string=' spec=%r'\n if self._spec_set:\n spec_string=' spec_set=%r'\n spec_string=spec_string %self._spec_class.__name__\n return \"<%s%s%s id='%s'>\"%(\n type(self).__name__,\n name_string,\n spec_string,\n id(self)\n )\n \n \n def __dir__(self):\n ''\n if not FILTER_DIR:\n return object.__dir__(self)\n \n extras=self._mock_methods or []\n from_type=dir(type(self))\n from_dict=list(self.__dict__)\n from_child_mocks=[\n m_name for m_name,m_value in self._mock_children.items()\n if m_value is not _deleted]\n \n from_type=[e for e in from_type if not e.startswith('_')]\n from_dict=[e for e in from_dict if not e.startswith('_')or\n _is_magic(e)]\n return sorted(set(extras+from_type+from_dict+from_child_mocks))\n \n \n def __setattr__(self,name,value):\n if name in 
_allowed_names:\n \n return object.__setattr__(self,name,value)\n elif (self._spec_set and self._mock_methods is not None and\n name not in self._mock_methods and\n name not in self.__dict__):\n raise AttributeError(\"Mock object has no attribute '%s'\"%name)\n elif name in _unsupported_magics:\n msg='Attempting to set unsupported magic method %r.'%name\n raise AttributeError(msg)\n elif name in _all_magics:\n if self._mock_methods is not None and name not in self._mock_methods:\n raise AttributeError(\"Mock object has no attribute '%s'\"%name)\n \n if not _is_instance_mock(value):\n setattr(type(self),name,_get_method(name,value))\n original=value\n value=lambda *args,**kw:original(self,*args,**kw)\n else :\n \n \n _check_and_set_parent(self,value,None ,name)\n setattr(type(self),name,value)\n self._mock_children[name]=value\n elif name =='__class__':\n self._spec_class=value\n return\n else :\n if _check_and_set_parent(self,value,name,name):\n self._mock_children[name]=value\n \n if self._mock_sealed and not hasattr(self,name):\n mock_name=f'{self._extract_mock_name()}.{name}'\n raise AttributeError(f'Cannot set {mock_name}')\n \n return object.__setattr__(self,name,value)\n \n \n def __delattr__(self,name):\n if name in _all_magics and name in type(self).__dict__:\n delattr(type(self),name)\n if name not in self.__dict__:\n \n \n return\n \n obj=self._mock_children.get(name,_missing)\n if name in self.__dict__:\n _safe_super(NonCallableMock,self).__delattr__(name)\n elif obj is _deleted:\n raise AttributeError(name)\n if obj is not _missing:\n del self._mock_children[name]\n self._mock_children[name]=_deleted\n \n \n def _format_mock_call_signature(self,args,kwargs):\n name=self._mock_name or 'mock'\n return _format_call_signature(name,args,kwargs)\n \n \n def _format_mock_failure_message(self,args,kwargs,action='call'):\n message='expected %s not found.\\nExpected: %s\\nActual: %s'\n expected_string=self._format_mock_call_signature(args,kwargs)\n call_args=self.call_args\n actual_string=self._format_mock_call_signature(*call_args)\n return message %(action,expected_string,actual_string)\n \n \n def _get_call_signature_from_name(self,name):\n ''\n\n\n\n\n\n\n\n\n \n if not name:\n return self._spec_signature\n \n sig=None\n names=name.replace('()','').split('.')\n children=self._mock_children\n \n for name in names:\n child=children.get(name)\n if child is None or isinstance(child,_SpecState):\n break\n else :\n \n \n \n child=_extract_mock(child)\n children=child._mock_children\n sig=child._spec_signature\n \n return sig\n \n \n def _call_matcher(self,_call):\n ''\n\n\n\n\n \n \n if isinstance(_call,tuple)and len(_call)>2:\n sig=self._get_call_signature_from_name(_call[0])\n else :\n sig=self._spec_signature\n \n if sig is not None :\n if len(_call)==2:\n name=''\n args,kwargs=_call\n else :\n name,args,kwargs=_call\n try :\n bound_call=sig.bind(*args,**kwargs)\n return call(name,bound_call.args,bound_call.kwargs)\n except TypeError as e:\n return e.with_traceback(None )\n else :\n return _call\n \n def assert_not_called(self):\n ''\n \n if self.call_count !=0:\n msg=(\"Expected '%s' to not have been called. 
Called %s times.%s\"\n %(self._mock_name or 'mock',\n self.call_count,\n self._calls_repr()))\n raise AssertionError(msg)\n \n def assert_called(self):\n ''\n \n if self.call_count ==0:\n msg=(\"Expected '%s' to have been called.\"%\n (self._mock_name or 'mock'))\n raise AssertionError(msg)\n \n def assert_called_once(self):\n ''\n \n if not self.call_count ==1:\n msg=(\"Expected '%s' to have been called once. Called %s times.%s\"\n %(self._mock_name or 'mock',\n self.call_count,\n self._calls_repr()))\n raise AssertionError(msg)\n \n def assert_called_with(self,/,*args,**kwargs):\n ''\n\n\n \n if self.call_args is None :\n expected=self._format_mock_call_signature(args,kwargs)\n actual='not called.'\n error_message=('expected call not found.\\nExpected: %s\\nActual: %s'\n %(expected,actual))\n raise AssertionError(error_message)\n \n def _error_message():\n msg=self._format_mock_failure_message(args,kwargs)\n return msg\n expected=self._call_matcher(_Call((args,kwargs),two=True ))\n actual=self._call_matcher(self.call_args)\n if actual !=expected:\n cause=expected if isinstance(expected,Exception)else None\n raise AssertionError(_error_message())from cause\n \n \n def assert_called_once_with(self,/,*args,**kwargs):\n ''\n \n if not self.call_count ==1:\n msg=(\"Expected '%s' to be called once. Called %s times.%s\"\n %(self._mock_name or 'mock',\n self.call_count,\n self._calls_repr()))\n raise AssertionError(msg)\n return self.assert_called_with(*args,**kwargs)\n \n \n def assert_has_calls(self,calls,any_order=False ):\n ''\n\n\n\n\n\n\n\n \n expected=[self._call_matcher(c)for c in calls]\n cause=next((e for e in expected if isinstance(e,Exception)),None )\n all_calls=_CallList(self._call_matcher(c)for c in self.mock_calls)\n if not any_order:\n if expected not in all_calls:\n if cause is None :\n problem='Calls not found.'\n else :\n problem=('Error processing expected calls.\\n'\n 'Errors: {}').format(\n [e if isinstance(e,Exception)else None\n for e in expected])\n raise AssertionError(\n f'{problem}\\n'\n f'Expected: {_CallList(calls)}'\n f'{self._calls_repr(prefix=\"Actual\").rstrip(\".\")}'\n )from cause\n return\n \n all_calls=list(all_calls)\n \n not_found=[]\n for kall in expected:\n try :\n all_calls.remove(kall)\n except ValueError:\n not_found.append(kall)\n if not_found:\n raise AssertionError(\n '%r does not contain all of %r in its call list, '\n 'found %r instead'%(self._mock_name or 'mock',\n tuple(not_found),all_calls)\n )from cause\n \n \n def assert_any_call(self,/,*args,**kwargs):\n ''\n\n\n\n \n expected=self._call_matcher(_Call((args,kwargs),two=True ))\n cause=expected if isinstance(expected,Exception)else None\n actual=[self._call_matcher(c)for c in self.call_args_list]\n if cause or expected not in _AnyComparer(actual):\n expected_string=self._format_mock_call_signature(args,kwargs)\n raise AssertionError(\n '%s call not found'%expected_string\n )from cause\n \n \n def _get_child_mock(self,/,**kw):\n ''\n\n\n\n\n\n \n _new_name=kw.get(\"_new_name\")\n if _new_name in self.__dict__['_spec_asyncs']:\n return AsyncMock(**kw)\n \n _type=type(self)\n if issubclass(_type,MagicMock)and _new_name in _async_method_magics:\n \n klass=AsyncMock\n elif issubclass(_type,AsyncMockMixin):\n if (_new_name in _all_sync_magics or\n self._mock_methods and _new_name in self._mock_methods):\n \n klass=MagicMock\n else :\n klass=AsyncMock\n elif not issubclass(_type,CallableMixin):\n if issubclass(_type,NonCallableMagicMock):\n klass=MagicMock\n elif 
issubclass(_type,NonCallableMock):\n klass=Mock\n else :\n klass=_type.__mro__[1]\n \n if self._mock_sealed:\n attribute=\".\"+kw[\"name\"]if \"name\"in kw else \"()\"\n mock_name=self._extract_mock_name()+attribute\n raise AttributeError(mock_name)\n \n return klass(**kw)\n \n \n def _calls_repr(self,prefix=\"Calls\"):\n ''\n\n\n\n\n\n \n if not self.mock_calls:\n return \"\"\n return f\"\\n{prefix}: {safe_repr(self.mock_calls)}.\"\n \n \n_MOCK_SIG=inspect.signature(NonCallableMock.__init__)\n\n\nclass _AnyComparer(list):\n ''\n\n\n \n def __contains__(self,item):\n for _call in self:\n assert len(item)==len(_call)\n if all([\n expected ==actual\n for expected,actual in zip(item,_call)\n ]):\n return True\n return False\n \n \ndef _try_iter(obj):\n if obj is None :\n return obj\n if _is_exception(obj):\n return obj\n if _callable(obj):\n return obj\n try :\n return iter(obj)\n except TypeError:\n \n \n return obj\n \n \nclass CallableMixin(Base):\n\n def __init__(self,spec=None ,side_effect=None ,return_value=DEFAULT,\n wraps=None ,name=None ,spec_set=None ,parent=None ,\n _spec_state=None ,_new_name='',_new_parent=None ,**kwargs):\n self.__dict__['_mock_return_value']=return_value\n _safe_super(CallableMixin,self).__init__(\n spec,wraps,name,spec_set,parent,\n _spec_state,_new_name,_new_parent,**kwargs\n )\n \n self.side_effect=side_effect\n \n \n def _mock_check_sig(self,/,*args,**kwargs):\n \n pass\n \n \n def __call__(self,/,*args,**kwargs):\n \n \n self._mock_check_sig(*args,**kwargs)\n self._increment_mock_call(*args,**kwargs)\n return self._mock_call(*args,**kwargs)\n \n \n def _mock_call(self,/,*args,**kwargs):\n return self._execute_mock_call(*args,**kwargs)\n \n def _increment_mock_call(self,/,*args,**kwargs):\n self.called=True\n self.call_count +=1\n \n \n \n \n _call=_Call((args,kwargs),two=True )\n self.call_args=_call\n self.call_args_list.append(_call)\n \n \n do_method_calls=self._mock_parent is not None\n method_call_name=self._mock_name\n \n \n mock_call_name=self._mock_new_name\n is_a_call=mock_call_name =='()'\n self.mock_calls.append(_Call(('',args,kwargs)))\n \n \n _new_parent=self._mock_new_parent\n while _new_parent is not None :\n \n \n if do_method_calls:\n _new_parent.method_calls.append(_Call((method_call_name,args,kwargs)))\n do_method_calls=_new_parent._mock_parent is not None\n if do_method_calls:\n method_call_name=_new_parent._mock_name+'.'+method_call_name\n \n \n this_mock_call=_Call((mock_call_name,args,kwargs))\n _new_parent.mock_calls.append(this_mock_call)\n \n if _new_parent._mock_new_name:\n if is_a_call:\n dot=''\n else :\n dot='.'\n is_a_call=_new_parent._mock_new_name =='()'\n mock_call_name=_new_parent._mock_new_name+dot+mock_call_name\n \n \n _new_parent=_new_parent._mock_new_parent\n \n def _execute_mock_call(self,/,*args,**kwargs):\n \n \n \n effect=self.side_effect\n if effect is not None :\n if _is_exception(effect):\n raise effect\n elif not _callable(effect):\n result=next(effect)\n if _is_exception(result):\n raise result\n else :\n result=effect(*args,**kwargs)\n \n if result is not DEFAULT:\n return result\n \n if self._mock_return_value is not DEFAULT:\n return self.return_value\n \n if self._mock_wraps is not None :\n return self._mock_wraps(*args,**kwargs)\n \n return self.return_value\n \n \n \nclass Mock(CallableMixin,NonCallableMock):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n \ndef _dot_lookup(thing,comp,import_path):\n try :\n return getattr(thing,comp)\n except 
AttributeError:\n __import__(import_path)\n return getattr(thing,comp)\n \n \ndef _importer(target):\n components=target.split('.')\n import_path=components.pop(0)\n thing=__import__(import_path)\n \n for comp in components:\n import_path +=\".%s\"%comp\n thing=_dot_lookup(thing,comp,import_path)\n return thing\n \n \nclass _patch(object):\n\n attribute_name=None\n _active_patches=[]\n \n def __init__(\n self,getter,attribute,new,spec,create,\n spec_set,autospec,new_callable,kwargs\n ):\n if new_callable is not None :\n if new is not DEFAULT:\n raise ValueError(\n \"Cannot use 'new' and 'new_callable' together\"\n )\n if autospec is not None :\n raise ValueError(\n \"Cannot use 'autospec' and 'new_callable' together\"\n )\n \n self.getter=getter\n self.attribute=attribute\n self.new=new\n self.new_callable=new_callable\n self.spec=spec\n self.create=create\n self.has_local=False\n self.spec_set=spec_set\n self.autospec=autospec\n self.kwargs=kwargs\n self.additional_patchers=[]\n \n \n def copy(self):\n patcher=_patch(\n self.getter,self.attribute,self.new,self.spec,\n self.create,self.spec_set,\n self.autospec,self.new_callable,self.kwargs\n )\n patcher.attribute_name=self.attribute_name\n patcher.additional_patchers=[\n p.copy()for p in self.additional_patchers\n ]\n return patcher\n \n \n def __call__(self,func):\n if isinstance(func,type):\n return self.decorate_class(func)\n if inspect.iscoroutinefunction(func):\n return self.decorate_async_callable(func)\n return self.decorate_callable(func)\n \n \n def decorate_class(self,klass):\n for attr in dir(klass):\n if not attr.startswith(patch.TEST_PREFIX):\n continue\n \n attr_value=getattr(klass,attr)\n if not hasattr(attr_value,\"__call__\"):\n continue\n \n patcher=self.copy()\n setattr(klass,attr,patcher(attr_value))\n return klass\n \n \n @contextlib.contextmanager\n def decoration_helper(self,patched,args,keywargs):\n extra_args=[]\n with contextlib.ExitStack()as exit_stack:\n for patching in patched.patchings:\n arg=exit_stack.enter_context(patching)\n if patching.attribute_name is not None :\n keywargs.update(arg)\n elif patching.new is DEFAULT:\n extra_args.append(arg)\n \n args +=tuple(extra_args)\n yield (args,keywargs)\n \n \n def decorate_callable(self,func):\n \n if hasattr(func,'patchings'):\n func.patchings.append(self)\n return func\n \n @wraps(func)\n def patched(*args,**keywargs):\n with self.decoration_helper(patched,\n args,\n keywargs)as (newargs,newkeywargs):\n return func(*newargs,**newkeywargs)\n \n patched.patchings=[self]\n return patched\n \n \n def decorate_async_callable(self,func):\n \n if hasattr(func,'patchings'):\n func.patchings.append(self)\n return func\n \n @wraps(func)\n async def patched(*args,**keywargs):\n with self.decoration_helper(patched,\n args,\n keywargs)as (newargs,newkeywargs):\n return await func(*newargs,**newkeywargs)\n \n patched.patchings=[self]\n return patched\n \n \n def get_original(self):\n target=self.getter()\n name=self.attribute\n \n original=DEFAULT\n local=False\n \n try :\n original=target.__dict__[name]\n except (AttributeError,KeyError):\n original=getattr(target,name,DEFAULT)\n else :\n local=True\n \n if name in _builtins and isinstance(target,ModuleType):\n self.create=True\n \n if not self.create and original is DEFAULT:\n raise AttributeError(\n \"%s does not have the attribute %r\"%(target,name)\n )\n return original,local\n \n \n def __enter__(self):\n ''\n new,spec,spec_set=self.new,self.spec,self.spec_set\n autospec,kwargs=self.autospec,self.kwargs\n 
new_callable=self.new_callable\n self.target=self.getter()\n \n \n if spec is False :\n spec=None\n if spec_set is False :\n spec_set=None\n if autospec is False :\n autospec=None\n \n if spec is not None and autospec is not None :\n raise TypeError(\"Can't specify spec and autospec\")\n if ((spec is not None or autospec is not None )and\n spec_set not in (True ,None )):\n raise TypeError(\"Can't provide explicit spec_set *and* spec or autospec\")\n \n original,local=self.get_original()\n \n if new is DEFAULT and autospec is None :\n inherit=False\n if spec is True :\n \n spec=original\n if spec_set is True :\n spec_set=original\n spec=None\n elif spec is not None :\n if spec_set is True :\n spec_set=spec\n spec=None\n elif spec_set is True :\n spec_set=original\n \n if spec is not None or spec_set is not None :\n if original is DEFAULT:\n raise TypeError(\"Can't use 'spec' with create=True\")\n if isinstance(original,type):\n \n inherit=True\n if spec is None and _is_async_obj(original):\n Klass=AsyncMock\n else :\n Klass=MagicMock\n _kwargs={}\n if new_callable is not None :\n Klass=new_callable\n elif spec is not None or spec_set is not None :\n this_spec=spec\n if spec_set is not None :\n this_spec=spec_set\n if _is_list(this_spec):\n not_callable='__call__'not in this_spec\n else :\n not_callable=not callable(this_spec)\n if _is_async_obj(this_spec):\n Klass=AsyncMock\n elif not_callable:\n Klass=NonCallableMagicMock\n \n if spec is not None :\n _kwargs['spec']=spec\n if spec_set is not None :\n _kwargs['spec_set']=spec_set\n \n \n if (isinstance(Klass,type)and\n issubclass(Klass,NonCallableMock)and self.attribute):\n _kwargs['name']=self.attribute\n \n _kwargs.update(kwargs)\n new=Klass(**_kwargs)\n \n if inherit and _is_instance_mock(new):\n \n \n this_spec=spec\n if spec_set is not None :\n this_spec=spec_set\n if (not _is_list(this_spec)and not\n _instance_callable(this_spec)):\n Klass=NonCallableMagicMock\n \n _kwargs.pop('name')\n new.return_value=Klass(_new_parent=new,_new_name='()',\n **_kwargs)\n elif autospec is not None :\n \n \n \n if new is not DEFAULT:\n raise TypeError(\n \"autospec creates the mock for you. 
Can't specify \"\n \"autospec and new.\"\n )\n if original is DEFAULT:\n raise TypeError(\"Can't use 'autospec' with create=True\")\n spec_set=bool(spec_set)\n if autospec is True :\n autospec=original\n \n new=create_autospec(autospec,spec_set=spec_set,\n _name=self.attribute,**kwargs)\n elif kwargs:\n \n \n raise TypeError(\"Can't pass kwargs to a mock we aren't creating\")\n \n new_attr=new\n \n self.temp_original=original\n self.is_local=local\n self._exit_stack=contextlib.ExitStack()\n try :\n setattr(self.target,self.attribute,new_attr)\n if self.attribute_name is not None :\n extra_args={}\n if self.new is DEFAULT:\n extra_args[self.attribute_name]=new\n for patching in self.additional_patchers:\n arg=self._exit_stack.enter_context(patching)\n if patching.new is DEFAULT:\n extra_args.update(arg)\n return extra_args\n \n return new\n except :\n if not self.__exit__(*sys.exc_info()):\n raise\n \n def __exit__(self,*exc_info):\n ''\n if self.is_local and self.temp_original is not DEFAULT:\n setattr(self.target,self.attribute,self.temp_original)\n else :\n delattr(self.target,self.attribute)\n if not self.create and (not hasattr(self.target,self.attribute)or\n self.attribute in ('__doc__','__module__',\n '__defaults__','__annotations__',\n '__kwdefaults__')):\n \n setattr(self.target,self.attribute,self.temp_original)\n \n del self.temp_original\n del self.is_local\n del self.target\n exit_stack=self._exit_stack\n del self._exit_stack\n return exit_stack.__exit__(*exc_info)\n \n \n def start(self):\n ''\n result=self.__enter__()\n self._active_patches.append(self)\n return result\n \n \n def stop(self):\n ''\n try :\n self._active_patches.remove(self)\n except ValueError:\n \n return None\n \n return self.__exit__(None ,None ,None )\n \n \n \ndef _get_target(target):\n try :\n target,attribute=target.rsplit('.',1)\n except (TypeError,ValueError):\n raise TypeError(\"Need a valid target to patch. 
You supplied: %r\"%\n (target,))\n getter=lambda :_importer(target)\n return getter,attribute\n \n \ndef _patch_object(\ntarget,attribute,new=DEFAULT,spec=None ,\ncreate=False ,spec_set=None ,autospec=None ,\nnew_callable=None ,**kwargs\n):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n if type(target)is str:\n raise TypeError(\n f\"{target!r} must be the actual object to be patched, not a str\"\n )\n getter=lambda :target\n return _patch(\n getter,attribute,new,spec,create,\n spec_set,autospec,new_callable,kwargs\n )\n \n \ndef _patch_multiple(target,spec=None ,create=False ,spec_set=None ,\nautospec=None ,new_callable=None ,**kwargs):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if type(target)is str:\n getter=lambda :_importer(target)\n else :\n getter=lambda :target\n \n if not kwargs:\n raise ValueError(\n 'Must supply at least one keyword argument with patch.multiple'\n )\n \n items=list(kwargs.items())\n attribute,new=items[0]\n patcher=_patch(\n getter,attribute,new,spec,create,spec_set,\n autospec,new_callable,{}\n )\n patcher.attribute_name=attribute\n for attribute,new in items[1:]:\n this_patcher=_patch(\n getter,attribute,new,spec,create,spec_set,\n autospec,new_callable,{}\n )\n this_patcher.attribute_name=attribute\n patcher.additional_patchers.append(this_patcher)\n return patcher\n \n \ndef patch(\ntarget,new=DEFAULT,spec=None ,create=False ,\nspec_set=None ,autospec=None ,new_callable=None ,**kwargs\n):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n getter,attribute=_get_target(target)\n return _patch(\n getter,attribute,new,spec,create,\n spec_set,autospec,new_callable,kwargs\n )\n \n \nclass _patch_dict(object):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n def __init__(self,in_dict,values=(),clear=False ,**kwargs):\n self.in_dict=in_dict\n \n self.values=dict(values)\n self.values.update(kwargs)\n self.clear=clear\n self._original=None\n \n \n def __call__(self,f):\n if isinstance(f,type):\n return self.decorate_class(f)\n @wraps(f)\n def _inner(*args,**kw):\n self._patch_dict()\n try :\n return f(*args,**kw)\n finally :\n self._unpatch_dict()\n \n return _inner\n \n \n def decorate_class(self,klass):\n for attr in dir(klass):\n attr_value=getattr(klass,attr)\n if (attr.startswith(patch.TEST_PREFIX)and\n hasattr(attr_value,\"__call__\")):\n decorator=_patch_dict(self.in_dict,self.values,self.clear)\n decorated=decorator(attr_value)\n setattr(klass,attr,decorated)\n return klass\n \n \n def __enter__(self):\n ''\n self._patch_dict()\n return self.in_dict\n \n \n def _patch_dict(self):\n values=self.values\n if isinstance(self.in_dict,str):\n self.in_dict=_importer(self.in_dict)\n in_dict=self.in_dict\n clear=self.clear\n \n try :\n original=in_dict.copy()\n except AttributeError:\n \n \n original={}\n for key in in_dict:\n original[key]=in_dict[key]\n self._original=original\n \n if clear:\n _clear_dict(in_dict)\n \n try :\n in_dict.update(values)\n except AttributeError:\n \n for key in values:\n in_dict[key]=values[key]\n \n \n def _unpatch_dict(self):\n in_dict=self.in_dict\n original=self._original\n \n _clear_dict(in_dict)\n \n try :\n in_dict.update(original)\n except AttributeError:\n for key in original:\n in_dict[key]=original[key]\n \n \n def __exit__(self,*args):\n ''\n if self._original is not None :\n self._unpatch_dict()\n return False\n \n \n def start(self):\n ''\n result=self.__enter__()\n _patch._active_patches.append(self)\n return result\n \n \n def 
stop(self):\n ''\n try :\n _patch._active_patches.remove(self)\n except ValueError:\n \n return None\n \n return self.__exit__(None ,None ,None )\n \n \ndef _clear_dict(in_dict):\n try :\n in_dict.clear()\n except AttributeError:\n keys=list(in_dict)\n for key in keys:\n del in_dict[key]\n \n \ndef _patch_stopall():\n ''\n for patch in reversed(_patch._active_patches):\n patch.stop()\n \n \npatch.object=_patch_object\npatch.dict=_patch_dict\npatch.multiple=_patch_multiple\npatch.stopall=_patch_stopall\npatch.TEST_PREFIX='test'\n\nmagic_methods=(\n\"lt le gt ge eq ne \"\n\"getitem setitem delitem \"\n\"len contains iter \"\n\"hash str sizeof \"\n\"enter exit \"\n\n\n\"divmod rdivmod neg pos abs invert \"\n\"complex int float index \"\n\"round trunc floor ceil \"\n\"bool next \"\n\"fspath \"\n\"aiter \"\n)\n\nnumerics=(\n\"add sub mul matmul div floordiv mod lshift rshift and xor or pow truediv\"\n)\ninplace=' '.join('i%s'%n for n in numerics.split())\nright=' '.join('r%s'%n for n in numerics.split())\n\n\n\n\n\n_non_defaults={\n'__get__','__set__','__delete__','__reversed__','__missing__',\n'__reduce__','__reduce_ex__','__getinitargs__','__getnewargs__',\n'__getstate__','__setstate__','__getformat__','__setformat__',\n'__repr__','__dir__','__subclasses__','__format__',\n'__getnewargs_ex__',\n}\n\n\ndef _get_method(name,func):\n ''\n def method(self,/,*args,**kw):\n return func(self,*args,**kw)\n method.__name__=name\n return method\n \n \n_magics={\n'__%s__'%method for method in\n' '.join([magic_methods,numerics,inplace,right]).split()\n}\n\n\n_async_method_magics={\"__aenter__\",\"__aexit__\",\"__anext__\"}\n\n_sync_async_magics={\"__aiter__\"}\n_async_magics=_async_method_magics |_sync_async_magics\n\n_all_sync_magics=_magics |_non_defaults\n_all_magics=_all_sync_magics |_async_magics\n\n_unsupported_magics={\n'__getattr__','__setattr__',\n'__init__','__new__','__prepare__',\n'__instancecheck__','__subclasscheck__',\n'__del__'\n}\n\n_calculate_return_value={\n'__hash__':lambda self:object.__hash__(self),\n'__str__':lambda self:object.__str__(self),\n'__sizeof__':lambda self:object.__sizeof__(self),\n'__fspath__':lambda self:f\"{type(self).__name__}/{self._extract_mock_name()}/{id(self)}\",\n}\n\n_return_values={\n'__lt__':NotImplemented,\n'__gt__':NotImplemented,\n'__le__':NotImplemented,\n'__ge__':NotImplemented,\n'__int__':1,\n'__contains__':False ,\n'__len__':0,\n'__exit__':False ,\n'__complex__':1j,\n'__float__':1.0,\n'__bool__':True ,\n'__index__':1,\n'__aexit__':False ,\n}\n\n\ndef _get_eq(self):\n def __eq__(other):\n ret_val=self.__eq__._mock_return_value\n if ret_val is not DEFAULT:\n return ret_val\n if self is other:\n return True\n return NotImplemented\n return __eq__\n \ndef _get_ne(self):\n def __ne__(other):\n if self.__ne__._mock_return_value is not DEFAULT:\n return DEFAULT\n if self is other:\n return False\n return NotImplemented\n return __ne__\n \ndef _get_iter(self):\n def __iter__():\n ret_val=self.__iter__._mock_return_value\n if ret_val is DEFAULT:\n return iter([])\n \n \n return iter(ret_val)\n return __iter__\n \ndef _get_async_iter(self):\n def __aiter__():\n ret_val=self.__aiter__._mock_return_value\n if ret_val is DEFAULT:\n return _AsyncIterator(iter([]))\n return _AsyncIterator(iter(ret_val))\n return __aiter__\n \n_side_effect_methods={\n'__eq__':_get_eq,\n'__ne__':_get_ne,\n'__iter__':_get_iter,\n'__aiter__':_get_async_iter\n}\n\n\n\ndef _set_return_value(mock,method,name):\n fixed=_return_values.get(name,DEFAULT)\n if fixed is not DEFAULT:\n 
method.return_value=fixed\n return\n \n return_calculator=_calculate_return_value.get(name)\n if return_calculator is not None :\n return_value=return_calculator(mock)\n method.return_value=return_value\n return\n \n side_effector=_side_effect_methods.get(name)\n if side_effector is not None :\n method.side_effect=side_effector(mock)\n \n \n \nclass MagicMixin(Base):\n def __init__(self,/,*args,**kw):\n self._mock_set_magics()\n _safe_super(MagicMixin,self).__init__(*args,**kw)\n self._mock_set_magics()\n \n \n def _mock_set_magics(self):\n orig_magics=_magics |_async_method_magics\n these_magics=orig_magics\n \n if getattr(self,\"_mock_methods\",None )is not None :\n these_magics=orig_magics.intersection(self._mock_methods)\n \n remove_magics=set()\n remove_magics=orig_magics -these_magics\n \n for entry in remove_magics:\n if entry in type(self).__dict__:\n \n delattr(self,entry)\n \n \n these_magics=these_magics -set(type(self).__dict__)\n \n _type=type(self)\n for entry in these_magics:\n setattr(_type,entry,MagicProxy(entry,self))\n \n \n \nclass NonCallableMagicMock(MagicMixin,NonCallableMock):\n ''\n def mock_add_spec(self,spec,spec_set=False ):\n ''\n\n\n\n \n self._mock_add_spec(spec,spec_set)\n self._mock_set_magics()\n \n \nclass AsyncMagicMixin(MagicMixin):\n def __init__(self,/,*args,**kw):\n self._mock_set_magics()\n _safe_super(AsyncMagicMixin,self).__init__(*args,**kw)\n self._mock_set_magics()\n \nclass MagicMock(MagicMixin,Mock):\n ''\n\n\n\n\n\n\n\n\n \n def mock_add_spec(self,spec,spec_set=False ):\n ''\n\n\n\n \n self._mock_add_spec(spec,spec_set)\n self._mock_set_magics()\n \n \n \nclass MagicProxy(Base):\n def __init__(self,name,parent):\n self.name=name\n self.parent=parent\n \n def create_mock(self):\n entry=self.name\n parent=self.parent\n m=parent._get_child_mock(name=entry,_new_name=entry,\n _new_parent=parent)\n setattr(parent,entry,m)\n _set_return_value(parent,m,entry)\n return m\n \n def __get__(self,obj,_type=None ):\n return self.create_mock()\n \n \nclass AsyncMockMixin(Base):\n await_count=_delegating_property('await_count')\n await_args=_delegating_property('await_args')\n await_args_list=_delegating_property('await_args_list')\n \n def __init__(self,/,*args,**kwargs):\n super().__init__(*args,**kwargs)\n \n \n \n \n \n \n self.__dict__['_is_coroutine']=asyncio.coroutines._is_coroutine\n self.__dict__['_mock_await_count']=0\n self.__dict__['_mock_await_args']=None\n self.__dict__['_mock_await_args_list']=_CallList()\n code_mock=NonCallableMock(spec_set=CodeType)\n code_mock.co_flags=inspect.CO_COROUTINE\n self.__dict__['__code__']=code_mock\n \n async def _execute_mock_call(self,/,*args,**kwargs):\n \n \n \n _call=_Call((args,kwargs),two=True )\n self.await_count +=1\n self.await_args=_call\n self.await_args_list.append(_call)\n \n effect=self.side_effect\n if effect is not None :\n if _is_exception(effect):\n raise effect\n elif not _callable(effect):\n try :\n result=next(effect)\n except StopIteration:\n \n \n raise StopAsyncIteration\n if _is_exception(result):\n raise result\n elif iscoroutinefunction(effect):\n result=await effect(*args,**kwargs)\n else :\n result=effect(*args,**kwargs)\n \n if result is not DEFAULT:\n return result\n \n if self._mock_return_value is not DEFAULT:\n return self.return_value\n \n if self._mock_wraps is not None :\n if iscoroutinefunction(self._mock_wraps):\n return await self._mock_wraps(*args,**kwargs)\n return self._mock_wraps(*args,**kwargs)\n \n return self.return_value\n \n def assert_awaited(self):\n ''\n\n \n 
if self.await_count ==0:\n msg=f\"Expected {self._mock_name or 'mock'} to have been awaited.\"\n raise AssertionError(msg)\n \n def assert_awaited_once(self):\n ''\n\n \n if not self.await_count ==1:\n msg=(f\"Expected {self._mock_name or 'mock'} to have been awaited once.\"\n f\" Awaited {self.await_count} times.\")\n raise AssertionError(msg)\n \n def assert_awaited_with(self,/,*args,**kwargs):\n ''\n\n \n if self.await_args is None :\n expected=self._format_mock_call_signature(args,kwargs)\n raise AssertionError(f'Expected await: {expected}\\nNot awaited')\n \n def _error_message():\n msg=self._format_mock_failure_message(args,kwargs,action='await')\n return msg\n \n expected=self._call_matcher(_Call((args,kwargs),two=True ))\n actual=self._call_matcher(self.await_args)\n if actual !=expected:\n cause=expected if isinstance(expected,Exception)else None\n raise AssertionError(_error_message())from cause\n \n def assert_awaited_once_with(self,/,*args,**kwargs):\n ''\n\n\n \n if not self.await_count ==1:\n msg=(f\"Expected {self._mock_name or 'mock'} to have been awaited once.\"\n f\" Awaited {self.await_count} times.\")\n raise AssertionError(msg)\n return self.assert_awaited_with(*args,**kwargs)\n \n def assert_any_await(self,/,*args,**kwargs):\n ''\n\n \n expected=self._call_matcher(_Call((args,kwargs),two=True ))\n cause=expected if isinstance(expected,Exception)else None\n actual=[self._call_matcher(c)for c in self.await_args_list]\n if cause or expected not in _AnyComparer(actual):\n expected_string=self._format_mock_call_signature(args,kwargs)\n raise AssertionError(\n '%s await not found'%expected_string\n )from cause\n \n def assert_has_awaits(self,calls,any_order=False ):\n ''\n\n\n\n\n\n\n\n\n\n \n expected=[self._call_matcher(c)for c in calls]\n cause=next((e for e in expected if isinstance(e,Exception)),None )\n all_awaits=_CallList(self._call_matcher(c)for c in self.await_args_list)\n if not any_order:\n if expected not in all_awaits:\n if cause is None :\n problem='Awaits not found.'\n else :\n problem=('Error processing expected awaits.\\n'\n 'Errors: {}').format(\n [e if isinstance(e,Exception)else None\n for e in expected])\n raise AssertionError(\n f'{problem}\\n'\n f'Expected: {_CallList(calls)}\\n'\n f'Actual: {self.await_args_list}'\n )from cause\n return\n \n all_awaits=list(all_awaits)\n \n not_found=[]\n for kall in expected:\n try :\n all_awaits.remove(kall)\n except ValueError:\n not_found.append(kall)\n if not_found:\n raise AssertionError(\n '%r not all found in await list'%(tuple(not_found),)\n )from cause\n \n def assert_not_awaited(self):\n ''\n\n \n if self.await_count !=0:\n msg=(f\"Expected {self._mock_name or 'mock'} to not have been awaited.\"\n f\" Awaited {self.await_count} times.\")\n raise AssertionError(msg)\n \n def reset_mock(self,/,*args,**kwargs):\n ''\n\n \n super().reset_mock(*args,**kwargs)\n self.await_count=0\n self.await_args=None\n self.await_args_list=_CallList()\n \n \nclass AsyncMock(AsyncMockMixin,AsyncMagicMixin,Mock):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n \n \nclass _ANY(object):\n ''\n \n def __eq__(self,other):\n return True\n \n def __ne__(self,other):\n return False\n \n def __repr__(self):\n return ''\n \nANY=_ANY()\n\n\n\ndef _format_call_signature(name,args,kwargs):\n message='%s(%%s)'%name\n formatted_args=''\n args_string=', '.join([repr(arg)for arg in args])\n kwargs_string=', '.join([\n '%s=%r'%(key,value)for key,value in kwargs.items()\n ])\n if args_string:\n 
formatted_args=args_string\n if kwargs_string:\n if formatted_args:\n formatted_args +=', '\n formatted_args +=kwargs_string\n \n return message %formatted_args\n \n \n \nclass _Call(tuple):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n def __new__(cls,value=(),name='',parent=None ,two=False ,\n from_kall=True ):\n args=()\n kwargs={}\n _len=len(value)\n if _len ==3:\n name,args,kwargs=value\n elif _len ==2:\n first,second=value\n if isinstance(first,str):\n name=first\n if isinstance(second,tuple):\n args=second\n else :\n kwargs=second\n else :\n args,kwargs=first,second\n elif _len ==1:\n value,=value\n if isinstance(value,str):\n name=value\n elif isinstance(value,tuple):\n args=value\n else :\n kwargs=value\n \n if two:\n return tuple.__new__(cls,(args,kwargs))\n \n return tuple.__new__(cls,(name,args,kwargs))\n \n \n def __init__(self,value=(),name=None ,parent=None ,two=False ,\n from_kall=True ):\n self._mock_name=name\n self._mock_parent=parent\n self._mock_from_kall=from_kall\n \n \n def __eq__(self,other):\n try :\n len_other=len(other)\n except TypeError:\n return NotImplemented\n \n self_name=''\n if len(self)==2:\n self_args,self_kwargs=self\n else :\n self_name,self_args,self_kwargs=self\n \n if (getattr(self,'_mock_parent',None )and getattr(other,'_mock_parent',None )\n and self._mock_parent !=other._mock_parent):\n return False\n \n other_name=''\n if len_other ==0:\n other_args,other_kwargs=(),{}\n elif len_other ==3:\n other_name,other_args,other_kwargs=other\n elif len_other ==1:\n value,=other\n if isinstance(value,tuple):\n other_args=value\n other_kwargs={}\n elif isinstance(value,str):\n other_name=value\n other_args,other_kwargs=(),{}\n else :\n other_args=()\n other_kwargs=value\n elif len_other ==2:\n \n first,second=other\n if isinstance(first,str):\n other_name=first\n if isinstance(second,tuple):\n other_args,other_kwargs=second,{}\n else :\n other_args,other_kwargs=(),second\n else :\n other_args,other_kwargs=first,second\n else :\n return False\n \n if self_name and other_name !=self_name:\n return False\n \n \n return (other_args,other_kwargs)==(self_args,self_kwargs)\n \n \n __ne__=object.__ne__\n \n \n def __call__(self,/,*args,**kwargs):\n if self._mock_name is None :\n return _Call(('',args,kwargs),name='()')\n \n name=self._mock_name+'()'\n return _Call((self._mock_name,args,kwargs),name=name,parent=self)\n \n \n def __getattr__(self,attr):\n if self._mock_name is None :\n return _Call(name=attr,from_kall=False )\n name='%s.%s'%(self._mock_name,attr)\n return _Call(name=name,parent=self,from_kall=False )\n \n \n def __getattribute__(self,attr):\n if attr in tuple.__dict__:\n raise AttributeError\n return tuple.__getattribute__(self,attr)\n \n \n def _get_call_arguments(self):\n if len(self)==2:\n args,kwargs=self\n else :\n name,args,kwargs=self\n \n return args,kwargs\n \n @property\n def args(self):\n return self._get_call_arguments()[0]\n \n @property\n def kwargs(self):\n return self._get_call_arguments()[1]\n \n def __repr__(self):\n if not self._mock_from_kall:\n name=self._mock_name or 'call'\n if name.startswith('()'):\n name='call%s'%name\n return name\n \n if len(self)==2:\n name='call'\n args,kwargs=self\n else :\n name,args,kwargs=self\n if not name:\n name='call'\n elif not name.startswith('()'):\n name='call.%s'%name\n else :\n name='call%s'%name\n return _format_call_signature(name,args,kwargs)\n \n \n def call_list(self):\n ''\n\n \n vals=[]\n thing=self\n while thing is not None :\n if thing._mock_from_kall:\n vals.append(thing)\n 
thing=thing._mock_parent\n return _CallList(reversed(vals))\n \n \ncall=_Call(from_kall=False )\n\n\ndef create_autospec(spec,spec_set=False ,instance=False ,_parent=None ,\n_name=None ,**kwargs):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if _is_list(spec):\n \n \n spec=type(spec)\n \n is_type=isinstance(spec,type)\n is_async_func=_is_async_func(spec)\n _kwargs={'spec':spec}\n if spec_set:\n _kwargs={'spec_set':spec}\n elif spec is None :\n \n _kwargs={}\n if _kwargs and instance:\n _kwargs['_spec_as_instance']=True\n \n _kwargs.update(kwargs)\n \n Klass=MagicMock\n if inspect.isdatadescriptor(spec):\n \n \n _kwargs={}\n elif is_async_func:\n if instance:\n raise RuntimeError(\"Instance can not be True when create_autospec \"\n \"is mocking an async function\")\n Klass=AsyncMock\n elif not _callable(spec):\n Klass=NonCallableMagicMock\n elif is_type and instance and not _instance_callable(spec):\n Klass=NonCallableMagicMock\n \n _name=_kwargs.pop('name',_name)\n \n _new_name=_name\n if _parent is None :\n \n _new_name=''\n \n mock=Klass(parent=_parent,_new_parent=_parent,_new_name=_new_name,\n name=_name,**_kwargs)\n \n if isinstance(spec,FunctionTypes):\n \n \n mock=_set_signature(mock,spec)\n if is_async_func:\n _setup_async_mock(mock)\n else :\n _check_signature(spec,mock,is_type,instance)\n \n if _parent is not None and not instance:\n _parent._mock_children[_name]=mock\n \n if is_type and not instance and 'return_value'not in kwargs:\n mock.return_value=create_autospec(spec,spec_set,instance=True ,\n _name='()',_parent=mock)\n \n for entry in dir(spec):\n if _is_magic(entry):\n \n continue\n \n \n \n \n \n \n \n \n \n \n try :\n original=getattr(spec,entry)\n except AttributeError:\n continue\n \n kwargs={'spec':original}\n if spec_set:\n kwargs={'spec_set':original}\n \n if not isinstance(original,FunctionTypes):\n new=_SpecState(original,spec_set,mock,entry,instance)\n mock._mock_children[entry]=new\n else :\n parent=mock\n if isinstance(spec,FunctionTypes):\n parent=mock.mock\n \n skipfirst=_must_skip(spec,entry,is_type)\n kwargs['_eat_self']=skipfirst\n if iscoroutinefunction(original):\n child_klass=AsyncMock\n else :\n child_klass=MagicMock\n new=child_klass(parent=parent,name=entry,_new_name=entry,\n _new_parent=parent,\n **kwargs)\n mock._mock_children[entry]=new\n _check_signature(original,new,skipfirst=skipfirst)\n \n \n \n \n \n if isinstance(new,FunctionTypes):\n setattr(mock,entry,new)\n \n return mock\n \n \ndef _must_skip(spec,entry,is_type):\n ''\n\n\n \n if not isinstance(spec,type):\n if entry in getattr(spec,'__dict__',{}):\n \n return False\n spec=spec.__class__\n \n for klass in spec.__mro__:\n result=klass.__dict__.get(entry,DEFAULT)\n if result is DEFAULT:\n continue\n if isinstance(result,(staticmethod,classmethod)):\n return False\n elif isinstance(result,FunctionTypes):\n \n \n return is_type\n else :\n return False\n \n \n return is_type\n \n \nclass _SpecState(object):\n\n def __init__(self,spec,spec_set=False ,parent=None ,\n name=None ,ids=None ,instance=False ):\n self.spec=spec\n self.ids=ids\n self.spec_set=spec_set\n self.parent=parent\n self.instance=instance\n self.name=name\n \n \nFunctionTypes=(\n\ntype(create_autospec),\n\ntype(ANY.__eq__),\n)\n\n\nfile_spec=None\n\n\ndef _to_stream(read_data):\n if isinstance(read_data,bytes):\n return io.BytesIO(read_data)\n else :\n return io.StringIO(read_data)\n \n \ndef mock_open(mock=None ,read_data=''):\n ''\n\n\n\n\n\n\n\n\n\n \n _read_data=_to_stream(read_data)\n _state=[_read_data,None ]\n \n def 
_readlines_side_effect(*args,**kwargs):\n if handle.readlines.return_value is not None :\n return handle.readlines.return_value\n return _state[0].readlines(*args,**kwargs)\n \n def _read_side_effect(*args,**kwargs):\n if handle.read.return_value is not None :\n return handle.read.return_value\n return _state[0].read(*args,**kwargs)\n \n def _readline_side_effect(*args,**kwargs):\n yield from _iter_side_effect()\n while True :\n yield _state[0].readline(*args,**kwargs)\n \n def _iter_side_effect():\n if handle.readline.return_value is not None :\n while True :\n yield handle.readline.return_value\n for line in _state[0]:\n yield line\n \n def _next_side_effect():\n if handle.readline.return_value is not None :\n return handle.readline.return_value\n return next(_state[0])\n \n global file_spec\n if file_spec is None :\n import _io\n file_spec=list(set(dir(_io.TextIOWrapper)).union(set(dir(_io.BytesIO))))\n \n if mock is None :\n mock=MagicMock(name='open',spec=open)\n \n handle=MagicMock(spec=file_spec)\n handle.__enter__.return_value=handle\n \n handle.write.return_value=None\n handle.read.return_value=None\n handle.readline.return_value=None\n handle.readlines.return_value=None\n \n handle.read.side_effect=_read_side_effect\n _state[1]=_readline_side_effect()\n handle.readline.side_effect=_state[1]\n handle.readlines.side_effect=_readlines_side_effect\n handle.__iter__.side_effect=_iter_side_effect\n handle.__next__.side_effect=_next_side_effect\n \n def reset_data(*args,**kwargs):\n _state[0]=_to_stream(read_data)\n if handle.readline.side_effect ==_state[1]:\n \n _state[1]=_readline_side_effect()\n handle.readline.side_effect=_state[1]\n return DEFAULT\n \n mock.side_effect=reset_data\n mock.return_value=handle\n return mock\n \n \nclass PropertyMock(Mock):\n ''\n\n\n\n\n\n\n \n def _get_child_mock(self,/,**kwargs):\n return MagicMock(**kwargs)\n \n def __get__(self,obj,obj_type=None ):\n return self()\n def __set__(self,obj,val):\n self(val)\n \n \ndef seal(mock):\n ''\n\n\n\n\n\n\n\n \n mock._mock_sealed=True\n for attr in dir(mock):\n try :\n m=getattr(mock,attr)\n except AttributeError:\n continue\n if not isinstance(m,NonCallableMock):\n continue\n if m._mock_new_parent is mock:\n seal(m)\n \n \nclass _AsyncIterator:\n ''\n\n \n def __init__(self,iterator):\n self.iterator=iterator\n code_mock=NonCallableMock(spec_set=CodeType)\n code_mock.co_flags=inspect.CO_ITERABLE_COROUTINE\n self.__dict__['__code__']=code_mock\n \n async def __anext__(self):\n try :\n return next(self.iterator)\n except StopIteration:\n pass\n raise StopAsyncIteration\n", ["_io", "asyncio", "builtins", "contextlib", "functools", "inspect", "io", "pprint", "sys", "types", "unittest.util"]], "unittest.result": [".py", "''\n\nimport io\nimport sys\nimport traceback\n\nfrom . 
import util\nfrom functools import wraps\n\n__unittest=True\n\ndef failfast(method):\n @wraps(method)\n def inner(self,*args,**kw):\n if getattr(self,'failfast',False ):\n self.stop()\n return method(self,*args,**kw)\n return inner\n \nSTDOUT_LINE='\\nStdout:\\n%s'\nSTDERR_LINE='\\nStderr:\\n%s'\n\n\nclass TestResult(object):\n ''\n\n\n\n\n\n\n\n\n \n _previousTestClass=None\n _testRunEntered=False\n _moduleSetUpFailed=False\n def __init__(self,stream=None ,descriptions=None ,verbosity=None ):\n self.failfast=False\n self.failures=[]\n self.errors=[]\n self.testsRun=0\n self.skipped=[]\n self.expectedFailures=[]\n self.unexpectedSuccesses=[]\n self.shouldStop=False\n self.buffer=False\n self.tb_locals=False\n self._stdout_buffer=None\n self._stderr_buffer=None\n self._original_stdout=sys.stdout\n self._original_stderr=sys.stderr\n self._mirrorOutput=False\n \n def printErrors(self):\n ''\n \n def startTest(self,test):\n ''\n self.testsRun +=1\n self._mirrorOutput=False\n self._setupStdout()\n \n def _setupStdout(self):\n if self.buffer:\n if self._stderr_buffer is None :\n self._stderr_buffer=io.StringIO()\n self._stdout_buffer=io.StringIO()\n sys.stdout=self._stdout_buffer\n sys.stderr=self._stderr_buffer\n \n def startTestRun(self):\n ''\n\n\n \n \n def stopTest(self,test):\n ''\n self._restoreStdout()\n self._mirrorOutput=False\n \n def _restoreStdout(self):\n if self.buffer:\n if self._mirrorOutput:\n output=sys.stdout.getvalue()\n error=sys.stderr.getvalue()\n if output:\n if not output.endswith('\\n'):\n output +='\\n'\n self._original_stdout.write(STDOUT_LINE %output)\n if error:\n if not error.endswith('\\n'):\n error +='\\n'\n self._original_stderr.write(STDERR_LINE %error)\n \n sys.stdout=self._original_stdout\n sys.stderr=self._original_stderr\n self._stdout_buffer.seek(0)\n self._stdout_buffer.truncate()\n self._stderr_buffer.seek(0)\n self._stderr_buffer.truncate()\n \n def stopTestRun(self):\n ''\n\n\n \n \n @failfast\n def addError(self,test,err):\n ''\n\n \n self.errors.append((test,self._exc_info_to_string(err,test)))\n self._mirrorOutput=True\n \n @failfast\n def addFailure(self,test,err):\n ''\n \n self.failures.append((test,self._exc_info_to_string(err,test)))\n self._mirrorOutput=True\n \n def addSubTest(self,test,subtest,err):\n ''\n\n\n \n \n \n if err is not None :\n if getattr(self,'failfast',False ):\n self.stop()\n if issubclass(err[0],test.failureException):\n errors=self.failures\n else :\n errors=self.errors\n errors.append((subtest,self._exc_info_to_string(err,test)))\n self._mirrorOutput=True\n \n def addSuccess(self,test):\n ''\n pass\n \n def addSkip(self,test,reason):\n ''\n self.skipped.append((test,reason))\n \n def addExpectedFailure(self,test,err):\n ''\n self.expectedFailures.append(\n (test,self._exc_info_to_string(err,test)))\n \n @failfast\n def addUnexpectedSuccess(self,test):\n ''\n self.unexpectedSuccesses.append(test)\n \n def wasSuccessful(self):\n ''\n \n \n \n return ((len(self.failures)==len(self.errors)==0)and\n (not hasattr(self,'unexpectedSuccesses')or\n len(self.unexpectedSuccesses)==0))\n \n def stop(self):\n ''\n self.shouldStop=True\n \n def _exc_info_to_string(self,err,test):\n ''\n exctype,value,tb=err\n \n while tb and self._is_relevant_tb_level(tb):\n tb=tb.tb_next\n \n if exctype is test.failureException:\n \n length=self._count_relevant_tb_levels(tb)\n else :\n length=None\n tb_e=traceback.TracebackException(\n exctype,value,tb,limit=length,capture_locals=self.tb_locals)\n msgLines=list(tb_e.format())\n \n if self.buffer:\n 
output=sys.stdout.getvalue()\n error=sys.stderr.getvalue()\n if output:\n if not output.endswith('\\n'):\n output +='\\n'\n msgLines.append(STDOUT_LINE %output)\n if error:\n if not error.endswith('\\n'):\n error +='\\n'\n msgLines.append(STDERR_LINE %error)\n return ''.join(msgLines)\n \n \n def _is_relevant_tb_level(self,tb):\n return '__unittest'in tb.tb_frame.f_globals\n \n def _count_relevant_tb_levels(self,tb):\n length=0\n while tb and not self._is_relevant_tb_level(tb):\n length +=1\n tb=tb.tb_next\n return length\n \n def __repr__(self):\n return (\"<%s run=%i errors=%i failures=%i>\"%\n (util.strclass(self.__class__),self.testsRun,len(self.errors),\n len(self.failures)))\n", ["functools", "io", "sys", "traceback", "unittest", "unittest.util"]], "unittest.runner": [".py", "''\n\nimport sys\nimport time\nimport warnings\n\nfrom . import result\nfrom .signals import registerResult\n\n__unittest=True\n\n\nclass _WritelnDecorator(object):\n ''\n def __init__(self,stream):\n self.stream=stream\n \n def __getattr__(self,attr):\n if attr in ('stream','__getstate__'):\n raise AttributeError(attr)\n return getattr(self.stream,attr)\n \n def writeln(self,arg=None ):\n if arg:\n self.write(arg)\n self.write('\\n')\n \n \nclass TextTestResult(result.TestResult):\n ''\n\n\n \n separator1='='*70\n separator2='-'*70\n \n def __init__(self,stream,descriptions,verbosity):\n super(TextTestResult,self).__init__(stream,descriptions,verbosity)\n self.stream=stream\n self.showAll=verbosity >1\n self.dots=verbosity ==1\n self.descriptions=descriptions\n \n def getDescription(self,test):\n doc_first_line=test.shortDescription()\n if self.descriptions and doc_first_line:\n return '\\n'.join((str(test),doc_first_line))\n else :\n return str(test)\n \n def startTest(self,test):\n super(TextTestResult,self).startTest(test)\n if self.showAll:\n self.stream.write(self.getDescription(test))\n self.stream.write(\" ... 
\")\n self.stream.flush()\n \n def addSuccess(self,test):\n super(TextTestResult,self).addSuccess(test)\n if self.showAll:\n self.stream.writeln(\"ok\")\n elif self.dots:\n self.stream.write('.')\n self.stream.flush()\n \n def addError(self,test,err):\n super(TextTestResult,self).addError(test,err)\n if self.showAll:\n self.stream.writeln(\"ERROR\")\n elif self.dots:\n self.stream.write('E')\n self.stream.flush()\n \n def addFailure(self,test,err):\n super(TextTestResult,self).addFailure(test,err)\n if self.showAll:\n self.stream.writeln(\"FAIL\")\n elif self.dots:\n self.stream.write('F')\n self.stream.flush()\n \n def addSkip(self,test,reason):\n super(TextTestResult,self).addSkip(test,reason)\n if self.showAll:\n self.stream.writeln(\"skipped {0!r}\".format(reason))\n elif self.dots:\n self.stream.write(\"s\")\n self.stream.flush()\n \n def addExpectedFailure(self,test,err):\n super(TextTestResult,self).addExpectedFailure(test,err)\n if self.showAll:\n self.stream.writeln(\"expected failure\")\n elif self.dots:\n self.stream.write(\"x\")\n self.stream.flush()\n \n def addUnexpectedSuccess(self,test):\n super(TextTestResult,self).addUnexpectedSuccess(test)\n if self.showAll:\n self.stream.writeln(\"unexpected success\")\n elif self.dots:\n self.stream.write(\"u\")\n self.stream.flush()\n \n def printErrors(self):\n if self.dots or self.showAll:\n self.stream.writeln()\n self.printErrorList('ERROR',self.errors)\n self.printErrorList('FAIL',self.failures)\n \n def printErrorList(self,flavour,errors):\n for test,err in errors:\n self.stream.writeln(self.separator1)\n self.stream.writeln(\"%s: %s\"%(flavour,self.getDescription(test)))\n self.stream.writeln(self.separator2)\n self.stream.writeln(\"%s\"%err)\n \n \nclass TextTestRunner(object):\n ''\n\n\n\n \n resultclass=TextTestResult\n \n def __init__(self,stream=None ,descriptions=True ,verbosity=1,\n failfast=False ,buffer=False ,resultclass=None ,warnings=None ,\n *,tb_locals=False ):\n ''\n\n\n\n \n if stream is None :\n stream=sys.stderr\n self.stream=_WritelnDecorator(stream)\n self.descriptions=descriptions\n self.verbosity=verbosity\n self.failfast=failfast\n self.buffer=buffer\n self.tb_locals=tb_locals\n self.warnings=warnings\n if resultclass is not None :\n self.resultclass=resultclass\n \n def _makeResult(self):\n return self.resultclass(self.stream,self.descriptions,self.verbosity)\n \n def run(self,test):\n ''\n result=self._makeResult()\n registerResult(result)\n result.failfast=self.failfast\n result.buffer=self.buffer\n result.tb_locals=self.tb_locals\n with warnings.catch_warnings():\n if self.warnings:\n \n warnings.simplefilter(self.warnings)\n \n \n \n \n \n if self.warnings in ['default','always']:\n warnings.filterwarnings('module',\n category=DeprecationWarning,\n message=r'Please use assert\\w+ instead.')\n startTime=time.perf_counter()\n startTestRun=getattr(result,'startTestRun',None )\n if startTestRun is not None :\n startTestRun()\n try :\n test(result)\n finally :\n stopTestRun=getattr(result,'stopTestRun',None )\n if stopTestRun is not None :\n stopTestRun()\n stopTime=time.perf_counter()\n timeTaken=stopTime -startTime\n result.printErrors()\n if hasattr(result,'separator2'):\n self.stream.writeln(result.separator2)\n run=result.testsRun\n self.stream.writeln(\"Ran %d test%s in %.3fs\"%\n (run,run !=1 and \"s\"or \"\",timeTaken))\n self.stream.writeln()\n \n expectedFails=unexpectedSuccesses=skipped=0\n try :\n results=map(len,(result.expectedFailures,\n result.unexpectedSuccesses,\n result.skipped))\n 
except AttributeError:\n pass\n else :\n expectedFails,unexpectedSuccesses,skipped=results\n \n infos=[]\n if not result.wasSuccessful():\n self.stream.write(\"FAILED\")\n failed,errored=len(result.failures),len(result.errors)\n if failed:\n infos.append(\"failures=%d\"%failed)\n if errored:\n infos.append(\"errors=%d\"%errored)\n else :\n self.stream.write(\"OK\")\n if skipped:\n infos.append(\"skipped=%d\"%skipped)\n if expectedFails:\n infos.append(\"expected failures=%d\"%expectedFails)\n if unexpectedSuccesses:\n infos.append(\"unexpected successes=%d\"%unexpectedSuccesses)\n if infos:\n self.stream.writeln(\" (%s)\"%(\", \".join(infos),))\n else :\n self.stream.write(\"\\n\")\n return result\n", ["sys", "time", "unittest", "unittest.result", "unittest.signals", "warnings"]], "unittest.signals": [".py", "import signal\nimport weakref\n\nfrom functools import wraps\n\n__unittest=True\n\n\nclass _InterruptHandler(object):\n def __init__(self,default_handler):\n self.called=False\n self.original_handler=default_handler\n if isinstance(default_handler,int):\n if default_handler ==signal.SIG_DFL:\n \n default_handler=signal.default_int_handler\n elif default_handler ==signal.SIG_IGN:\n \n \n def default_handler(unused_signum,unused_frame):\n pass\n else :\n raise TypeError(\"expected SIGINT signal handler to be \"\n \"signal.SIG_IGN, signal.SIG_DFL, or a \"\n \"callable object\")\n self.default_handler=default_handler\n \n def __call__(self,signum,frame):\n installed_handler=signal.getsignal(signal.SIGINT)\n if installed_handler is not self:\n \n \n self.default_handler(signum,frame)\n \n if self.called:\n self.default_handler(signum,frame)\n self.called=True\n for result in _results.keys():\n result.stop()\n \n_results=weakref.WeakKeyDictionary()\ndef registerResult(result):\n _results[result]=1\n \ndef removeResult(result):\n return bool(_results.pop(result,None ))\n \n_interrupt_handler=None\ndef installHandler():\n global _interrupt_handler\n if _interrupt_handler is None :\n default_handler=signal.getsignal(signal.SIGINT)\n _interrupt_handler=_InterruptHandler(default_handler)\n signal.signal(signal.SIGINT,_interrupt_handler)\n \n \ndef removeHandler(method=None ):\n if method is not None :\n @wraps(method)\n def inner(*args,**kwargs):\n initial=signal.getsignal(signal.SIGINT)\n removeHandler()\n try :\n return method(*args,**kwargs)\n finally :\n signal.signal(signal.SIGINT,initial)\n return inner\n \n global _interrupt_handler\n if _interrupt_handler is not None :\n signal.signal(signal.SIGINT,_interrupt_handler.original_handler)\n", ["functools", "signal", "weakref"]], "unittest.suite": [".py", "''\n\nimport sys\n\nfrom . import case\nfrom . 
import util\n\n__unittest=True\n\n\ndef _call_if_exists(parent,attr):\n func=getattr(parent,attr,lambda :None )\n func()\n \n \nclass BaseTestSuite(object):\n ''\n \n _cleanup=True\n \n def __init__(self,tests=()):\n self._tests=[]\n self._removed_tests=0\n self.addTests(tests)\n \n def __repr__(self):\n return \"<%s tests=%s>\"%(util.strclass(self.__class__),list(self))\n \n def __eq__(self,other):\n if not isinstance(other,self.__class__):\n return NotImplemented\n return list(self)==list(other)\n \n def __iter__(self):\n return iter(self._tests)\n \n def countTestCases(self):\n cases=self._removed_tests\n for test in self:\n if test:\n cases +=test.countTestCases()\n return cases\n \n def addTest(self,test):\n \n if not callable(test):\n raise TypeError(\"{} is not callable\".format(repr(test)))\n if isinstance(test,type)and issubclass(test,\n (case.TestCase,TestSuite)):\n raise TypeError(\"TestCases and TestSuites must be instantiated \"\n \"before passing them to addTest()\")\n self._tests.append(test)\n \n def addTests(self,tests):\n if isinstance(tests,str):\n raise TypeError(\"tests must be an iterable of tests, not a string\")\n for test in tests:\n self.addTest(test)\n \n def run(self,result):\n for index,test in enumerate(self):\n if result.shouldStop:\n break\n test(result)\n if self._cleanup:\n self._removeTestAtIndex(index)\n return result\n \n def _removeTestAtIndex(self,index):\n ''\n try :\n test=self._tests[index]\n except TypeError:\n \n pass\n else :\n \n \n if hasattr(test,'countTestCases'):\n self._removed_tests +=test.countTestCases()\n self._tests[index]=None\n \n def __call__(self,*args,**kwds):\n return self.run(*args,**kwds)\n \n def debug(self):\n ''\n for test in self:\n test.debug()\n \n \nclass TestSuite(BaseTestSuite):\n ''\n\n\n\n\n\n\n \n \n def run(self,result,debug=False ):\n topLevel=False\n if getattr(result,'_testRunEntered',False )is False :\n result._testRunEntered=topLevel=True\n \n for index,test in enumerate(self):\n if result.shouldStop:\n break\n \n if _isnotsuite(test):\n self._tearDownPreviousClass(test,result)\n self._handleModuleFixture(test,result)\n self._handleClassSetUp(test,result)\n result._previousTestClass=test.__class__\n \n if (getattr(test.__class__,'_classSetupFailed',False )or\n getattr(result,'_moduleSetUpFailed',False )):\n continue\n \n if not debug:\n test(result)\n else :\n test.debug()\n \n if self._cleanup:\n self._removeTestAtIndex(index)\n \n if topLevel:\n self._tearDownPreviousClass(None ,result)\n self._handleModuleTearDown(result)\n result._testRunEntered=False\n return result\n \n def debug(self):\n ''\n debug=_DebugResult()\n self.run(debug,True )\n \n \n \n def _handleClassSetUp(self,test,result):\n previousClass=getattr(result,'_previousTestClass',None )\n currentClass=test.__class__\n if currentClass ==previousClass:\n return\n if result._moduleSetUpFailed:\n return\n if getattr(currentClass,\"__unittest_skip__\",False ):\n return\n \n try :\n currentClass._classSetupFailed=False\n except TypeError:\n \n \n pass\n \n setUpClass=getattr(currentClass,'setUpClass',None )\n if setUpClass is not None :\n _call_if_exists(result,'_setupStdout')\n try :\n setUpClass()\n except Exception as e:\n if isinstance(result,_DebugResult):\n raise\n currentClass._classSetupFailed=True\n className=util.strclass(currentClass)\n self._createClassOrModuleLevelException(result,e,\n 'setUpClass',\n className)\n finally :\n _call_if_exists(result,'_restoreStdout')\n if currentClass._classSetupFailed is True :\n 
currentClass.doClassCleanups()\n if len(currentClass.tearDown_exceptions)>0:\n for exc in currentClass.tearDown_exceptions:\n self._createClassOrModuleLevelException(\n result,exc[1],'setUpClass',className,\n info=exc)\n \n def _get_previous_module(self,result):\n previousModule=None\n previousClass=getattr(result,'_previousTestClass',None )\n if previousClass is not None :\n previousModule=previousClass.__module__\n return previousModule\n \n \n def _handleModuleFixture(self,test,result):\n previousModule=self._get_previous_module(result)\n currentModule=test.__class__.__module__\n if currentModule ==previousModule:\n return\n \n self._handleModuleTearDown(result)\n \n \n result._moduleSetUpFailed=False\n try :\n module=sys.modules[currentModule]\n except KeyError:\n return\n setUpModule=getattr(module,'setUpModule',None )\n if setUpModule is not None :\n _call_if_exists(result,'_setupStdout')\n try :\n setUpModule()\n except Exception as e:\n try :\n case.doModuleCleanups()\n except Exception as exc:\n self._createClassOrModuleLevelException(result,exc,\n 'setUpModule',\n currentModule)\n if isinstance(result,_DebugResult):\n raise\n result._moduleSetUpFailed=True\n self._createClassOrModuleLevelException(result,e,\n 'setUpModule',\n currentModule)\n finally :\n _call_if_exists(result,'_restoreStdout')\n \n def _createClassOrModuleLevelException(self,result,exc,method_name,\n parent,info=None ):\n errorName=f'{method_name} ({parent})'\n self._addClassOrModuleLevelException(result,exc,errorName,info)\n \n def _addClassOrModuleLevelException(self,result,exception,errorName,\n info=None ):\n error=_ErrorHolder(errorName)\n addSkip=getattr(result,'addSkip',None )\n if addSkip is not None and isinstance(exception,case.SkipTest):\n addSkip(error,str(exception))\n else :\n if not info:\n result.addError(error,sys.exc_info())\n else :\n result.addError(error,info)\n \n def _handleModuleTearDown(self,result):\n previousModule=self._get_previous_module(result)\n if previousModule is None :\n return\n if result._moduleSetUpFailed:\n return\n \n try :\n module=sys.modules[previousModule]\n except KeyError:\n return\n \n tearDownModule=getattr(module,'tearDownModule',None )\n if tearDownModule is not None :\n _call_if_exists(result,'_setupStdout')\n try :\n tearDownModule()\n except Exception as e:\n if isinstance(result,_DebugResult):\n raise\n self._createClassOrModuleLevelException(result,e,\n 'tearDownModule',\n previousModule)\n finally :\n _call_if_exists(result,'_restoreStdout')\n try :\n case.doModuleCleanups()\n except Exception as e:\n self._createClassOrModuleLevelException(result,e,\n 'tearDownModule',\n previousModule)\n \n def _tearDownPreviousClass(self,test,result):\n previousClass=getattr(result,'_previousTestClass',None )\n currentClass=test.__class__\n if currentClass ==previousClass:\n return\n if getattr(previousClass,'_classSetupFailed',False ):\n return\n if getattr(result,'_moduleSetUpFailed',False ):\n return\n if getattr(previousClass,\"__unittest_skip__\",False ):\n return\n \n tearDownClass=getattr(previousClass,'tearDownClass',None )\n if tearDownClass is not None :\n _call_if_exists(result,'_setupStdout')\n try :\n tearDownClass()\n except Exception as e:\n if isinstance(result,_DebugResult):\n raise\n className=util.strclass(previousClass)\n self._createClassOrModuleLevelException(result,e,\n 'tearDownClass',\n className)\n finally :\n _call_if_exists(result,'_restoreStdout')\n previousClass.doClassCleanups()\n if len(previousClass.tearDown_exceptions)>0:\n for exc in 
previousClass.tearDown_exceptions:\n className=util.strclass(previousClass)\n self._createClassOrModuleLevelException(result,exc[1],\n 'tearDownClass',\n className,\n info=exc)\n \n \nclass _ErrorHolder(object):\n ''\n\n\n\n \n \n \n \n \n failureException=None\n \n def __init__(self,description):\n self.description=description\n \n def id(self):\n return self.description\n \n def shortDescription(self):\n return None\n \n def __repr__(self):\n return \"<ErrorHolder description=%r>\"%(self.description,)\n \n def __str__(self):\n return self.id()\n \n def run(self,result):\n \n \n pass\n \n def __call__(self,result):\n return self.run(result)\n \n def countTestCases(self):\n return 0\n \ndef _isnotsuite(test):\n ''\n try :\n iter(test)\n except TypeError:\n return True\n return False\n \n \nclass _DebugResult(object):\n ''\n _previousTestClass=None\n _moduleSetUpFailed=False\n shouldStop=False\n", ["sys", "unittest", "unittest.case", "unittest.util"]], "unittest.util": [".py", "''\n\nfrom collections import namedtuple,Counter\nfrom os.path import commonprefix\n\n__unittest=True\n\n_MAX_LENGTH=80\n_PLACEHOLDER_LEN=12\n_MIN_BEGIN_LEN=5\n_MIN_END_LEN=5\n_MIN_COMMON_LEN=5\n_MIN_DIFF_LEN=_MAX_LENGTH -\\\n(_MIN_BEGIN_LEN+_PLACEHOLDER_LEN+_MIN_COMMON_LEN+\n_PLACEHOLDER_LEN+_MIN_END_LEN)\nassert _MIN_DIFF_LEN >=0\n\ndef _shorten(s,prefixlen,suffixlen):\n skip=len(s)-prefixlen -suffixlen\n if skip >_PLACEHOLDER_LEN:\n s='%s[%d chars]%s'%(s[:prefixlen],skip,s[len(s)-suffixlen:])\n return s\n \ndef _common_shorten_repr(*args):\n args=tuple(map(safe_repr,args))\n maxlen=max(map(len,args))\n if maxlen <=_MAX_LENGTH:\n return args\n \n prefix=commonprefix(args)\n prefixlen=len(prefix)\n \n common_len=_MAX_LENGTH -\\\n (maxlen -prefixlen+_MIN_BEGIN_LEN+_PLACEHOLDER_LEN)\n if common_len >_MIN_COMMON_LEN:\n assert _MIN_BEGIN_LEN+_PLACEHOLDER_LEN+_MIN_COMMON_LEN+\\\n (maxlen -prefixlen)<_MAX_LENGTH\n prefix=_shorten(prefix,_MIN_BEGIN_LEN,common_len)\n return tuple(prefix+s[prefixlen:]for s in args)\n \n prefix=_shorten(prefix,_MIN_BEGIN_LEN,_MIN_COMMON_LEN)\n return tuple(prefix+_shorten(s[prefixlen:],_MIN_DIFF_LEN,_MIN_END_LEN)\n for s in args)\n \ndef safe_repr(obj,short=False ):\n try :\n result=repr(obj)\n except Exception:\n result=object.__repr__(obj)\n if not short or len(result)<_MAX_LENGTH:\n return result\n return result[:_MAX_LENGTH]+' [truncated]...'\n \ndef strclass(cls):\n return \"%s.%s\"%(cls.__module__,cls.__qualname__)\n \ndef sorted_list_difference(expected,actual):\n ''\n\n\n\n\n\n \n i=j=0\n missing=[]\n unexpected=[]\n while True :\n try :\n e=expected[i]\n a=actual[j]\n if e <a:\n missing.append(e)\n i +=1\n while expected[i]==e:\n i +=1\n elif e >a:\n unexpected.append(a)\n j +=1\n while actual[j]==a:\n j +=1\n else :\n i +=1\n try :\n while expected[i]==e:\n i +=1\n finally :\n j +=1\n while actual[j]==a:\n j +=1\n except IndexError:\n missing.extend(expected[i:])\n unexpected.extend(actual[j:])\n break\n return missing,unexpected\n \n \ndef unorderable_list_difference(expected,actual):\n ''\n\n\n\n \n missing=[]\n while expected:\n item=expected.pop()\n try :\n actual.remove(item)\n except ValueError:\n missing.append(item)\n \n \n return missing,actual\n \ndef three_way_cmp(x,y):\n ''\n return (x >y)-(x =0:\n delim=min(delim,wdelim)\n return url[start:delim],url[delim:]\n \ndef urlsplit(url,scheme='',allow_fragments=True ):\n ''\n\n\n\n \n url,scheme,_coerce_result=_coerce_args(url,scheme)\n allow_fragments=bool(allow_fragments)\n key=url,scheme,allow_fragments,type(url),type(scheme)\n cached=_parse_cache.get(key,None )\n if cached:\n return _coerce_result(cached)\n if 
len(_parse_cache)>=MAX_CACHE_SIZE:\n clear_cache()\n netloc=query=fragment=''\n i=url.find(':')\n if i >0:\n if url[:i]=='http':\n scheme=url[:i].lower()\n url=url[i+1:]\n if url[:2]=='//':\n netloc,url=_splitnetloc(url,2)\n if (('['in netloc and ']'not in netloc)or\n (']'in netloc and '['not in netloc)):\n raise ValueError(\"Invalid IPv6 URL\")\n if allow_fragments and '#'in url:\n url,fragment=url.split('#',1)\n if '?'in url:\n url,query=url.split('?',1)\n v=SplitResult(scheme,netloc,url,query,fragment)\n _parse_cache[key]=v\n return _coerce_result(v)\n for c in url[:i]:\n if c not in scheme_chars:\n break\n else :\n \n \n rest=url[i+1:]\n if not rest or any(c not in '0123456789'for c in rest):\n \n scheme,url=url[:i].lower(),rest\n \n if url[:2]=='//':\n netloc,url=_splitnetloc(url,2)\n if (('['in netloc and ']'not in netloc)or\n (']'in netloc and '['not in netloc)):\n raise ValueError(\"Invalid IPv6 URL\")\n if allow_fragments and '#'in url:\n url,fragment=url.split('#',1)\n if '?'in url:\n url,query=url.split('?',1)\n v=SplitResult(scheme,netloc,url,query,fragment)\n _parse_cache[key]=v\n return _coerce_result(v)\n \ndef urlunparse(components):\n ''\n\n\n \n scheme,netloc,url,params,query,fragment,_coerce_result=(\n _coerce_args(*components))\n if params:\n url=\"%s;%s\"%(url,params)\n return _coerce_result(urlunsplit((scheme,netloc,url,query,fragment)))\n \ndef urlunsplit(components):\n ''\n\n\n\n \n scheme,netloc,url,query,fragment,_coerce_result=(\n _coerce_args(*components))\n if netloc or (scheme and scheme in uses_netloc and url[:2]!='//'):\n if url and url[:1]!='/':url='/'+url\n url='//'+(netloc or '')+url\n if scheme:\n url=scheme+':'+url\n if query:\n url=url+'?'+query\n if fragment:\n url=url+'#'+fragment\n return _coerce_result(url)\n \ndef urljoin(base,url,allow_fragments=True ):\n ''\n \n if not base:\n return url\n if not url:\n return base\n base,url,_coerce_result=_coerce_args(base,url)\n bscheme,bnetloc,bpath,bparams,bquery,bfragment=\\\n urlparse(base,'',allow_fragments)\n scheme,netloc,path,params,query,fragment=\\\n urlparse(url,bscheme,allow_fragments)\n if scheme !=bscheme or scheme not in uses_relative:\n return _coerce_result(url)\n if scheme in uses_netloc:\n if netloc:\n return _coerce_result(urlunparse((scheme,netloc,path,\n params,query,fragment)))\n netloc=bnetloc\n if path[:1]=='/':\n return _coerce_result(urlunparse((scheme,netloc,path,\n params,query,fragment)))\n if not path and not params:\n path=bpath\n params=bparams\n if not query:\n query=bquery\n return _coerce_result(urlunparse((scheme,netloc,path,\n params,query,fragment)))\n segments=bpath.split('/')[:-1]+path.split('/')\n \n if segments[-1]=='.':\n segments[-1]=''\n while '.'in segments:\n segments.remove('.')\n while 1:\n i=1\n n=len(segments)-1\n while i <n:\n if (segments[i]=='..'\n and segments[i-1]not in ('','..')):\n del segments[i-1:i+1]\n break\n i=i+1\n else :\n break\n if segments ==['','..']:\n segments[-1]=''\n elif len(segments)>=2 and segments[-1]=='..':\n segments[-2:]=['']\n return _coerce_result(urlunparse((scheme,netloc,'/'.join(segments),\n params,query,fragment)))\n \ndef urldefrag(url):\n ''\n\n\n\n\n \n url,_coerce_result=_coerce_args(url)\n if '#'in url:\n s,n,p,a,q,frag=urlparse(url)\n defrag=urlunparse((s,n,p,a,q,''))\n else :\n frag=''\n defrag=url\n return _coerce_result(DefragResult(defrag,frag))\n \n_hexdig='0123456789ABCDEFabcdef'\n_hextobyte={(a+b).encode():bytes([int(a+b,16)])\nfor a in _hexdig for b in _hexdig}\n\ndef unquote_to_bytes(string):\n ''\n \n \n if not string:\n \n string.split\n return b''\n if isinstance(string,str):\n string=string.encode('utf-8')\n bits=string.split(b'%')\n if len(bits)==1:\n return string\n res=[bits[0]]\n 
append=res.append\n for item in bits[1:]:\n try :\n append(_hextobyte[item[:2]])\n append(item[2:])\n except KeyError:\n append(b'%')\n append(item)\n return b''.join(res)\n \n_asciire=re.compile('([\\x00-\\x7f]+)')\n\ndef unquote(string,encoding='utf-8',errors='replace'):\n ''\n\n\n\n\n\n\n\n \n if '%'not in string:\n string.split\n return string\n if encoding is None :\n encoding='utf-8'\n if errors is None :\n errors='replace'\n bits=_asciire.split(string)\n res=[bits[0]]\n append=res.append\n for i in range(1,len(bits),2):\n append(unquote_to_bytes(bits[i]).decode(encoding,errors))\n append(bits[i+1])\n return ''.join(res)\n \ndef parse_qs(qs,keep_blank_values=False ,strict_parsing=False ,\nencoding='utf-8',errors='replace'):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n parsed_result={}\n pairs=parse_qsl(qs,keep_blank_values,strict_parsing,\n encoding=encoding,errors=errors)\n for name,value in pairs:\n if name in parsed_result:\n parsed_result[name].append(value)\n else :\n parsed_result[name]=[value]\n return parsed_result\n \ndef parse_qsl(qs,keep_blank_values=False ,strict_parsing=False ,\nencoding='utf-8',errors='replace'):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n qs,_coerce_result=_coerce_args(qs)\n pairs=[s2 for s1 in qs.split('&')for s2 in s1.split(';')]\n r=[]\n for name_value in pairs:\n if not name_value and not strict_parsing:\n continue\n nv=name_value.split('=',1)\n if len(nv)!=2:\n if strict_parsing:\n raise ValueError(\"bad query field: %r\"%(name_value,))\n \n if keep_blank_values:\n nv.append('')\n else :\n continue\n if len(nv[1])or keep_blank_values:\n name=nv[0].replace('+',' ')\n name=unquote(name,encoding=encoding,errors=errors)\n name=_coerce_result(name)\n value=nv[1].replace('+',' ')\n value=unquote(value,encoding=encoding,errors=errors)\n value=_coerce_result(value)\n r.append((name,value))\n return r\n \ndef unquote_plus(string,encoding='utf-8',errors='replace'):\n ''\n\n\n\n \n string=string.replace('+',' ')\n return unquote(string,encoding,errors)\n \n_ALWAYS_SAFE=frozenset(b'ABCDEFGHIJKLMNOPQRSTUVWXYZ'\nb'abcdefghijklmnopqrstuvwxyz'\nb'0123456789'\nb'_.-')\n_ALWAYS_SAFE_BYTES=bytes(_ALWAYS_SAFE)\n_safe_quoters={}\n\nclass Quoter(collections.defaultdict):\n ''\n\n\n\n \n \n \n def __init__(self,safe):\n ''\n self.safe=_ALWAYS_SAFE.union(safe)\n \n def __repr__(self):\n \n return \"\"%dict(self)\n \n def __missing__(self,b):\n \n res=chr(b)if b in self.safe else '%{:02X}'.format(b)\n self[b]=res\n return res\n \ndef quote(string,safe='/',encoding=None ,errors=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n if isinstance(string,str):\n if not string:\n return string\n if encoding is None :\n encoding='utf-8'\n if errors is None :\n errors='strict'\n string=string.encode(encoding,errors)\n else :\n if encoding is not None :\n raise TypeError(\"quote() doesn't support 'encoding' for bytes\")\n if errors is not None :\n raise TypeError(\"quote() doesn't support 'errors' for bytes\")\n return quote_from_bytes(string,safe)\n \ndef quote_plus(string,safe='',encoding=None ,errors=None ):\n ''\n\n\n \n \n \n if ((isinstance(string,str)and ' 'not in string)or\n (isinstance(string,bytes)and b' 'not in string)):\n return quote(string,safe,encoding,errors)\n if isinstance(safe,str):\n space=' '\n else :\n space=b' '\n string=quote(string,safe+space,encoding,errors)\n return string.replace(' ','+')\n \ndef quote_from_bytes(bs,safe='/'):\n ''\n\n\n \n if not isinstance(bs,(bytes,bytearray)):\n raise TypeError(\"quote_from_bytes() expected 
bytes\")\n if not bs:\n return ''\n if isinstance(safe,str):\n \n safe=safe.encode('ascii','ignore')\n else :\n safe=bytes([c for c in safe if c <128])\n if not bs.rstrip(_ALWAYS_SAFE_BYTES+safe):\n return bs.decode()\n try :\n quoter=_safe_quoters[safe]\n except KeyError:\n _safe_quoters[safe]=quoter=Quoter(safe).__getitem__\n return ''.join([quoter(char)for char in bs])\n \ndef urlencode(query,doseq=False ,safe='',encoding=None ,errors=None ):\n ''\n\n\n\n\n\n\n\n\n\n\n\n \n \n if hasattr(query,\"items\"):\n query=query.items()\n else :\n \n \n try :\n \n \n if len(query)and not isinstance(query[0],tuple):\n raise TypeError\n \n \n \n \n except TypeError:\n ty,va,tb=sys.exc_info()\n raise TypeError(\"not a valid non-string sequence \"\n \"or mapping object\").with_traceback(tb)\n \n l=[]\n if not doseq:\n for k,v in query:\n if isinstance(k,bytes):\n k=quote_plus(k,safe)\n else :\n k=quote_plus(str(k),safe,encoding,errors)\n \n if isinstance(v,bytes):\n v=quote_plus(v,safe)\n else :\n v=quote_plus(str(v),safe,encoding,errors)\n l.append(k+'='+v)\n else :\n for k,v in query:\n if isinstance(k,bytes):\n k=quote_plus(k,safe)\n else :\n k=quote_plus(str(k),safe,encoding,errors)\n \n if isinstance(v,bytes):\n v=quote_plus(v,safe)\n l.append(k+'='+v)\n elif isinstance(v,str):\n v=quote_plus(v,safe,encoding,errors)\n l.append(k+'='+v)\n else :\n try :\n \n x=len(v)\n except TypeError:\n \n v=quote_plus(str(v),safe,encoding,errors)\n l.append(k+'='+v)\n else :\n \n for elt in v:\n if isinstance(elt,bytes):\n elt=quote_plus(elt,safe)\n else :\n elt=quote_plus(str(elt),safe,encoding,errors)\n l.append(k+'='+elt)\n return '&'.join(l)\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \ndef to_bytes(url):\n ''\n \n \n \n if isinstance(url,str):\n try :\n url=url.encode(\"ASCII\").decode()\n except UnicodeError:\n raise UnicodeError(\"URL \"+repr(url)+\n \" contains non-ASCII characters\")\n return url\n \ndef unwrap(url):\n ''\n url=str(url).strip()\n if url[:1]=='<'and url[-1:]=='>':\n url=url[1:-1].strip()\n if url[:4]=='URL:':url=url[4:].strip()\n return url\n \n_typeprog=None\ndef splittype(url):\n ''\n global _typeprog\n if _typeprog is None :\n import re\n _typeprog=re.compile('^([^/:]+):')\n \n match=_typeprog.match(url)\n if match:\n scheme=match.group(1)\n return scheme.lower(),url[len(scheme)+1:]\n return None ,url\n \n_hostprog=None\ndef splithost(url):\n ''\n global _hostprog\n if _hostprog is None :\n import re\n _hostprog=re.compile('^//([^/?]*)(.*)$')\n \n match=_hostprog.match(url)\n if match:\n host_port=match.group(1)\n path=match.group(2)\n if path and not path.startswith('/'):\n path='/'+path\n return host_port,path\n return None ,url\n \n_userprog=None\ndef splituser(host):\n ''\n global _userprog\n if _userprog is None :\n import re\n _userprog=re.compile('^(.*)@(.*)$')\n \n match=_userprog.match(host)\n if match:return match.group(1,2)\n return None ,host\n \n_passwdprog=None\ndef splitpasswd(user):\n ''\n global _passwdprog\n if _passwdprog is None :\n import re\n _passwdprog=re.compile('^([^:]*):(.*)$',re.S)\n \n match=_passwdprog.match(user)\n if match:return match.group(1,2)\n return user,None\n \n \n_portprog=None\ndef splitport(host):\n ''\n global _portprog\n if _portprog is None :\n import re\n _portprog=re.compile('^(.*):([0-9]+)$')\n \n match=_portprog.match(host)\n if match:return match.group(1,2)\n return host,None\n \n_nportprog=None\ndef splitnport(host,defport=-1):\n ''\n\n\n \n global _nportprog\n if _nportprog is None :\n import re\n 
_nportprog=re.compile('^(.*):(.*)$')\n \n match=_nportprog.match(host)\n if match:\n host,port=match.group(1,2)\n try :\n if not port:raise ValueError(\"no digits\")\n nport=int(port)\n except ValueError:\n nport=None\n return host,nport\n return host,defport\n \n_queryprog=None\ndef splitquery(url):\n ''\n global _queryprog\n if _queryprog is None :\n import re\n _queryprog=re.compile('^(.*)\\?([^?]*)$')\n \n match=_queryprog.match(url)\n if match:return match.group(1,2)\n return url,None\n \n_tagprog=None\ndef splittag(url):\n ''\n global _tagprog\n if _tagprog is None :\n import re\n _tagprog=re.compile('^(.*)#([^#]*)$')\n \n match=_tagprog.match(url)\n if match:return match.group(1,2)\n return url,None\n \ndef splitattr(url):\n ''\n \n words=url.split(';')\n return words[0],words[1:]\n \n_valueprog=None\ndef splitvalue(attr):\n ''\n global _valueprog\n if _valueprog is None :\n import re\n _valueprog=re.compile('^([^=]*)=(.*)$')\n \n match=_valueprog.match(attr)\n if match:return match.group(1,2)\n return attr,None\n", ["collections", "re", "sys"]], "urllib.request": [".py", "from browser import ajax\nfrom . import error\n\nclass FileIO:\n\n def __init__(self,data):\n self._data=data\n \n def __enter__(self):\n return self\n \n def __exit__(self,*args):\n pass\n \n def read(self):\n return self._data\n \ndef urlopen(url,data=None ,timeout=None ):\n global result\n result=None\n \n def on_complete(req):\n global result\n if req.status ==200:\n result=req\n \n _ajax=ajax.ajax()\n _ajax.bind('complete',on_complete)\n if timeout is not None :\n _ajax.set_timeout(timeout)\n \n if data is None :\n _ajax.open('GET',url,False )\n _ajax.send()\n else :\n _ajax.open('POST',url,False )\n _ajax.send(data)\n \n if result is not None :\n if isinstance(result.text,str):\n return FileIO(result.text)\n \n return FileIO(result.text())\n raise error.HTTPError('file not found')\n", ["browser", "browser.ajax", "urllib", "urllib.error"]], "urllib": [".py", "", [], 1]} +__BRYTHON__.update_VFS(scripts)